def SqliteConnect():
    con = sqlite3.connect(':memory:')
    con.create_aggregate('ArgMin', 3, ArgMin)
    con.create_aggregate('ArgMax', 3, ArgMax)
    con.create_aggregate('DistinctListAgg', 1, DistinctListAgg)
    con.create_aggregate('ARRAY_CONCAT_AGG', 1, ArrayConcatAgg)
    con.create_function('PrintToConsole', 1, PrintToConsole)
    con.create_function('ARRAY_CONCAT', 2, ArrayConcat)
    con.create_function('JOIN_STRINGS', 2, Join)
    con.create_function('ReadFile', 1, ReadFile)
    con.create_function('WriteFile', 2, WriteFile)
    con.create_function('SQRT', 1, lambda x: float(x) ** 0.5)
    con.create_function('POW', 2, lambda x, p: float(x) ** p)
    con.create_function('Exp', 1, lambda x: math.exp(x))
    con.create_function('Log', 1, lambda x: math.log(x))
    con.create_function('Sin', 1, lambda x: math.sin(x))
    con.create_function('Cos', 1, lambda x: math.cos(x))
    con.create_function('Asin', 1, lambda x: math.asin(x))
    con.create_function('Acos', 1, lambda x: math.acos(x))
    con.create_function('Split', 2, lambda x, y: json.dumps(x.split(y)))
    con.create_function('ARRAY_TO_STRING', 2, lambda x, y: y.join(x))
    con.create_function('SortList', 1, SortList)
    con.create_function('MagicalEntangle', 2, lambda x, y: x)
    con.create_function('IN_LIST', 2, InList)
    con.create_function('ERROR', 1, UserError)
    con.create_function('Fingerprint', 1, Fingerprint)
    con.create_function('Floor', 1, math.floor)
    sqlite3.enable_callback_tracebacks(True)
    return con
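A minimal standalone sketch (standard library only; none of the helpers registered above are required) of what enable_callback_tracebacks(True) buys you: without it, an exception raised inside a user-defined function is swallowed and surfaces only as a generic sqlite3.OperationalError.

import sqlite3

sqlite3.enable_callback_tracebacks(True)

con = sqlite3.connect(':memory:')
con.create_function('bad', 1, lambda x: 1 / 0)  # deliberately raises

try:
    con.execute('SELECT bad(1)')
except sqlite3.OperationalError:
    # SQLite still reports "user-defined function raised exception",
    # but the ZeroDivisionError traceback was printed to stderr.
    pass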
def enable_callback_tracebacks(flag):
    """Enable tracebacks from user-defined functions, aggregates,
    converters, authorizer callbacks, etc."""
    assert isinstance(flag, bool), \
        NPR.niceassert('enable_callback_traceback: wrong argument:[{!s}]'
                       .format(flag))
    _success = True
    try:
        lite.enable_callback_tracebacks(flag)
    except lite.Error as err:
        _success = False
        NPR.niceerror(caught=True,
                      caughtprefix='+++ DB',
                      caughtcode='001',
                      caughtmsg='DB error on [{!s}]: [{!s}]'.format(
                          'Enable callback', err.args[0]),
                      useniceprint=True)
    logging.info('<-- DBHelper.enable_callback [%s] _success:[%s]',
                 flag, _success)
    return _success
async def connect(cls, path: Union[bytes, str], *args, **kwargs):
    sqlite3.enable_callback_tracebacks(True)

    def _connect():
        return sqlite3.connect(path, *args, **kwargs)

    db = cls()
    db.connection = await asyncio.get_event_loop().run_in_executor(
        db.executor, _connect)
    return db
def __init__(self, path, tables={}, enable_traces=True):
    """
    Arguments:
        path (str): The path to the database file.
        tables (dict of {str: tuple of str}, optional): A dictionary of
            {name: schema} pairs, where a schema is a list of
            (name, type) tuples.
        enable_traces (bool, optional): Enable traces for user-defined
            functions and aggregates.
    """
    self.path = fs.path(path)

    # Create directory if needed.
    parent_dir = fs.dirname(path)
    if parent_dir:
        fs.mkdir(parent_dir)

    self.connection = sql.connect(self.path)

    for name, schema in six.iteritems(tables):
        self.create_table(name, schema)

    io.debug("Opened connection to '{0}'".format(self.path))

    # Register exit handler.
    atexit.register(self.close)

    # Enable traces for user-defined functions and aggregates. See:
    # https://docs.python.org/2/library/sqlite3.html#sqlite3.enable_callback_tracebacks
    if enable_traces:
        sql.enable_callback_tracebacks(True)
def __init__(self, bot):
    # Check the incoming bot object; self.bot is not assigned yet.
    if not hasattr(bot, 'cogs_messages'):
        raise Exception("Cog message needed")
    self.bot = bot
    directory = str(data_manager.cog_data_path()) + "/{}".format(
        self.__class__.__name__)
    # Let's initiate the storage.
    if not os.path.exists(directory):
        os.makedirs(directory)
    sqlite3.enable_callback_tracebacks(True)
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
    self.conn = sqlite3.connect(directory + '/votes.db')
    c = self.conn.cursor()
    scheme_votes = ", ".join([
        'id INTEGER PRIMARY KEY',
        'member_id INTEGER',
        'picture_url TEXT',
        'created_at TIMESTAMP',
        'end_at TIMESTAMP',
        'closed BOOLEAN DEFAULT 0'
    ])
    scheme_voices = ", ".join([
        'id INTEGER PRIMARY KEY',
        'vote_id INTEGER',
        'value INTEGER',
        'reason TEXT',
        'valid BOOLEAN DEFAULT 0',
        'created_at TIMESTAMP',
        'FOREIGN KEY(vote_id) REFERENCES votes(id)'
    ])
    if c.execute("SELECT name FROM sqlite_master WHERE type='table' "
                 "AND name='votes'").fetchone() is None:
        c.execute('''CREATE TABLE 'votes' ({})'''.format(scheme_votes))
        c.execute('''CREATE TABLE 'voices' ({})'''.format(scheme_voices))
        self.conn.commit()
    c.close()
def __init__(self, filename):
    '''Initialize the database.'''
    # filename (str): database file name.
    super(DataBase, self).__init__()
    # Create a logger object.
    self.logger = log.get_logger("DataBase")
    # Create the db directory if it does not exist yet.
    db_path = 'db'
    if not os.path.exists(db_path):
        self.logger.info("create directory: db")
        os.mkdir(db_path)
    db_name = os.path.join(db_path, filename)
    if not os.path.exists(db_name):
        self.logger.info("create database file:" + filename)
        self.__create_table(db_name)
    # Debug switch.
    sqlite3.enable_callback_tracebacks(True)
    # Connect to the database.
    self.con = sqlite3.connect(db_name)
    self.con.row_factory = sqlite3.Row
    self.logger.info("init DataBase successfully")
def get_db(path, **collectors):
    conn = sqlite3.connect(path)
    conn.text_factory = str
    conn.row_factory = sqlite3.Row
    sqlite3.enable_callback_tracebacks(True)
    conn.create_collation(
        "alphanum", lambda x1, x2: cmp(alphanum_key(x1), alphanum_key(x2)))
    conn.create_aggregate("wavg", 2, SqliteWeightedAvg)
    conn.create_aggregate("std", 1, SqliteStd)
    conn.create_aggregate("corrcoef", 2, SqliteCorrcoef)
    conn.create_aggregate("correlate", 2, SqliteCorrelate)
    conn.create_aggregate("auto_correlate", 1, SqliteAutoCorrelate)
    conn.create_aggregate("first", 1, SqliteFirst)
    conn.create_aggregate("last", 1, SqliteLast)
    conn.create_aggregate("plot", 2, SqlitePlot)
    conn.create_aggregate("plot", 3, SqlitePlot)
    conn.create_aggregate("hist", 2, SqliteHist)
    conn.create_aggregate("corr", 3, SqliteCorr)
    conn.create_aggregate("scatter", 3, SqliteScatter)
    for i in range(2, 32):
        conn.create_aggregate("bar", i, SqliteBar)
    for table, collector in collectors.items():
        dump2db(conn, table, collector)
    return conn
def get_genes_simple(db, location=None, use_strand=False, overlap=True):
    sql, sql_parameters = location_to_sql(location, use_strand, overlap)
    sql = "{0} {1} {2}".format(SQL_GENES_SIMPLE, sql,
                               SQL_GENES_SIMPLE_ORDER_BY)
    LOG.debug("SQL:\n{0}".format(sql))
    LOG.debug("PARAMETERS: {0}".format(sql_parameters))
    conn = sqlite3.connect(db)
    sqlite3.enable_callback_tracebacks(True)
    conn.row_factory = sqlite3.Row
    conn.text_factory = str
    cursor = conn.cursor()
    cursor.execute(sql, sql_parameters)
    genes = []
    for r in cursor:
        genes.append(Gene(r['ensembl_id'], r['seqid'], r['start'],
                          r['end'], r['strand']))
    cursor.close()
    conn.close()
    return genes
def get_nearest(self, lokasi, rad):
    around = bounding(lokasi, rad)
    print(around)
    self.conn.create_function('jarak', 4, jarak)
    stmt = ("select id,agenda,host,"
            "jarak(:slat,:slon,CAST(lat as real),CAST(lon as real)) as D from "
            "(SELECT id,agenda,host,lat,lon FROM ngaji "
            "WHERE lat BETWEEN :minlat AND :maxlat "
            "AND lon BETWEEN :minlon AND :maxlon "
            "AND tanggal BETWEEN date('now','start of day') "
            "AND date('now','+7 days')) as FirstCut "
            "WHERE jarak(:slat,:slon,CAST(lat as real),CAST(lon as real)) "
            "< :rad ORDER BY D")
    par = {'slat': lokasi[0], 'slon': lokasi[1], 'rad': rad,
           'minlat': around[0], 'maxlat': around[1],
           'minlon': around[3], 'maxlon': around[2]}
    sqlite3.enable_callback_tracebacks(True)
    data = self.conn.execute(stmt, par)
    all = data.fetchall()
    if not all:
        return None  # no nearby location found
    else:
        return all
def __init__(self, path):
    self._path = path
    self._db = sqlite3.connect(path)
    self._db.row_factory = sqlite3.Row
    self._db.set_authorizer(self.auth_cb)
    sqlite3.enable_callback_tracebacks(True)
def make_index(self):
    """Generate an index of all files' contents"""
    type = settings.get("INDEX", False)
    if type:
        data = self.indexdata.copy()
        W = re.compile(r'\W+', re.U)
        for k in data:
            data[k] = list(
                set(W.split(re.sub(r'<.+?>', '', data[k].lower()))))
            data[k] = filter(lambda s: s and len(s) > 1, data[k])
        if type in ("ALL", "JSON"):
            index = open(os.path.join(settings.BUILD_TARGET, "index.json"),
                         "wb")
            index.write(json.dumps(data))
            index.close()
        if type in ("ALL", "SQLITE"):
            sqlite3.enable_callback_tracebacks(settings.DEBUG)
            db = sqlite3.connect(
                os.path.join(settings.BUILD_TARGET, "index.sqlite"))
            cur = db.cursor()
            cur.execute('CREATE TABLE terms ( p, t )')
            for k in data:
                for i in data[k]:
                    cur.execute('INSERT INTO terms (p, t) VALUES (?, ?)',
                                (unicode(k), unicode(i)))
            cur.close()
            db.commit()
            db.close()
    return bool(type)
def execute(tables, query):
    sqlite3.enable_callback_tracebacks(True)
    con = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
    _register_adapters(sqlite3)
    _register_converters(sqlite3)
    _create_functions(con, functions)
    _create_aggregate_functions(con, aggregate_functions)
    con.execute("PRAGMA temp_store = MEMORY")
    con.execute("PRAGMA journal_mode = OFF")
    table_names = _write_table(tables, con)
    for i, table_name in enumerate(table_names):
        query = query.replace("""#{{DF({i})}}""".format(i=i), table_name)
    res = _get_deserialized_table(sql.read_sql(query, con))
    con.close()  # delete tables?
    return {'out_table': res}
def __init__(self, bot):
    self.bot = bot
    directory = str(data_manager.cog_data_path()) + "/{}".format(
        self.__class__.__name__)
    # Let's initiate the storage.
    if not os.path.exists(directory):
        os.makedirs(directory)
    sqlite3.enable_callback_tracebacks(True)
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
    self.conn = sqlite3.connect(directory + '/messages.db',
                                detect_types=sqlite3.PARSE_DECLTYPES)
    scheme = ", ".join([
        'channel_id INTEGER',
        'message_id INTEGER',
        'author_id INTEGER',
        'date TIMESTAMP',
        'content TEXT',
        'deleted BOOLEAN DEFAULT 0',
        'edited BOOLEAN DEFAULT 0',
        'revision_count INTEGER DEFAULT 0',
        'PRIMARY KEY (message_id, revision_count)'
    ])
    c = self.conn.cursor()
    # For each guild, check whether its table exists.
    for guild in self.bot.guilds:
        # The table name is stored as text, so it must be quoted here;
        # an unquoted integer would never match.
        if c.execute("SELECT name FROM sqlite_master WHERE type='table' "
                     "AND name='{}'".format(guild.id)).fetchone() is None:
            c.execute('''CREATE TABLE '{}' ({})'''.format(guild.id, scheme))
            # Index names are database-wide, so each guild table needs
            # its own index name.
            c.execute("CREATE INDEX `author_index_{0}` ON `{0}` "
                      "( `author_id` )".format(guild.id))
            self.conn.commit()
    c.close()
    self.bot.cogs_messages = True
def get_db():
    db = getattr(g, '_database', None)
    if db is None:
        sqlite3.enable_callback_tracebacks(True)
        db = g._database = sqlite3.connect(DATABASE)
        db.row_factory = sqlite3.Row
    return db
def connection_setup(self, db_connection):
    if db_connection.vendor == 'sqlite':
        # Add MD5 support for sqlite; this calls back into Python and
        # will probably have a performance impact.
        import hashlib
        import sqlite3
        sqlite3.enable_callback_tracebacks(True)  # otherwise errors get ignored
        db_connection.connection.create_function(
            "MD5", 1, lambda c: hashlib.md5(c.encode('utf8')).hexdigest())
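A quick way to exercise the registered function through the same Django connection; the raw-cursor call below is illustrative, not part of the hook above.

cursor = db_connection.cursor()
cursor.execute("SELECT MD5(%s)", ["hello"])  # hypothetical raw query
assert cursor.fetchone()[0] == "5d41402abc4b2a76b9719d911017c592"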
def get_sql_connection(filepath):
    """Open a SQLite database and return the connection object.

    Args:
        filepath (str): sqlite filepath

    Returns:
        sqlite3.Connection: Sqlite3 Connection
            The connection is initialized with `row_factory = Row`,
            so all results are accessible via indexes or keys.
    """
    connection = sqlite3.connect(filepath)
    # Activate foreign keys.
    connection.execute("PRAGMA foreign_keys = ON")
    connection.row_factory = sqlite3.Row
    foreign_keys_status = connection.execute(
        "PRAGMA foreign_keys").fetchone()[0]
    LOGGER.debug("get_sql_connection:: foreign_keys state: %s",
                 foreign_keys_status)
    assert foreign_keys_status == 1, "Foreign keys can't be activated :("

    # Create a REGEXP function for SQLite.
    def regexp(expr, item):
        # Need to cast item to str... costly.
        return re.search(expr, str(item)) is not None

    connection.create_function("REGEXP", 2, regexp)

    if LOGGER.getEffectiveLevel() == logging.DEBUG:
        # Enable tracebacks from custom functions in DEBUG mode only.
        sqlite3.enable_callback_tracebacks(True)

    return connection
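A usage sketch for the REGEXP function registered above; the table, rows, and path are hypothetical. SQLite rewrites `name REGEXP ?` into a call to the Python regexp() helper.

conn = get_sql_connection('samples.db')  # path is illustrative
conn.execute("CREATE TABLE IF NOT EXISTS samples (name TEXT)")
conn.executemany("INSERT INTO samples VALUES (?)", [('chr1',), ('chrX',)])
rows = conn.execute("SELECT name FROM samples WHERE name REGEXP ?",
                    (r'^chr\d+$',)).fetchall()
assert [row['name'] for row in rows] == ['chr1']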
def connect_database(db=DB_NAME):
    if db is None:
        mydb = ':memory:'
    else:
        mydb = db
    print('Connection established')
    with sql.connect(mydb) as connection:
        connection.create_function("md4", 1, md4Hash)
        sql.enable_callback_tracebacks(True)
        return connection
def _remove(self, args):
    '''
    Remove a package
    '''
    if len(args) < 2:
        log.error('A package must be specified')
        return False

    package = args[1]
    log.debug('Removing package {0}'.format(package))

    if not os.path.exists(self.opts['spm_db']):
        log.error('No database at {0}, cannot remove {1}'.format(
            self.opts['spm_db'], package))
        return

    # Look at local repo index
    sqlite3.enable_callback_tracebacks(True)
    conn = sqlite3.connect(self.opts['spm_db'], isolation_level=None)
    cur = conn.cursor()
    data = conn.execute('SELECT * FROM packages WHERE package=?', (package, ))
    if not data.fetchone():
        log.error('Package {0} not installed'.format(package))
        return

    # Find files that have not changed and remove them
    data = conn.execute('SELECT path, sum FROM files WHERE package=?',
                        (package, ))
    dirs = []
    for filerow in data.fetchall():
        if os.path.isdir(filerow[0]):
            dirs.append(filerow[0])
            continue
        with salt.utils.fopen(filerow[0], 'r') as fh_:
            file_hash = hashlib.sha1()
            file_hash.update(fh_.read())
            digest = file_hash.hexdigest()
        if filerow[1] == digest:
            log.trace('Removing file {0}'.format(filerow[0]))
            os.remove(filerow[0])
        else:
            log.trace('Not removing file {0}'.format(filerow[0]))
        conn.execute('DELETE FROM files WHERE path=?', (filerow[0], ))

    # Clean up directories
    for dir_ in sorted(dirs, reverse=True):
        conn.execute('DELETE FROM files WHERE path=?', (dir_, ))
        try:
            log.trace('Removing directory {0}'.format(dir_))
            os.rmdir(dir_)
        except OSError:
            # Leave directories in place that still have files in them
            log.trace('Cannot remove directory {0}, probably not '
                      'empty'.format(dir_))

    conn.execute('DELETE FROM packages WHERE package=?', (package, ))
def __init__(self, sqlite_connection):
    self.connection = sqlite_connection
    # For easier debugging of custom SQL functions written in Python
    sqlite3.enable_callback_tracebacks(True)
    # LIKE queries must be case-sensitive in order to use an index
    self.connection.execute('PRAGMA case_sensitive_like=ON')
    self.date_offsets = dict(
        self.connection.execute('SELECT date, offset FROM date'))
    self.practice_offsets = dict(
        self.connection.execute('SELECT code, offset FROM practice'))
    self.connection.create_aggregate('MATRIX_SUM', 1, MatrixSum)
def open_sqlite_db(location):
    sqlite_numpy_handling()
    conn = sqlite3.connect(location,
                           detect_types=sqlite3.PARSE_DECLTYPES
                           | sqlite3.PARSE_COLNAMES)
    conn.create_aggregate('mean', 2, MeanFunc)
    conn.create_aggregate('stddev', 2, StddevFunc)
    sqlite3.enable_callback_tracebacks(True)
    return conn
@contextlib.contextmanager
def check_tracebacks(self, strings):
    """Convenience context manager for testing callback tracebacks."""
    sqlite.enable_callback_tracebacks(True)
    try:
        buf = io.StringIO()
        with contextlib.redirect_stderr(buf):
            yield
        tb = buf.getvalue()
        for s in strings:
            self.assertIn(s, tb)
    finally:
        sqlite.enable_callback_tracebacks(False)
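A hypothetical test method showing the intended call shape for this helper; the failing UDF and connection attribute are illustrative.

def test_func_traceback(self):
    self.con.create_function('boom', 0, lambda: 1 / 0)  # hypothetical UDF
    with self.check_tracebacks(['ZeroDivisionError']):
        with self.assertRaises(sqlite.OperationalError):
            self.con.execute('SELECT boom()')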
def init():
    '''
    Get an sqlite3 connection, and initialize the package database if
    necessary
    '''
    if not os.path.exists(__opts__['spm_cache_dir']):
        log.debug('Creating SPM cache directory at {0}'.format(
            __opts__['spm_db']))
        os.makedirs(__opts__['spm_cache_dir'])
    if not os.path.exists(__opts__['spm_db']):
        log.debug('Creating new package database at {0}'.format(
            __opts__['spm_db']))
    sqlite3.enable_callback_tracebacks(True)
    conn = sqlite3.connect(__opts__['spm_db'], isolation_level=None)
    try:
        conn.execute('SELECT count(*) FROM packages')
    except OperationalError:
        conn.execute('''CREATE TABLE packages (
            package text,
            version text,
            release text,
            installed text,
            os text,
            os_family text,
            dependencies text,
            os_dependencies text,
            os_family_dependencies text,
            summary text,
            description text
        )''')
    try:
        conn.execute('SELECT count(*) FROM files')
    except OperationalError:
        conn.execute('''CREATE TABLE files (
            package text,
            path text,
            size real,
            mode text,
            sum text,
            major text,
            minor text,
            linkname text,
            linkpath text,
            uname text,
            gname text,
            mtime text
        )''')
    return conn
def setUp(self):
    sqlite3.enable_callback_tracebacks(True)
    self.cache = Cache(sqlite3.connect(':memory:'))
    self.stdscr = curses.initscr()
    curses.noecho()
    curses.cbreak()
    curses.curs_set(False)
    self.color_scheme = {
        'sidebar': 0,
        'sidebar-new': curses.A_UNDERLINE,
        'sidebar-indicator': curses.A_REVERSE,
        'sidebar-new-indicator': curses.A_UNDERLINE | curses.A_REVERSE,
    }
def RegisterSQLiteFunctions(dbh):
    sqlite3.enable_callback_tracebacks(True)
    dbh.create_function("REGEXP", 2, Regexp)
    dbh.create_function('Basename', 1, Basename)
    dbh.create_function('BasenameN', 2, BasenameN)
    dbh.create_function("GetRegMatch", 3, GetRegMatch)
    dbh.create_function("GetRegMatchArray", 3, GetRegMatchArray)
    dbh.create_function("RemoveNewLines", 1, RemoveNewLines)
    dbh.create_function("DtFormat", 2, DtFormat)
    dbh.create_function("DtFormatTz", 4, DtFormatTz)
    if GEO_MANAGER.DB_ATTACHED:
        dbh.create_function("GetIpInfo", 1, GetIpInfo)
def wrapper(self, *args, **kwargs):
    # First, run the test with traceback enabled.
    sqlite.enable_callback_tracebacks(True)
    buf = io.StringIO()
    with contextlib.redirect_stderr(buf):
        func(self, *args, **kwargs)
    tb = buf.getvalue()
    for s in strings:
        self.assertIn(s, tb)

    # Then run the test with traceback disabled.
    sqlite.enable_callback_tracebacks(False)
    func(self, *args, **kwargs)
async def connect(cls, path: Union[bytes, str], *args, **kwargs):
    sqlite3.enable_callback_tracebacks(True)
    db = cls()

    def _connect_writer():
        db.writer_connection = sqlite3.connect(path, *args, **kwargs)

    readers = max(os.cpu_count() - 2, 2)
    db.reader_executor = ReaderExecutorClass(
        max_workers=readers, initializer=initializer, initargs=(path, ))
    await asyncio.get_event_loop().run_in_executor(
        db.writer_executor, _connect_writer)
    db.read_ready.set()
    return db
@contextlib.contextmanager  # assumed: the try/yield/commit shape implies it
def run(dbpath, *, setup=setup):
    sqlite3.enable_callback_tracebacks(True)
    with sqlite3.connect(dbpath) as conn:
        conn.row_factory = sqlite3.Row
        try:
            conn.set_trace_callback(print)
            c = conn.cursor()
            setup(c)
            yield c
        except Exception:
            conn.rollback()
            raise
        else:
            conn.commit()
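Assuming run() is indeed a context manager (decorator added above), a hedged usage sketch; the database path, table, and insert are illustrative.

with run('example.db') as c:
    c.execute('CREATE TABLE IF NOT EXISTS items (name TEXT)')
    c.execute('INSERT INTO items (name) VALUES (?)', ('widget',))
# A clean exit commits; an exception inside the block rolls back and re-raises.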
def _open(self, name):
    # type: (str) -> None
    sqlite3.enable_callback_tracebacks(True)
    sqlite3.register_converter("BLOB", Blob.deserialise)
    try:
        self.connection = sqlite3.connect(
            name, timeout=1, detect_types=sqlite3.PARSE_DECLTYPES)
    except sqlite3.Error as e:
        logger.debug(e)
        return
    # see: https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.row_factory
    self.connection.row_factory = sqlite3.Row
    self.connection.text_factory = sqlite3.OptimizedUnicode
    self._create_table()
def _setup_database(self):
    connection = sqlite3.connect(':memory:')
    cursor = connection.cursor()
    connection.create_function("hamming_dist", 2, self._hamming_distance)
    connection.row_factory = sqlite3.Row
    sqlite3.enable_callback_tracebacks(True)
    # Create table
    cursor.execute('''CREATE TABLE IF NOT EXISTS file_details (
        file_name text,
        hash_code text,
        cohort_id integer)''')
    cursor.execute(
        '''CREATE INDEX IF NOT EXISTS idx_hash on file_details(hash_code)''')
    return connection, cursor
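A sketch of querying through the registered hamming_dist UDF; the rows and threshold are illustrative, and self._hamming_distance is assumed to take two hash strings.

conn, cursor = self._setup_database()
cursor.executemany('INSERT INTO file_details VALUES (?, ?, ?)',
                   [('a.jpg', 'ffff', 1), ('b.jpg', '0000', 1)])
cursor.execute('SELECT file_name FROM file_details '
               'WHERE hamming_dist(hash_code, ?) <= ?', ('fff0', 4))
near_duplicates = [row[0] for row in cursor.fetchall()]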
@contextlib.contextmanager
def check_tracebacks(self, cm, exc, regex, obj_name):
    """Convenience context manager for testing callback tracebacks."""
    sqlite.enable_callback_tracebacks(True)
    try:
        buf = io.StringIO()
        with contextlib.redirect_stderr(buf):
            yield
        self.assertEqual(cm.unraisable.exc_type, exc)
        if regex:
            msg = str(cm.unraisable.exc_value)
            self.assertIsNotNone(regex.search(msg))
        if obj_name:
            self.assertEqual(cm.unraisable.object.__name__, obj_name)
    finally:
        sqlite.enable_callback_tracebacks(False)
def _open_to_use(self):
    """Open to use."""
    if LocalStore._debug:
        print "Open MegBook %s to use." % self.local_store_filename
    self.conn = sqlite3.connect(self.local_store_filename)
    self.conn.create_function("regexp", 2, megregexp)
    self.conn.row_factory = sqlite3.Row
    # self.conn.text_factory = str
    if LocalStore._debug:
        sqlite3.enable_callback_tracebacks(True)
    else:
        sqlite3.enable_callback_tracebacks(True)  # while in testing TODO
    if LocalStore._debug:
        print ".... ready."
def __init__(self):
    # Initialize logging environment.
    fmt = '%(asctime)s %(levelname)s %(name)s.%(funcName)s() %(message)s'
    formatter = logging.Formatter(fmt=fmt)
    handler = logging.FileHandler(os.path.join('logs/', 'Handler.log'))
    handler.setFormatter(formatter)
    # Keep the logger on self; the last line of this method uses it.
    self._event_log = logging.getLogger(__name__)
    self._event_log.setLevel(logging.DEBUG)
    self._event_log.addHandler(handler)

    # Initialize class variables.
    self.__closed = False
    self.__post_to_slack = False
    self.__slack_url = ''
    self.__last_error = (time() - 300)
    self.__DB_TIMEOUT = 0.15
    self.__OB_PATH = 'order_books.db'
    self.__TICKER_PATH = 'tickers.db'
    self.__logger_thread = threading.Thread(target=self.__query_thread,
                                            daemon=True)

    # Initialize database.
    sqlite3.enable_callback_tracebacks(True)
    self.__init_database()

    # Initialize order books and loggers.
    self.percent_ranges = [0.01, 0.05, 0.1, 0.5, 1, 2.5, 5, 10, 25]
    self.product_ids = [
        GDAXConst.btc_usd, GDAXConst.eth_usd, GDAXConst.ltc_usd,
        GDAXConst.bch_usd
    ]
    self._order_books = {
        GDAXConst.btc_usd: OrderBook(50000, GDAXConst.btc_usd),
        GDAXConst.eth_usd: OrderBook(10000, GDAXConst.eth_usd),
        GDAXConst.ltc_usd: OrderBook(5000, GDAXConst.ltc_usd),
        GDAXConst.bch_usd: OrderBook(20000, GDAXConst.bch_usd)
    }
    self.ticker_columns = [
        GDAXConst.system_time, GDAXConst.server_time, GDAXConst.product_id,
        GDAXConst.price, GDAXConst.open_24h, GDAXConst.volume_24h,
        GDAXConst.best_bid, GDAXConst.best_ask, GDAXConst.side,
        GDAXConst.last_size
    ]
    self.__logger_thread.start()
    self._event_log.debug("initialized...")
def wrapper(*w_args, **w_kwargs):
    global db_file
    query_dict = func(*w_args, **w_kwargs)
    try:
        conn = sqlite3.connect(db_file)
    except sqlite3.Error as sqlerr:
        # TODO: write a custom function for this
        print(sys.argv[0],
              "Could not open the database file:{0}".format(sqlerr))
        exit(2)
    c = conn.cursor()
    if query_dict['func']:
        sqlite3.enable_callback_tracebacks(True)
        conn.create_function(*query_dict['func'])
    c.execute(query_dict['query'], query_dict['args'])
    if query_dict['query'].lstrip().upper().startswith("SELECT"):
        return c.fetchall()
    return conn.commit()
def setUp(self):
    sqlite3.enable_callback_tracebacks(True)
    self.cache = Cache(sqlite3.connect(':memory:'))
    date1 = datetime.datetime.fromtimestamp(1, datetime.timezone.utc)
    date2 = datetime.datetime.fromtimestamp(2, datetime.timezone.utc)
    self.cache.add_message(1337, date=date2,
                           from_=['"Jane Doe" <*****@*****.**>'],
                           subject='Janie', flags={'\\Seen'}, labels=set(),
                           modseq=1)
    self.cache.add_message(1338, date=date1,
                           from_=['"John Doe" <*****@*****.**>'],
                           subject='Johnnie', flags={'\\Answered'},
                           labels=set(), modseq=2)
    self.cache.add_message(1336, date=date2,
                           from_=['"Joe Bloggs" <*****@*****.**>'],
                           subject='Joey', flags={'\\Flagged'},
                           labels=set(), modseq=3)
    self.cache.add_mailbox_uid('INBOX', 1, 1337)
    self.cache.add_mailbox_uid('INBOX', 2, 1338)
    self.cache.add_mailbox_uid('INBOX', 5, 1336)
    self.stdscr = curses.initscr()
    curses.noecho()
    curses.cbreak()
    curses.curs_set(False)
    self.color_scheme = {
        'index': 0,
        'index-new': curses.A_UNDERLINE,
        'index-indicator': curses.A_REVERSE,
        'index-new-indicator': curses.A_UNDERLINE | curses.A_REVERSE,
    }
    # The index is displayed in order of descending dates, with ties broken
    # by the Gmail message ID, also in descending order. So, we should
    # have, in order:
    self.keys = [
        (-2, -1337),
        (-2, -1336),
        (-1, -1338),
    ]
    self.rows = [
        (1337, 2, 0, '"Jane Doe" <*****@*****.**>', 'Janie', '\\Seen'),
        (1336, 2, 0, '"Joe Bloggs" <*****@*****.**>', 'Joey', '\\Flagged'),
        (1338, 1, 0, '"John Doe" <*****@*****.**>', 'Johnnie', '\\Answered'),
    ]
def main(source, dest, dict):
    if not os.path.exists(source):
        raise IOError(f'No database found at {source}')

    # XXX old versions of sqlite_utils do not have "attach" and
    # so will error out. How to establish the right version?
    db = Database(source)

    # If there are errors in the user-defined function, this is the
    # only way to get the actual error and not
    # "user-defined function raised exception".
    sqlite3.enable_callback_tracebacks(True)

    initialize(dest)
    db.attach("blacklite_zstd", dest)

    cctx = None
    if dict:
        zstd_dict = create_dictionary(db)
        cctx = zstd.ZstdCompressor(dict_data=zstd_dict)
        db.execute("""
            CREATE TABLE IF NOT EXISTS blacklite_zstd.zstd_dicts (
                dict_id LONG NOT NULL PRIMARY KEY,
                dict_bytes BLOB NOT NULL)
        """)
        db.execute("INSERT INTO blacklite_zstd.zstd_dicts VALUES (?, ?)",
                   [zstd_dict.dict_id(), zstd_dict.as_bytes()])
    else:
        cctx = zstd.ZstdCompressor()

    def compress(s):
        return cctx.compress(bytes(s, "utf8"))

    db.register_function(compress)
    db.execute("""
        insert into blacklite_zstd.entries (epoch_secs, nanos, level, content)
        select epoch_secs, nanos, level, compress(content) from entries
    """)
    db.execute("COMMIT")
def get_user_by_username(self, personality, username):
    def regexp(expr, value):
        if not isinstance(expr, str):
            expr = str(expr)
        regex = re.compile(expr)
        return regex.match(value) is not None

    conn = sqlite3.connect(self.users)
    sqlite3.enable_callback_tracebacks(True)
    conn.create_function("regexp", 2, regexp)

    if username is None:
        username = b""
    username = username.decode("utf-8")

    cur = conn.cursor()
    cur.execute("SELECT username, password, pickup_delay_min, "
                "pickup_delay_max, action, sdp FROM users "
                "WHERE personality = ? AND ? REGEXP username",
                (personality, username))
    row = cur.fetchone()
    if row is None:
        return None

    password = row[1]
    if type(password) == int:
        password = str(password)

    # ToDo: sdp is not used! Recheck!!!
    sdp = row[5]
    if sdp == '' or sdp is None:
        sdp = self.personalities[personality].default_sdp

    return User(
        username=username,
        username_regex=row[0],
        password=password,
        pickup_delay_min=row[2],
        pickup_delay_max=row[3],
        action=row[4],
        sdp=row[5]
    )
def connect_db():
    db = getattr(g, '_database', None)
    if db is None:
        db = g._database = connect_to_database()
        sqlite3.enable_callback_tracebacks(True)
    return db
def createConnection(self):
    sqlite3.enable_callback_tracebacks(True)
    connection = sqlite3.connect(self.dbPath)
    connection.row_factory = sqlite3.Row
    return connection
import sqlite3
sqlite3.enable_callback_tracebacks(True)

import unittest

from peewee import *
from playhouse import sqlite_ext as sqe

# Use a disk-backed db since memory dbs only exist for a single connection and
# we need to share the db w/2 for the locking tests. Additionally, set the
# sqlite_busy_timeout to 100ms so when we test locking it doesn't take forever.
ext_db = sqe.SqliteExtDatabase('tmp.db', timeout=.1)

# Test aggregate.
class WeightedAverage(object):
    def __init__(self):
        self.total_weight = 0.0
        self.total_ct = 0.0

    def step(self, value, wt=None):
        wt = wt or 1.0
        self.total_weight += wt
        self.total_ct += wt * value

    def finalize(self):
        if self.total_weight != 0.0:
            return self.total_ct / self.total_weight
        return 0.0

# Test collations.
def _cmp(l, r):
    if l < r:
        return -1
    elif l > r:
        return 1
    return 0
'''
Created on 11/05/2014

NOTE: Unfinished so far, to be migrated to HDF5 (pytables) in order to
achieve better performance

@author: davidreyblanco
'''
import pandas as pd
import numpy as np
import time
import config
#from pandas.io.pytables import Term
import sqlite3

# Disable messages appearing
sqlite3.enable_callback_tracebacks(False)

chunk_size = 30
sample_searches = pd.read_csv(config.data_folder + 'searches.csv.bz2',
                              sep='^', iterator=True, chunksize=chunk_size)
record_count = 0
index = 0
block_limit = 5
t_0 = time.time()
aggregated = None

# Open the database
conn = sqlite3.connect(config.data_folder + 'bookings.db')
c = conn.cursor()

# For each chunk in the dataset
for chunk in sample_searches:
    t0 = time.time()
    index = index + 1
def __init__(self, **options):
    super(SQLiteDatabase, self).__init__()
    self.cursor = cursor_factory(**options)
    self._options = options
    sql.enable_callback_tracebacks(True)
def authorizer(sqltype, arg1, arg2, dbname, source):
    if sqltype == lite.SQLITE_INSERT:
        print 'Inserting into', arg1
        return lite.SQLITE_OK
    elif sqltype == lite.SQLITE_UPDATE:
        print 'Updating %s.%s' % (arg1, arg2)
        return lite.SQLITE_OK
    elif sqltype == lite.SQLITE_DELETE:
        print 'Deleting from table', arg1
        return lite.SQLITE_OK
    elif sqltype == lite.SQLITE_DROP_TABLE:
        print 'Dropping table', arg1
        return lite.SQLITE_OK

lite.enable_callback_tracebacks(True)

con = lite.connect('allsortsDB.db')
with con:
    cur = con.cursor()
    con.set_authorizer(authorizer)
    sql = """
        drop table if exists Salaries;
        create table Salaries(Id int, Name text, Salary int);
        insert into salaries values(1, 'Tom', 5400);
        insert into salaries values(2, 'Frank', 4230);
        insert into salaries values(3, 'Jane', 3230);
        insert into salaries values(4, 'Samuel', 3800);
    """
def __init__(self, opt):
    # Database file in the root folder.
    self.conn = sqlite3.connect(
        syspath.BASE_PATH + syspath.DS + opt['dbprefix'] + opt['dbname'])
    if opt['dbtrace']:
        # Enable error reporting.
        sqlite3.enable_callback_tracebacks(True)
def my_row_factory(cur, data):
    print(cur, data)
    return data

class Averager(object):
    def __init__(self):
        self.total = 0.0
        self.count = 0

    def step(self, value):
        self.total += value
        self.count += 1

    def finalize(self):
        return self.total / self.count

# Register a new type for use with detect_types. The first argument is the
# type name; the second is the converter function, which receives a single
# byte string as input.
register_converter('decimal', foo)

# Handle exceptions raised in user-defined callbacks such as converters and
# adapters. By default exceptions are ignored; if the flag is True they are
# printed to sys.stderr.
enable_callback_tracebacks(True)

# detect_types enables extra type detection, so types registered with
# register_converter take effect.
conn = connect('F:/Database/SQLite3/PythonTest.sqlite',
               detect_types=sqlite3.PARSE_COLNAMES)

# Create a user-defined function callable from SQL: function name, number
# of arguments, implementation.
conn.create_function('upper', 1, lambda s: s.upper())

# Create an aggregate function callable from SQL. The first two arguments
# are as in create_function; the third is a class implementing the
# aggregate: it must be constructible without arguments, provide a step()
# method taking num_params arguments, and a finalize() method returning
# the final result.
conn.create_aggregate('py_avg', 1, Averager)

# Custom collation (sort order).
conn.create_collation('mcompare', compare)

# conn.set_authorizer(validator)
# Registers an authorizer callback, invoked on each access to a column of
# data in the database.

# conn.set_progress_handler(phandler, 1)
# set_progress_handler(handler, n) registers a callback invoked every n
# SQLite virtual machine instructions; handler takes no arguments.

# conn.row_factory = my_row_factory
# Replace row_factory to control how each result row is built; the function
# receives the cursor and the raw row tuple.

cur = conn.cursor()
cur.execute('select account as "account [decimal]" from portfolio')
while True:
    row = cur.fetchone()
def _local_install(self, args, pkg_name=None):
    '''
    Install a package from a file
    '''
    if len(args) < 2:
        log.error('A package file must be specified')
        return False

    pkg_file = args[1]
    self._init_db()
    roots_path = self.opts['file_roots']['base'][0]
    pillar_path = self.opts['pillar_roots']['base'][0]
    comps = pkg_file.split('-')
    comps = '-'.join(comps[:-2]).split('/')
    name = comps[-1]

    if not os.path.exists(pkg_file):
        log.error('File {0} not found'.format(pkg_file))
        return False

    if not os.path.exists(roots_path):
        os.makedirs(roots_path)

    sqlite3.enable_callback_tracebacks(True)
    conn = sqlite3.connect(self.opts['spm_db'], isolation_level=None)
    cur = conn.cursor()

    formula_tar = tarfile.open(pkg_file, 'r:bz2')
    formula_ref = formula_tar.extractfile('{0}/FORMULA'.format(name))
    formula_def = yaml.safe_load(formula_ref)

    data = conn.execute('SELECT package FROM packages WHERE package=?',
                        (formula_def['name'], ))
    if data.fetchone() and not self.opts['force']:
        print('Package {0} already installed, not installing '
              'again'.format(formula_def['name']))
        return

    if 'dependencies' in formula_def:
        if not isinstance(formula_def['dependencies'], list):
            formula_def['dependencies'] = [formula_def['dependencies']]
        needs = []
        for dep in formula_def['dependencies']:
            if not isinstance(dep, string_types):
                continue
            data = conn.execute(
                'SELECT package FROM packages WHERE package=?', (dep, ))
            if data.fetchone():
                continue
            needs.append(dep)
        print('Cannot install {0}, the following dependencies are needed: '
              '\n\n{1}'.format(formula_def['name'], '\n'.join(needs)))
        return

    if pkg_name is None:
        print('Installing package from file {0}'.format(pkg_file))
    else:
        print('Installing package {0}'.format(pkg_name))

    if not self.opts['assume_yes']:
        res = input('Proceed? [N/y] ')
        if not res.lower().startswith('y'):
            print('... canceled')
            return False

    print('... installing')
    log.debug('Locally installing package file {0} to {1}'.format(
        pkg_file, roots_path))

    for field in ('version', 'release', 'summary', 'description'):
        if field not in formula_def:
            log.error('Invalid package: the {0} was not found'.format(field))
            return False

    pkg_files = formula_tar.getmembers()

    # First pass: check for files that already exist
    existing_files = []
    for member in pkg_files:
        if member.isdir():
            continue
        if member.name.startswith('{0}/_'.format(name)):
            # Module files are distributed via _modules, _states, etc
            new_name = member.name.replace('{0}/'.format(name), '')
            out_file = os.path.join(roots_path, new_name)
        elif member.name == '{0}/pillar.example'.format(name):
            # Pillars are automatically put in the pillar_roots
            new_name = '{0}.sls.orig'.format(name)
            out_file = os.path.join(pillar_path, new_name)
        else:
            out_file = os.path.join(roots_path, member.name)
        if os.path.exists(out_file):
            existing_files.append(out_file)
            if not self.opts['force']:
                log.error('{0} already exists, not installing'.format(
                    out_file))

    if existing_files and not self.opts['force']:
        return

    # We've decided to install
    conn.execute('INSERT INTO packages VALUES '
                 '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', (
        name,
        formula_def['version'],
        formula_def['release'],
        datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'),
        formula_def.get('os', None),
        formula_def.get('os_family', None),
        formula_def.get('dependencies', None),
        formula_def.get('os_dependencies', None),
        formula_def.get('os_family_dependencies', None),
        formula_def['summary'],
        formula_def['description'],
    ))

    # No defaults for this in config.py; default to the current running
    # user and group
    uid = self.opts.get('spm_uid', os.getuid())
    gid = self.opts.get('spm_gid', os.getgid())
    uname = pwd.getpwuid(uid)[0]
    gname = grp.getgrgid(gid)[0]

    # Second pass: install the files
    for member in pkg_files:
        out_path = roots_path
        file_ref = formula_tar.extractfile(member)
        member.uid = uid
        member.gid = gid
        member.uname = uname
        member.gname = gname

        if member.isdir():
            digest = ''
        else:
            file_hash = hashlib.sha1()
            file_hash.update(file_ref.read())
            digest = file_hash.hexdigest()

        if member.name.startswith('{0}/_'.format(name)):
            # Module files are distributed via _modules, _states, etc
            member.name = member.name.replace('{0}/'.format(name), '')
        elif member.name == '{0}/pillar.example'.format(name):
            # Pillars are automatically put in the pillar_roots
            member.name = '{0}.sls.orig'.format(name)
            out_path = pillar_path

        formula_tar.extract(member, out_path)
        conn.execute('INSERT INTO files VALUES '
                     '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', (
            name,
            '{0}/{1}'.format(out_path, member.path),
            member.size,
            member.mode,
            digest,
            member.devmajor,
            member.devminor,
            member.linkname,
            member.linkpath,
            member.uname,
            member.gname,
            member.mtime
        ))

    formula_tar.close()
    conn.close()
import os
import sqlite3

try:
    sqlite3.enable_callback_tracebacks(True)
except AttributeError:
    pass

from peewee import *
from peewee import print_
from playhouse.sqlite_ext import *
from playhouse.tests.base import database_initializer
from playhouse.tests.base import ModelTestCase
from playhouse.tests.base import PeeweeTestCase
from playhouse.tests.base import skip_if
from playhouse.tests.base import skip_unless

# Use a disk-backed db since memory dbs only exist for a single connection and
# we need to share the db w/2 for the locking tests. Additionally, set the
# sqlite_busy_timeout to 100ms so when we test locking it doesn't take forever.
ext_db = database_initializer.get_database(
    'sqlite',
    c_extensions=False,
    db_class=SqliteExtDatabase,
    timeout=0.1,
    use_speedups=False)

CLOSURE_EXTENSION = os.environ.get('CLOSURE_EXTENSION')
FTS5_EXTENSION = FTS5Model.fts5_installed()
def __init__(self, sb):
    GObject.GObject.__init__(self)
    self.sb = soundblizzard.soundblizzard  # fakes for tab completion
    self.sb = sb
    # self.config = soundblizzard.config.config  # provides direct access
    # to the config dictionary
    if not os.path.isdir(
            os.path.dirname(self.sb.config.config["databasefile"])):
        loggy.warn("Creating directories for requested database file")
        os.makedirs(
            os.path.dirname(self.sb.config.config["databasefile"])) \
            or loggy.warn("...could not create config dir")
    self.dbpath = self.sb.config.config["databasefile"]
    loggy.log("Database: Loading database from " + self.dbpath)
    if loggy.debug:
        sqlite3.enable_callback_tracebacks(True)
    self.conn = sqlite3.connect(self.dbpath) \
        or loggy.warn("Could not connect to database")
    self.conn.row_factory = sqlite3.Row
    self.curs = self.conn.cursor()
    self.curs.row_factory = sqlite3.Row
    # songid must be last as it's the primary key
    self.keys = (
        "artist", "title", "album", "date", "genre", "duration", "rating",
        "album-artist", "track-count", "track-number", "mimetype",
        # 'atime',
        "mtime",
        # 'ctime', 'dtime', 'size',
        "uri", "songid",
    )
    self.blanktags = {
        "artist": "", "title": "", "album": "", "date": 0, "genre": "",
        "duration": 0, "rating": 0, "album-artist": "", "track-count": 1,
        "track-number": 1, "mimetype": "",
        # 'atime': 0,
        "mtime": 0,
        # 'ctime': 0, 'dtime': 0, 'size': 0,
        "uri": "", "songid": None,
    }
    # TODO: move gubbins to config
    # TODO: check database is okay, contains tables and necessary fields etc.
    try:
        self.curs.execute("SELECT name FROM sqlite_master "
                          "WHERE type='table' AND name='media'")
        self.curs.execute("SELECT * from 'media'")
    except sqlite3.OperationalError:
        loggy.log("Database: no media table, recreating")
        self.recreate_media_table()
def _remove(self, args):
    '''
    Remove a package
    '''
    if len(args) < 2:
        log.error('A package must be specified')
        return False

    package = args[1]

    print('Removing package {0}'.format(package))
    if not self.opts['assume_yes']:
        res = input('Proceed? [N/y] ')
        if not res.lower().startswith('y'):
            print('... canceled')
            return False

    print('... removing')

    if not os.path.exists(self.opts['spm_db']):
        log.error('No database at {0}, cannot remove {1}'.format(
            self.opts['spm_db'], package))
        return

    # Look at local repo index
    sqlite3.enable_callback_tracebacks(True)
    conn = sqlite3.connect(self.opts['spm_db'], isolation_level=None)
    cur = conn.cursor()
    data = conn.execute('SELECT * FROM packages WHERE package=?', (package, ))
    if not data.fetchone():
        log.error('Package {0} not installed'.format(package))
        return

    # Find files that have not changed and remove them
    data = conn.execute('SELECT path, sum FROM files WHERE package=?',
                        (package, ))
    dirs = []
    for filerow in data.fetchall():
        if os.path.isdir(filerow[0]):
            dirs.append(filerow[0])
            continue
        with salt.utils.fopen(filerow[0], 'r') as fh_:
            file_hash = hashlib.sha1()
            file_hash.update(fh_.read())
            digest = file_hash.hexdigest()
        if filerow[1] == digest:
            log.trace('Removing file {0}'.format(filerow[0]))
            os.remove(filerow[0])
        else:
            log.trace('Not removing file {0}'.format(filerow[0]))
        conn.execute('DELETE FROM files WHERE path=?', (filerow[0], ))

    # Clean up directories
    for dir_ in sorted(dirs, reverse=True):
        conn.execute('DELETE FROM files WHERE path=?', (dir_, ))
        try:
            log.trace('Removing directory {0}'.format(dir_))
            os.rmdir(dir_)
        except OSError:
            # Leave directories in place that still have files in them
            log.trace('Cannot remove directory {0}, probably not '
                      'empty'.format(dir_))

    conn.execute('DELETE FROM packages WHERE package=?', (package, ))