def _connectToDb(self):
    """ Opens a db connection """
    self.con = sqlite3.connect(self.db_path,
                               detect_types=sqlite3.PARSE_DECLTYPES)
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
    self.con.row_factory = self._dictFactory
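# Added: a minimal, self-contained sketch (not part of the original code) of
# the bool round trip used above. The adapter fires on INSERT; the "BOOLEAN"
# converter only fires when the column is declared BOOLEAN and the connection
# is opened with detect_types=sqlite3.PARSE_DECLTYPES.
import sqlite3

sqlite3.register_adapter(bool, int)
sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))

con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
con.execute("CREATE TABLE flags (done BOOLEAN)")
con.execute("INSERT INTO flags VALUES (?)", (True,))
row = con.execute("SELECT done FROM flags").fetchone()
assert row[0] is True  # stored as 1, read back as a Python bool
con.close()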
def get_db(config):
    # detect_types is required for the "BOOLEAN" converter below to fire.
    db = sqlite3.connect(config.get("CONNECTION", "SQLITE_DB"),
                         detect_types=sqlite3.PARSE_DECLTYPES)
    db.row_factory = sqlite3.Row
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
    return db
def _save_as_sqlite(packages, absolute_path):
    """Save a list of packages as an SQLite3 binary file.

    Arguments:
    packages -- a list of TLPackage objects
    absolute_path -- output path for the database

    An existing file at this path will be removed before writing, to ensure
    that you end up with a consistent database. This is mainly for symmetry
    with the plist writing method.

    Not all values are saved to sqlite. Notably runfiles and other dictionary
    types are not written at present, since they should probably be in a
    separate table.
    """
    import sqlite3
    import os
    import errno

    def _adapt_list(lst):
        if lst is None or len(lst) == 0:
            return None
        return buffer("\0".join(lst).encode("utf-8"))

    sqlite3.register_adapter(list, _adapt_list)
    sqlite3.register_adapter(tuple, _adapt_list)

    # plistlib will overwrite the previous file, so do the same with sqlite
    # instead of adding rows
    try:
        os.remove(absolute_path)
    except OSError, e:
        if e.errno != errno.ENOENT:
            raise e
def populate_data(db_path, data_file_path):
    print 'populate data from [{}] ...'.format(data_file_path),
    sql = '''
        INSERT INTO [bills] (
            [bill_date],
            [peak_time_power],
            [valley_time_power],
            [peak_time_price],
            [valley_time_price],
            [price]
        ) VALUES (?, ?, ?, ?, ?, ?)
    '''
    db = sqlite3.connect(db_path)

    def adapter_Decimal(d):
        return str(d)

    sqlite3.register_adapter(decimal.Decimal, adapter_Decimal)

    with open(data_file_path) as f:
        for line in f:
            line = line.strip()
            # Skip blank lines and comments (the original indexed line[0],
            # which raises IndexError on an empty line).
            if not line or line.startswith('#'):
                continue
            args = line.split(',')
            args = [i.strip() for i in args]
            args[0] = datetime.date(*map(int, args[0].split('-')))
            args[1:3] = map(int, args[1:3])
            args[3:] = map(decimal.Decimal, args[3:])
            db.execute(sql, args)

    db.commit()
    db.close()
    print 'done'
def main():
    # 2012 Illinois ACS PUMS file with NO pre-processing.
    filename = 'ss12pil.csv'

    # OnePerson class from week 3, store the 100th row in the object.
    p = OnePerson([])
    p.read_line(filename, 100)

    # Tell the sqlite3 library that we will be using our custom functions
    # adapt_person and convert_person as adapter and converter, respectively.
    sqlite3.register_adapter(OnePerson, adapt_person)
    sqlite3.register_converter('person', convert_person)

    # Use memory for testing; you can change it to a filename and store it.
    # Note that we are using DECLARED TYPES.
    conn = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
    cur = conn.cursor()

    # Call our custom functions.
    create_table(cur)
    insert_person(cur, p)
    print_head(cur)

    # Close connection.
    cur.close()
    conn.close()
def __init__(self, database=None, **kwargs):
    if database is None:
        database = config.sqlite_database
    self.database = database
    self.conn = sqlite3.connect(database)
    sqlite3.register_adapter(numpy.ndarray, adapt_ndarray)
    with self.conn:
        self.conn.execute(
            """CREATE TABLE IF NOT EXISTS entries (
                uuid TEXT NOT NULL,
                time INT NOT NULL,
                "key" TEXT NOT NULL,
                value,
                PRIMARY KEY(uuid, time, "key")
            );"""
        )
        self.conn.execute(
            """CREATE TABLE IF NOT EXISTS status (
                uuid TEXT NOT NULL,
                "key" TEXT NOT NULL,
                value,
                PRIMARY KEY(uuid, "key")
            );"""
        )
    self.status = SQLiteStatus(self)
    super(SQLiteLog, self).__init__(**kwargs)
def __init__(self, smarthome):
    self._sh = smarthome
    self._version = 1
    sqlite3.register_adapter(datetime.datetime, self.timestamp)
    logger.debug("SQLite {0}".format(sqlite3.sqlite_version))
    self.connected = True
    self._fdb = sqlite3.connect(smarthome.base_dir + '/var/db/smarthome.db',
                                check_same_thread=False)
    self._fdb_lock = threading.Lock()
    self._fdb_lock.acquire()
    common = self._fdb.execute(
        "SELECT * FROM sqlite_master WHERE name='common' and type='table';").fetchone()
    if common is None:
        self._fdb.execute("CREATE TABLE common (version INTEGER);")
        self._fdb.execute("INSERT INTO common VALUES (:version);",
                          {'version': self._version})
        self._fdb.execute(self._create_db)
        self._fdb.execute(self._create_index)
        version = self._version
    else:
        version = int(self._fdb.execute("SELECT version FROM common;").fetchone()[0])
    if version < self._version:
        logger.debug("update database")
        self._fdb.execute("UPDATE common SET version=:version;",
                          {'version': self._version})
    self._fdb.commit()
    self._fdb_lock.release()
    minute = 60 * 1000
    hour = 60 * minute
    day = 24 * hour
    week = 7 * day
    month = 30 * day
    year = 365 * day
    self._frames = {'i': minute, 'h': hour, 'd': day, 'w': week, 'm': month, 'y': year}
    self._times = {'i': minute, 'h': hour, 'd': day, 'w': week, 'm': month, 'y': year}
    # self.query("alter table history add column power INTEGER;")
    smarthome.scheduler.add('sqlite', self._pack, cron='2 3 * *', prio=5)
def __init__(self, database_name):
    sql.register_adapter(np.ndarray, self.adapt_array)
    sql.register_converter("array", self.convert_array)
    self.conn = sql.connect('database/' + str(database_name) + '.db',
                            isolation_level=None,
                            detect_types=sql.PARSE_DECLTYPES,
                            check_same_thread=False)
    if TYPE == 1:
        self.conn.execute("CREATE TABLE IF NOT EXISTS `files` ("
                          "`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                          "`file_path` TEXT NOT NULL)")
        self.conn.execute("CREATE TABLE IF NOT EXISTS `features` ("
                          "`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                          "`file_id` INTEGER NOT NULL,"
                          "`frame` INTEGER NOT NULL,"
                          "`feature` array NOT NULL,"
                          "`class` TEXT NOT NULL)")
        self.conn.execute("CREATE TABLE IF NOT EXISTS `final_weight` ("
                          "`vectors` array NOT NULL,"
                          "`class` TEXT NOT NULL)")
    else:
        self.conn.execute("CREATE TABLE IF NOT EXISTS `output_classes` ("
                          "`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                          "`file_path` TEXT NOT NULL,"
                          "`class` TEXT NOT NULL)")
        self.conn.execute("CREATE TABLE IF NOT EXISTS `feature_sets` ("
                          "`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                          "`output_class_id` INTEGER NOT NULL,"
                          "`frame` INTEGER NOT NULL,"
                          "`features` array NOT NULL)")
def __init__(self, dbpath):
    self.dbpath = dbpath
    # !!!: Remember that you must update the self._format_parameters method
    # if you update the self.type property.
    self.type = {
        'int': 'INTEGER',
        'float': 'REAL',
        'str': 'TEXT',
        'bytes': 'BLOB',
        'prim': 'PRIMARY KEY',
        'intPrim': 'INTEGER PRIMARY KEY',
        'bool': 'BOOLEAN',
        'date': 'DATE',
        'datetime': 'TIMESTAMP',
    }
    self.validTypes = set(self.type.keys())
    self.bindingDict = {}

    # Adapters and converters for the bool type
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))

    self.connection = Connection(self.dbpath,
                                 check_same_thread=False,
                                 detect_types=sqlite3.PARSE_DECLTYPES)
    atexit.register(self._finalize)
def initDB(db_file, db_dirPath):
    #
    # Database
    # db_conn = connect(db_file)
    db_conn = connect(db_file, check_same_thread=False)
    db_conn.row_factory = namedtuple_factory
    # db_conn.isolation_level = None

    # SQLite tune-ups
    db_conn.execute("PRAGMA synchronous = OFF;")
    db_conn.execute("PRAGMA temp_store = MEMORY;")
    # Force enable foreign keys check
    db_conn.execute("PRAGMA foreign_keys = ON;")

    # Store data as UNIX timestamps instead of ISO format (the sqlite
    # default), and None objects as 'NULL' strings.
    from datetime import datetime
    from time import mktime
    from sqlite3 import register_adapter

    def adapt_datetime(ts):
        return mktime(ts.timetuple())

    register_adapter(datetime, adapt_datetime)

    #
    # antiORM
    return Sqlite(db_conn, db_dirPath, False, True)
def __init__(self):
    # Boolean to record whether the database exists. We need this because
    # we need to check if the db file exists in the file system before we
    # connect to the database.
    exists = False
    # Check if the database exists in the file system.
    if os.path.isfile('spellingaid.db'):
        exists = True
    # Connect to the database and create a cursor.
    self.db = sqlite.connect('spellingaid.db',
                             detect_types=sqlite.PARSE_DECLTYPES)
    self.db.text_factory = str
    self.c = self.db.cursor()
    # If the database didn't exist, initialise the tables.
    if not exists:
        self.db.executescript(INIT)
    # Register adapters and converters to let the database work with User
    # and Word objects.
    sqlite.register_adapter(User, lambda u: u.serialise())
    sqlite.register_adapter(Word, lambda w: w.serialise())
    sqlite.register_converter('User', User.deserialise)
    sqlite.register_converter('Word', Word.deserialise)
    self.listeners = []
def __init__(self, DBfn=None, DBcolumns=None, DB_DEBUG=False):
    self.DB_DEBUG = DB_DEBUG
    self.DBfn = DBfn
    self.DBcolumns = DBcolumns
    if self.DBfn is None:
        self.DBfn = os.path.join(os.path.expanduser('~'), 'Desktop',
                                 "MagicDB", __sqlext__)
        print("WARNING, creating/using a default database: {}".format(self.DBfn))
    if not os.path.isdir(os.path.dirname(self.DBfn)):
        os.makedirs(os.path.dirname(self.DBfn))

    sqlite3.register_converter("json", json.loads)
    sqlite3.register_adapter(list, json.dumps)
    sqlite3.register_adapter(dict, json.dumps)

    self.con = sqlite3.connect(
        self.DBfn,
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    self.con.row_factory = sqlite3.Row
    self.con.text_factory = sqlite3.OptimizedUnicode
    self.cur = self.con.cursor()
    self.newDB = False

    # Check that tables exist. If not, make them.
    for t, v in self.DBcolumns.viewitems():
        if not self.cur.execute('''PRAGMA table_info ('{}')'''.format(t)).fetchall():
            self.cur.execute(v)
            self.con.commit()
            print("Created new table: {}".format(t))
            self.newDB = True
        else:
            print("using existing table: {} ".format(t))
            print("in file: {}".format(self.DBfn))
    self.tables = [a[0] for a in self.cur.execute(
        '''SELECT name FROM sqlite_master WHERE type='table' ''').fetchall()]
def custom_sort(db_filename):
    # Register the functions for manipulating the type.
    sqlite3.register_adapter(MyDataObject, quiet_adapter_function)
    sqlite3.register_converter("MyDataObject", quiet_converter_function)

    with closing(sqlite3.connect(db_filename,
                                 detect_types=sqlite3.PARSE_DECLTYPES)) as conn:
        # Define the collation.
        conn.create_collation('unpickle', collation_function)

        # Clear the table and insert new values.
        conn.execute("""delete from obj""")
        conn.executemany("""insert into obj (data) values (?)""",
                         [(MyDataObject(i),) for i in xrange(5, 0, -1)],
                         )

        # Query the db for the objects just saved.
        print "Querying:"
        cursor = conn.cursor()
        cursor.execute("""
            select id, data from obj order by data collate unpickle
            """)
        for obj_id, obj in cursor.fetchall():
            print obj_id, obj
def load_manager(filename=None):
    """Connects to the SQLite database with the given filename.

    If this is None, then it connects to an in-memory database (used for
    testing).
    """
    from passage_list import PassageList, PassageListManager
    sqlite3.register_adapter(PassageList, lambda self: self.id)
    sqlite3.register_adapter(PassageListManager, lambda self: self.id)
    global connection, previous_filename
    if filename is None:
        filename = ":memory:"
    assert connection is None or previous_filename == filename
    previous_filename = filename
    manager = PassageListManager()
    try:
        if connection is None:
            connection = sqlite3.connect(filename)
        _maybe_setup_database(manager)
        _load_topic_children(manager)
        manager.parent = None
    except sqlite3.Error, e:
        import os
        manager.has_error_on_loading = True
        print "SQLITE loading error"
        import traceback
        traceback.print_exc()
def classify(eigvects, name):
    print 'eigvects: ', eigvects
    # Converts np.array to TEXT when inserting
    sqlite3.register_adapter(np.ndarray, cvtNparr.adapt_array)
    # Converts TEXT to np.array when selecting
    sqlite3.register_converter("array", cvtNparr.convert_array)
    conn = sqlite3.connect("/home/wlw/oliverProjects/3DClassification/classification.db",
                           detect_types=sqlite3.PARSE_DECLTYPES)
    cur = conn.cursor()
    cur.execute("select eigvects, id from model where type='flat'")
    lists = cur.fetchall()
    for lis in lists:  # lis is a tuple
        # print 'lis[0]: ', lis[0]
        # print type(lis[0])
        res = lis[0] - eigvects
        summ = 0
        for r in res:
            d = math.sqrt(sum(math.pow(value, 2) for value in r))
            summ += d
        similarity = summ / 3.0
        print '%s\'s similarity with %s is %f ' % (lis[1], name, similarity)
    conn.close()
def create_db(self):
    """ Check if the database is stored in the path """
    if config.create_config_dir(self.user_path):
        self.conn = sqlite3.connect(self.user_path + self.filename,
                                    detect_types=sqlite3.PARSE_DECLTYPES)
        sqlite3.register_adapter(bool, int)
        sqlite3.register_converter("BOOLEAN", lambda v: int(v) != 0)
def _tell_sqlite_about_numpy(self):
    for t in (np.int8, np.int16, np.int32, np.int64,
              np.uint8, np.uint16, np.uint32, np.uint64):
        sqlite3.register_adapter(t, long)
    for f in (np.float, np.float32, np.float64):
        sqlite3.register_adapter(f, float)
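# Added illustration (Python 3 flavour, using int instead of the Python 2
# `long` above): without such registrations, binding a numpy scalar typically
# raises sqlite3.InterfaceError ("probably unsupported type"); with them, the
# value is coerced to a plain Python int/float on INSERT.
import sqlite3
import numpy as np

sqlite3.register_adapter(np.int64, int)
con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE t (x INTEGER)")
con.execute("INSERT INTO t VALUES (?)", (np.int64(7),))  # works once adapted
print(con.execute("SELECT x FROM t").fetchone()[0])  # -> 7
con.close()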
def _sqlite3(self, name):
    """Open/create a sqlite3 DB file"""
    def dict_factory(cursor, row):
        d = {}
        for idx, col in enumerate(cursor.description):
            d[col[0]] = row[idx]
        return d

    def converter(data):
        return json.loads(data.decode('utf-8'))

    sqlite3.register_adapter(list, json.dumps)
    sqlite3.register_adapter(dict, json.dumps)
    sqlite3.register_converter("json", converter)
    conn = sqlite3.connect(
        self.name,
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
        isolation_level=None)
    conn.row_factory = dict_factory
    sqlscript = """
        create table if not exists doit (
            task_id text not null primary key,
            task_data json
        );"""
    try:
        conn.execute(sqlscript)
    except sqlite3.DatabaseError as exception:
        new_message = (
            'Dependencies file in %(filename)s seems to use '
            'a bad format or is corrupted.\n'
            'To fix the issue you can just remove the database file(s) '
            'and a new one will be generated.\n'
            'Original error: %(msg)s'
            % {'filename': repr(self.name), 'msg': str(exception)})
        raise DatabaseException(new_message)
    return conn
def _register_adapter(value, key):
    """Register an adapter if the type of value is unknown."""
    # Assuming no storage of non-simple types on channel 'resumed_from'
    if (not isinstance(value, (type(None), int, float, six.string_types,
                               bytes, numpy.ndarray)) and
            key != 'resumed_from'):
        sqlite3.register_adapter(type(value), adapt_obj)
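# `adapt_obj` is defined elsewhere in that module; a plausible pickle-based
# implementation (an assumption, added for illustration only) would be:
import pickle
import sqlite3

def adapt_obj(obj):
    # Serialize any otherwise-unsupported object to a BLOB.
    return sqlite3.Binary(pickle.dumps(obj))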
def _new_database(self):
    """Create and connect to a new sqlite database.

    Raise an error if there already is a database in place, asking the user
    to manually remove the database (for safety reasons).
    """
    # TODO: remove next two lines after testing -> don't automatically remove
    if os.path.exists(self.database):
        os.remove(self.database)

    if os.path.exists(self.database):
        message = "Database already exists, please remove manually: %s" % self.database
        logger.error(message)
        raise IOError(message)
    else:
        logger.info("Database not found, creating database %s" % self.database)
        try:
            self.connection = sqlite3.connect(
                self.database,
                detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES
            )
        except sqlite3.Error:  # was a bare except; catch sqlite errors only
            message = "Failed to create database: %s" % self.database
            logger.error(message)
            raise sqlite3.OperationalError(message)  # re-raise error
        self._create_dbstructure()

    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))

    # tuples
    self.connection.row_factory = sqlite3.Row
def __init__(self, path):
    # Constants
    self.FORMATS = {}
    self.FORMATS['runtime'] = [('item', 'TEXT'), ('value', 'TEXT')]
    self.FORMATS['conf'] = [('sec', 'TEXT'), ('opt', 'TEXT'), ('val', 'TEXT')]
    self.FORMATS['io'] = [
        ('hid', 'INTEGER'), ('pid', 'INTEGER'), ('tid', 'INTEGER'),
        ('fsize', 'INTEGER'), ('bsize', 'INTEGER'), ('elapsed', 'BLOB'),
        ('sync', 'REAL'), ('agg', 'REAL'), ('aggnoclose', 'REAL'),
        ('opavg', 'REAL'), ('opmin', 'REAL'), ('opmax', 'REAL'),
        ('opstd', 'REAL')]
    self.FORMATS['meta'] = [
        ('hid', 'INTEGER'), ('pid', 'INTEGER'), ('tid', 'INTEGER'),
        ('opcnt', 'INTEGER'), ('factor', 'INTEGER'), ('elapsed', 'BLOB'),
        ('sync', 'REAL'), ('agg', 'REAL'), ('opavg', 'REAL'),
        ('opmin', 'REAL'), ('opmax', 'REAL'), ('opstd', 'REAL')]
    self.FORMATS['aggdata'] = [
        ('hostid', 'INTEGER'), ('pid', 'INTEGER'), ('tid', 'INTEGER'),
        ('oper', 'TEXT'), ('optype', 'INTEGER'), ('min', 'REAL'),
        ('max', 'REAL'), ('avg', 'REAL'), ('agg', 'REAL'),
        ('std', 'REAL'), ('time', 'REAL')]
    self.FORMATS_LEN = {}
    for k, v in self.FORMATS.items():
        self.FORMATS_LEN[k] = len(v)

    sqlite3.register_converter("BLOB", lambda s: cPickle.loads(str(s)))
    sqlite3.register_adapter(list, cPickle.dumps)
    sqlite3.register_adapter(dict, cPickle.dumps)

    self.db = sqlite3.connect(path, detect_types=sqlite3.PARSE_DECLTYPES)
    self.cur = self.db.cursor()
    self.tables = []  # all tables in database
def compute_allpairs_sim(self, interact_type, data_type):
    mat_sim = compute_allpairs_sim_mat(
        self.get_nodes_list(should_have_interactions=True),
        interact_type, data_type)
    # Think of transactions, prepared statements, WITHOUT ROWID, pragmas, etc.
    import numpy as np
    for dt in (np.int64, np.int32):
        sqlite3.register_adapter(dt, long)
    conn = sqlite3.connect(':memory:')
    c = conn.cursor()
    print "Starting inserting data"
    """
    c.execute('''CREATE TABLE stocks
                 (i integer, j integer, value real, PRIMARY KEY(i,j))''')
    oneM = 1000000
    for i in range(0, 318000000, oneM):
        print "Done", i
        c.executemany("INSERT INTO stocks VALUES (?, ?, ?)",
                      izip(mat_sim.row[i:i+oneM], mat_sim.col[i:i+oneM],
                           mat_sim.data[i:i+oneM]))
    """
    c.execute('''CREATE TABLE stocks (ij integer PRIMARY KEY, value real)''')
    oneM = 1000000
    for i in range(0, 318000000, oneM):
        print "Done", i
        # TODO CAUTION: this is a hack. Make sure to shift by at least 32 bits
        # and control for overflow; would not work for 300k data.
        row_64 = mat_sim.row[i:i+oneM].astype("int64")
        IJ = (row_64 << 32) + mat_sim.col[i:i+oneM]
        c.executemany("INSERT INTO stocks VALUES (?, ?)",
                      izip(IJ, mat_sim.data[i:i+oneM]))
    conn.commit()
    print "Inserted data"
    # conn.close()
    self.sim_mat = conn
    return self.sim_mat
def setUp(self):
    self.con = sqlite.connect(":memory:")
    try:
        del sqlite.adapters[int]
    except KeyError:  # was a bare except; only a missing key is expected
        pass
    sqlite.register_adapter(int, ObjectAdaptationTests.cast)
    self.cur = self.con.cursor()
def __init__(self):
    sqlite3.register_converter('GUID', lambda b: uuid.UUID(bytes_le=b))
    sqlite3.register_adapter(uuid.UUID, lambda u: buffer(u.bytes_le))
    self.conn = sqlite3.connect('Feeds.db',
                                detect_types=sqlite3.PARSE_DECLTYPES)
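# Added usage sketch for the GUID pattern above, in Python 3 flavour
# (`buffer` is Python 2 only; in Python 3 the raw bytes bind directly):
import sqlite3
import uuid

sqlite3.register_converter('GUID', lambda b: uuid.UUID(bytes_le=b))
sqlite3.register_adapter(uuid.UUID, lambda u: u.bytes_le)

con = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
con.execute("CREATE TABLE feeds (id GUID)")
guid = uuid.uuid4()
con.execute("INSERT INTO feeds VALUES (?)", (guid,))
assert con.execute("SELECT id FROM feeds").fetchone()[0] == guid
con.close()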
def connect_db():
    """Connects to the specific database."""
    rv = sqlite3.connect(app.config['DATABASE'],
                         detect_types=sqlite3.PARSE_DECLTYPES)
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("boolean", lambda v: bool(int(v)))
    rv.row_factory = sqlite3.Row
    return rv
def unit_test2():
    sqlite3.register_adapter(dict, adapt_any)          # register the adapter
    sqlite3.register_converter("STRSET", convert_any)  # define the new data type
    conn = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
    c = conn.cursor()
    c.execute("CREATE TABLE test (id_set STRSET)")
    c.execute("INSERT INTO test (id_set) VALUES (?)", ({1: "a", 2: "b", 3: "c"},))
    print(c.execute("SELECT * FROM test").fetchall())
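# adapt_any / convert_any are not shown in the snippet above; one plausible
# pair (an assumption, added for illustration) round-trips the dict through
# repr() and ast.literal_eval(), which preserves the integer keys that JSON
# would stringify:
import ast

def adapt_any(obj):
    return repr(obj).encode("utf-8")

def convert_any(data):
    return ast.literal_eval(data.decode("utf-8"))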
def __init__(self):
    # Initialise database connection.
    sqlite3.register_adapter(datetime.datetime, self.adapt_datetime)
    sqlite3.register_converter('timestamp', self.convert_datetime)
    path = os.path.join(dixie.PROFILE, PROGRAM_DB)
    self.conn = sqlite3.connect(path, timeout=10,
                                detect_types=sqlite3.PARSE_DECLTYPES,
                                check_same_thread=False)
    self.conn.row_factory = sqlite3.Row
def __init__(self):
    sqlite3.register_adapter(datetime.datetime, self.adapt_datetime)
    sqlite3.register_converter('DATETIME', self.convert_datetime)
    self.db_name = os.path.abspath('../../db/versions.db')
    self.connection = sqlite3.connect(self.db_name,
                                      detect_types=sqlite3.PARSE_DECLTYPES,
                                      isolation_level=None)
    self.connection.row_factory = sqlite3.Row
    self.cursor = self.connection.cursor()
def __init__(self):
    self.conn = sqlite3.connect(SQLITE_FILE,
                                detect_types=sqlite3.PARSE_DECLTYPES)
    sqlite3.register_adapter(datetime.datetime, self._adapt_to_sqlite_ts)
    # sqlite3.register_converter('DATETIME', self._convert_to_ts)
    self.cursor = self.conn.cursor()
    self.workset_table = None
    self.workset = None
    self.workset_status = 0
    self.create_table = '''CREATE TABLE IF NOT EXISTS %s
def start(db_path='library.db', create_script='library.ddl'):
    sqlite3.register_adapter(bool, int)
    # Converters receive bytes, so compare against b'0' (works on Python 2
    # as well, where b'0' == '0').
    sqlite3.register_converter("BOOLEAN", lambda v: v != b'0')
    if os.path.isfile(db_path):
        conn = sqlite3.connect(db_path)
    else:
        conn = sqlite3.connect(db_path)
        create_tables(conn, create_script)
    conn.execute('PRAGMA FOREIGN_KEYS = 1;')
    return conn
import datetime
import json
import sqlite3


def adapt_list_to_JSON(lst):
    return json.dumps(lst).encode("utf8")


def convert_JSON_to_list(data):
    return json.loads(data.decode("utf8"))


sqlite3.register_adapter(list, adapt_list_to_JSON)
sqlite3.register_converter("json", convert_JSON_to_list)


class DBManager:
    def __init__(self, db_path, is_global=False):
        self.db_path = db_path
        self.is_global = is_global
        if self.is_global:
            self.create_experiment_ids_table()

    def create_experiment_ids_table(self):
        self.execute(
            "CREATE TABLE IF NOT EXISTS experiment_ids "
            "(id integer primary key autoincrement, "
            "experiment_name text unique, has_records integer)"
        )

    def new_experiment(self, experiment_name):
        if not self.experiment_name_has_records(experiment_name):
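# Added round-trip check for the module-level list <-> JSON registration
# above (re-registered here so the sketch stands alone; any column declared
# `json` is converted when PARSE_DECLTYPES is on):
import json
import sqlite3

sqlite3.register_adapter(list, lambda lst: json.dumps(lst).encode("utf8"))
sqlite3.register_converter("json", lambda data: json.loads(data.decode("utf8")))

conn = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
conn.execute("CREATE TABLE runs (metrics json)")
conn.execute("INSERT INTO runs VALUES (?)", ([1, 2, 3],))
assert conn.execute("SELECT metrics FROM runs").fetchone()[0] == [1, 2, 3]
conn.close()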
class DBconnection(object):
    """Define a class to share common methods between TestCases."""

    path = os.path.join(tmp.gettempdir(), randstr(prefix="temp", suffix=".db"))
    connection = sqlite3.connect(get_path(path))
    for t in (np.int8, np.int16, np.int32, np.int64,
              np.uint8, np.uint16, np.uint32, np.uint64):
        sqlite3.register_adapter(t, int)
    columns = [
        ("cat", "INTEGER PRIMARY KEY"),
        ("cint", "INT"),
        ("creal", "REAL"),
        ("ctxt", "TEXT"),
    ]

    def create_table_instance(self, **kw):
        """Return a Table class instance

        :param **kw: keyword arguments of the Table class, without name and
            connection.
        :type **kw: key-word arguments
        :returns: Table instance
        """
        self.tname = randstr(prefix="temp")
        return Table(name=self.tname, connection=self.connection, **kw)

    def create_empty_table(self, columns=None, **kw):
        """Create an empty table in the database and return a Table class
        instance.

        :param columns: list of tuples containing the column names and types.
        :type columns: list of tuples
        :param **kw: keyword arguments of the Table class, without name and
            connection.
        :type **kw: key-word arguments
        :returns: Table instance
        """
        columns = self.columns if columns is None else columns
        table = self.create_table_instance(**kw)
        table.create(columns)
        return table

    def create_not_empty_table(self, nrows=None, values=None, columns=None, **kw):
        """Create a non-empty table in the database and return a Table class
        instance.

        :param nrows: number of rows.
        :type nrows: int
        :param values: list of tuples containing the values for each row.
        :type values: list of tuples
        :param columns: list of tuples containing the column names and types.
        :type columns: list of tuples
        :param **kw: keyword arguments of the Table class, without name and
            connection.
        :type **kw: key-word arguments
        :returns: Table instance
        """
        if nrows is None and values is None:
            msg = "Both parameters ``nrows`` and ``values`` are empty"
            raise RuntimeError(msg)
        columns = self.columns if columns is None else columns
        values = get_table_random_values(nrows, columns) if values is None else values
        table = self.create_empty_table(columns=columns, **kw)
        table.insert(values, many=True)
        return table

    def setUp(self):
        """Create a not empty table instance"""
        self.table = self.create_not_empty_table(10)
        self.cols = self.table.columns

    def tearDown(self):
        """Remove the generated vector map, if it exists"""
        self.table.drop(force=True)
        self.table = None
        self.cols = None
import cPickle
import sqlite3
import os.path
from btceapi import common
from btceapi.public import Trade
import decimal
import datetime


# Add support for conversion to/from decimal
def adapt_decimal(d):
    return int(d * decimal.Decimal("1e8"))


def convert_decimal(s):
    return decimal.Decimal(s) * decimal.Decimal("1e-8")


sqlite3.register_adapter(decimal.Decimal, adapt_decimal)
sqlite3.register_converter("DECIMAL", convert_decimal)


class BTC_Database(object):
    def __init__(self, db_path):
        create = not os.path.isfile(db_path)
        self.connection = sqlite3.connect(db_path)
        self.cursor = self.connection.cursor()
        if create:
            # The database is new, so create tables and populate the
            # enumerations.
            self.createTables()
            # Pairs table
            pairs = zip(range(len(common.all_pairs)), common.all_pairs)
            self.cursor.executemany("INSERT INTO pairs VALUES(?, ?)", pairs)
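# Added round-trip sketch for the fixed-point Decimal scheme above: values
# are stored as integers scaled by 1e8 and rescaled on read. Note the
# .decode() in the converter: on Python 3, converters receive bytes, which
# Decimal will not accept directly (the snippet above is Python 2 code).
import decimal
import sqlite3

sqlite3.register_adapter(decimal.Decimal,
                         lambda d: int(d * decimal.Decimal("1e8")))
sqlite3.register_converter(
    "DECIMAL", lambda s: decimal.Decimal(s.decode()) * decimal.Decimal("1e-8"))

con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
con.execute("CREATE TABLE trades (price DECIMAL)")
con.execute("INSERT INTO trades VALUES (?)", (decimal.Decimal("123.456"),))
print(con.execute("SELECT price FROM trades").fetchone()[0])  # 123.45600000
con.close()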
def _adapt_coordinates(coord: Coordinates):
    return str(coord).encode('utf-8')


def _convert_coordinates(s: bytes):  # converters receive bytes, not str
    return Coordinates.fromString(s.decode('utf-8'))


def _adapt_world(world: World):
    return world.value


def _convert_world(id):
    return World(int(id))


sqlite3.register_adapter(Coordinates, _adapt_coordinates)
sqlite3.register_adapter(World, _adapt_world)
sqlite3.register_converter("coordinates", _convert_coordinates)
sqlite3.register_converter("world", _convert_world)


class SqliteBackend(BackendInterface):
    _SCHEME = [
        """CREATE TABLE IF NOT EXISTS `items` (
            `item_id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
            `guild_id` INTEGER NOT NULL,
            `shop_id` INTEGER NOT NULL,
            `name` TEXT,
            `price` TEXT
        );""",
        """CREATE TABLE IF NOT EXISTS `shops` (
            `shop_id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
# Function that converts a database row into a dictionary.
def dict_factory(cursor, row):
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d


# Time
def convert_time(s):
    return datetime.datetime.strptime(s.decode(), '%H:%M:%S').time()


# Register the adapter
sqlite3.register_adapter(datetime.time, lambda d: d.strftime("%H:%M:%S"))
# Register the converter
sqlite3.register_converter("time", convert_time)


# Timedelta
def adapt_timedelta(td):
    return str(td.total_seconds())


def convert_timedelta(s):
    # total_seconds() yields a float string like "3600.0", so parse with
    # float() rather than int(), which would raise ValueError here.
    return datetime.timedelta(seconds=float(s.decode()))


# Register the adapter
sqlite3.register_adapter(datetime.timedelta, adapt_timedelta)
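# Added round-trip check for the time registration above (re-registered so
# the sketch stands alone; the column must be declared `time` and the
# connection opened with PARSE_DECLTYPES):
import datetime
import sqlite3

sqlite3.register_adapter(datetime.time, lambda d: d.strftime("%H:%M:%S"))
sqlite3.register_converter(
    "time",
    lambda s: datetime.datetime.strptime(s.decode(), '%H:%M:%S').time())

con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
con.execute("CREATE TABLE sched (starts time)")
con.execute("INSERT INTO sched VALUES (?)", (datetime.time(9, 30),))
print(con.execute("SELECT starts FROM sched").fetchone()[0])  # 09:30:00
con.close()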
def query(dbfile, table='meas', cols='*', where=None, groupby=None,
          raw=False, verbose=False):
    """ Get rows from the database """
    t0 = time.time()

    sqlite3.register_adapter(np.int8, int)
    sqlite3.register_adapter(np.int16, int)
    sqlite3.register_adapter(np.int32, int)
    sqlite3.register_adapter(np.int64, int)
    sqlite3.register_adapter(np.float16, float)
    sqlite3.register_adapter(np.float32, float)
    sqlite3.register_adapter(np.float64, float)

    db = sqlite3.connect(dbfile,
                         detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    cur = db.cursor()

    # Map sqlite3 data types to numpy data types
    d2d = {"TEXT": (np.str, 200), "INTEGER": np.int, "REAL": np.float}

    # Start the SELECT statement
    cmd = 'SELECT ' + cols + ' FROM ' + table
    # Add WHERE statement
    if where is not None:
        cmd += ' WHERE ' + where
    # Add GROUP BY statement
    if groupby is not None:
        cmd += ' GROUP BY ' + groupby

    # Execute the select command
    if verbose:
        print('CMD = ' + cmd)
    cur.execute(cmd)
    data = cur.fetchall()

    # No results
    if len(data) == 0:
        return np.array([])

    # Return the raw results
    if raw is True:
        return data

    # Get table column names and data types
    cur.execute("select sql from sqlite_master where tbl_name = '" + table + "'")
    dum = cur.fetchall()
    db.close()
    head = dum[0][0]
    # e.g. 'CREATE TABLE exposure(expnum TEXT, nchips INTEGER, filter TEXT,
    # exptime REAL, utdate TEXT, uttime TEXT, airmass REAL, wcstype TEXT)'
    lo = head.find('(')
    hi = head.find(')')
    head = head[lo + 1:hi]
    columns = head.split(',')
    columns = dln.strip(columns)
    dt = []
    for c in columns:
        pair = c.split(' ')
        dt.append((pair[0], d2d[pair[1]]))
    dtype = np.dtype(dt)

    # Convert to numpy structured array
    cat = np.zeros(len(data), dtype=dtype)
    cat[...] = data
    del data

    if verbose:
        print('got data in ' + str(time.time() - t0) + ' sec.')

    return cat
                self._advance_by_page_num, self._thumb_classname,
                self._image_id, self._image_data,
                self._tag_classnames_to_namespaces)

    def GetGalleryParsingInfo(self):
        return (self._search_url, self._advance_by_page_num,
                self._search_separator, self._thumb_classname)

    def GetName(self):
        return self._name

    def GetNamespaces(self):
        return list(self._tag_classnames_to_namespaces.values())


sqlite3.register_adapter(Booru, yaml.safe_dump)


class Credentials(HydrusData.HydrusYAMLBase):

    yaml_tag = '!Credentials'

    def __init__(self, host, port, access_key=None):
        HydrusData.HydrusYAMLBase.__init__(self)
        if host == 'localhost':
            host = '127.0.0.1'
        self._host = host
                    'WHERE (chain = :chain) AND (draw > :burn)'),
    'select_thin': ('SELECT * FROM [{table}] '
                    'WHERE (chain = :chain) AND '
                    '(draw - (SELECT draw FROM [{table}] '
                    'WHERE chain = :chain '
                    'ORDER BY draw LIMIT 1)) % :thin = 0'),
    'select_burn_thin': ('SELECT * FROM [{table}] '
                         'WHERE (chain = :chain) AND (draw > :burn) '
                         'AND (draw - (SELECT draw FROM [{table}] '
                         'WHERE (chain = :chain) AND (draw > :burn) '
                         'ORDER BY draw LIMIT 1)) % :thin = 0'),
    'select_point': ('SELECT * FROM [{table}] '
                     'WHERE (chain = :chain) AND (draw = :draw)'),
}

sqlite3.register_adapter(np.int32, int)
sqlite3.register_adapter(np.int64, int)


class SQLite(base.BaseTrace):
    """SQLite trace object

    Parameters
    ----------
    name : str
        Name of database file
    model : Model
        If None, the model is taken from the `with` context.
    vars : list of variables
        Sampling values will be stored for these variables. If None,
        `model.unobserved_RVs` is used.
def register_adapters(self,
                      adapters: Tuple[Tuple[Any, Callable], ...]) -> None:
    # Tuple[Tuple[Any, Callable]] would describe a 1-tuple; the variadic
    # form matches the iteration below.
    for python_type, adapter_func in adapters:
        sqlite3.register_adapter(python_type, adapter_func)
# Setup adapters for sqlite
try:
    import sqlite3
except ImportError:
    # Some systems have trouble with this
    pass
else:
    # These are copied from Python sqlite3.dbapi2
    def adapt_date(val):
        return val.isoformat()

    def adapt_datetime(val):
        return val.isoformat(" ")

    sqlite3.register_adapter(FakeDate, adapt_date)
    sqlite3.register_adapter(FakeDatetime, adapt_datetime)

# Setup converters for pymysql
try:
    import pymysql.converters
except ImportError:
    pass
else:
    pymysql.converters.encoders[FakeDate] = pymysql.converters.encoders[real_date]
    pymysql.converters.conversions[FakeDate] = pymysql.converters.encoders[real_date]
    pymysql.converters.encoders[FakeDatetime] = pymysql.converters.encoders[real_datetime]
    pymysql.converters.conversions[FakeDatetime] = pymysql.converters.encoders[
sqlite_types = {int: ' INTEGER',
                long: ' INTEGER',
                float: ' REAL',
                str: ' TEXT',
                unicode: ' TEXT'}


# This function converts a scan into a binary blob for sqlite;
# it simply uses Python's pickle functionality.
def adapt_data(data):
    return sqlite3.Binary(cPickle.dumps(data, protocol=2))

sqlite3.register_adapter(tuple, adapt_data)


# The function to convert a blob back into a scan, by un-pickling.
def convert_data(data):
    return cPickle.loads(str(data))

sqlite3.register_converter('pickled', convert_data)


class SQLiteReader(ReportReader):
    '''The SQLite implementation of the mzReport reader class.'''

    def __init__(self, file_name, table_name=None, sheet_name=None):
        if sheet_name and not table_name:
            table_name = sheet_name
_cache = None


# Custom adapters and converters to translate between python and sqlite data
def _convert_datetime(sql_value):
    return datetime.datetime.strptime(sql_value.decode('utf-8'), '%Y%m%d%H%M%S')


def _adapt_datetime(py_value):
    return py_value.strftime('%Y%m%d%H%M%S')


sqlite3.register_converter('DATETIME', _convert_datetime)
sqlite3.register_converter('BOOL', lambda x: bool(int(x)))
sqlite3.register_adapter(datetime.datetime, _adapt_datetime)
# Using sqlite3's builtin bool adapter


def get_default():
    """Get a cache object created with the default values."""
    global _cache
    if _cache is None:
        _cache = Cache(DB_PATH)
    return _cache


class InfoHolder:
    """A class that holds grouped information."""

    def __init__(self, **kwargs):
        """Convert the key word arguments into proper attributes."""
# Adapters to store and retrieve numpy arrays in sqlite databases; see
# https://www.pythonforthelab.com/blog/storing-data-with-sqlite/#storing-numpy-arrays-into-databases
def adapt_array(arr):
    out = io.BytesIO()
    np.save(out, arr)
    out.seek(0)
    return sqlite3.Binary(out.read())


def convert_array(text):
    out = io.BytesIO(text)
    out.seek(0)
    return np.load(out)


sqlite3.register_adapter(np.ndarray, adapt_array)
sqlite3.register_converter("array", convert_array)


class Training():

    def __init__(self, dataset):
        self.dataset = dataset
        self.features = []

    # Method to calculate the features of every image and indicate in the
    # database if the image is "active".
    def _calculate_features(self):
        """ Calculates the feature map for given image. """
        def calculate_features(model, preprocessor, img):
            # preprocess the image
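# Added usage sketch of the numpy-array-as-BLOB pattern above, written to be
# self-contained (the helper names here are local to this sketch):
import io
import sqlite3
import numpy as np

def _adapt(arr):
    buf = io.BytesIO()
    np.save(buf, arr)  # serialize in .npy format
    return sqlite3.Binary(buf.getvalue())

sqlite3.register_adapter(np.ndarray, _adapt)
sqlite3.register_converter("array", lambda blob: np.load(io.BytesIO(blob)))

con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
con.execute("CREATE TABLE maps (feat array)")
a = np.arange(6).reshape(2, 3)
con.execute("INSERT INTO maps VALUES (?)", (a,))
assert (con.execute("SELECT feat FROM maps").fetchone()[0] == a).all()
con.close()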
                            tzinfo=tzoffset(None, tzsecs))
        except:
            pass
        return parse_date(val, as_utc=False)
    return None

convert_timestamp = _py_convert_timestamp if _c_speedup is None else \
    _c_convert_timestamp


def adapt_datetime(dt):
    return isoformat(dt, sep=' ')

sqlite.register_adapter(datetime, adapt_datetime)
sqlite.register_converter('timestamp', convert_timestamp)


def convert_bool(val):
    return val != '0'


sqlite.register_adapter(bool, lambda x: 1 if x else 0)
sqlite.register_converter('bool', convert_bool)
sqlite.register_converter('BOOL', convert_bool)


class DynamicFilter(object):

    def __init__(self, name):
        links = get_DOI_links(DOI)
        print(links)
        sqlite_cursor.execute(
            "UPDATE ezproxy_doi SET pdf_link = ? WHERE ezproxy_doi_id = ?",
            (links["application/pdf"], ezproxy_doi_id))
        sqlite_cursor.execute(
            "UPDATE ezproxy_doi SET xml_link = ? WHERE ezproxy_doi_id = ?",
            (links["application/xml"], ezproxy_doi_id))
        sqlite_cursor.execute(
            "UPDATE ezproxy_doi SET unspecified_link = ? WHERE ezproxy_doi_id = ?",
            (links["application/pdf"], ezproxy_doi_id))
        print(f"Inserted {ezproxy_doi_id} of {len(data)}")
    return


sqlite3.register_adapter(dict, adapt_json)
sqlite3.register_adapter(list, adapt_json)
sqlite3.register_adapter(tuple, adapt_json)
sqlite3.register_converter('JSON', convert_json)

conn = sqlite3.connect("ezproxy-DOI.db")
sqlite_cursor = conn.cursor()
# add_link_columns(sqlite_cursor)
sqlite_cursor.execute("SELECT * FROM ezproxy_doi")
# sqlite_cursor.execute("SELECT * FROM ezproxy_doi WHERE doi = ?",
#                       ("10.1177/0049124113500475",))
data = sqlite_cursor.fetchall()
add_DOI_links(sqlite_cursor)
def _register_pickler_adapters(self):
    sqlite3.register_adapter(list, self._dumps)
    sqlite3.register_adapter(set, self._dumps)
    sqlite3.register_adapter(dict, self._dumps)
    sqlite3.register_adapter(tuple, self._dumps)
    sqlite3.register_adapter(Response, self._dumps)
    sqlite3.register_converter(str("PICKLE"), self._loads)
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter(str("BOOLEAN"), lambda v: bool(int(v)))
## Restore the partial model and train before making an inference.
### Another thread keeps running inference.
### Merge the threads when training is done.
import sqlite3
from datetime import datetime
import pickle

from ImgObjClass import Item, Img


def imgObjReverseDBConvertor(ImgObjStr):
    return pickle.loads(ImgObjStr)

sqlite3.register_adapter(Img, imgObjReverseDBConvertor)


### Time functions
def getTimeObjFrmStr(timestampString):
    return datetime.strptime(timestampString, '%Y-%m-%d %H:%M:%S.%f')


def getConnection(database):
    conn = sqlite3.connect('./Database/' + database + '.db')
    return conn
def __init__(self, **kwargs):  # noqa: ignore=C901 pylint: disable=R0912
    """Constructor."""
    if not kwargs.get("sqlite_file"):
        raise ValueError("Please provide an SQLite file")

    if not isfile(kwargs.get("sqlite_file")):
        raise FileNotFoundError("SQLite file does not exist")

    if not kwargs.get("mysql_user"):
        raise ValueError("Please provide a MySQL user")

    self._sqlite_file = realpath(kwargs.get("sqlite_file"))
    self._mysql_user = str(kwargs.get("mysql_user"))
    self._mysql_password = (str(kwargs.get("mysql_password"))
                            if kwargs.get("mysql_password") else None)
    self._mysql_host = str(kwargs.get("mysql_host") or "localhost")
    self._mysql_port = int(kwargs.get("mysql_port") or 3306)
    self._chunk_size = int(kwargs.get("chunk")) if kwargs.get("chunk") else None
    self._logger = self._setup_logger(log_file=kwargs.get("log_file") or None)
    self._mysql_database = str(kwargs.get("mysql_database") or "transfer")
    self._mysql_integer_type = str(
        kwargs.get("mysql_integer_type") or "INT(11)").upper()
    self._mysql_string_type = str(
        kwargs.get("mysql_string_type") or "VARCHAR(255)").upper()

    sqlite3.register_adapter(Decimal, adapt_decimal)
    sqlite3.register_converter("DECIMAL", convert_decimal)
    sqlite3.register_adapter(timedelta, adapt_timedelta)
    sqlite3.register_converter("TIME", convert_timedelta)

    if six.PY2:
        sqlite3.register_converter("BLOB", convert_blob)

    self._sqlite = sqlite3.connect(realpath(self._sqlite_file),
                                   detect_types=sqlite3.PARSE_DECLTYPES)
    self._sqlite.row_factory = sqlite3.Row
    self._sqlite_cur = self._sqlite.cursor()

    try:
        self._mysql = mysql.connector.connect(
            user=self._mysql_user,
            password=self._mysql_password,
            host=self._mysql_host,
            port=self._mysql_port,
            use_pure=True,
        )
        if not self._mysql.is_connected():
            raise ConnectionError("Unable to connect to MySQL")
        self._mysql_cur = self._mysql.cursor(prepared=True)
        try:
            self._mysql.database = self._mysql_database
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_BAD_DB_ERROR:
                self._create_database()
            else:
                self._logger.error(err)
                raise
    except mysql.connector.Error as err:
        self._logger.error(err)
        raise
def writecat(cat, dbfile, table='meas'):
    """ Write a catalog to the database """
    ncat = dln.size(cat)

    sqlite3.register_adapter(np.int8, int)
    sqlite3.register_adapter(np.int16, int)
    sqlite3.register_adapter(np.int32, int)
    sqlite3.register_adapter(np.int64, int)
    sqlite3.register_adapter(np.float16, float)
    sqlite3.register_adapter(np.float32, float)
    sqlite3.register_adapter(np.float64, float)

    db = sqlite3.connect(dbfile,
                         detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    c = db.cursor()

    # Convert numpy data types to sqlite3 data types
    d2d = {"S": "TEXT", "i": "INTEGER", "f": "REAL"}

    # Get the column names
    cnames = cat.dtype.names
    cdict = dict(cat.dtype.fields)

    # Create the table
    # the primary key ROWID is automatically generated
    if len(c.execute('SELECT name from sqlite_master where type= "table" and name="'
                     + table + '"').fetchall()) < 1:
        columns = cnames[0].lower() + ' ' + d2d[cdict[cnames[0]][0].kind]
        for n in cnames[1:]:
            columns += ', ' + n.lower() + ' ' + d2d[cdict[n][0].kind]
        c.execute('CREATE TABLE ' + table + '(' + columns + ')')

    # Insert statement
    columns = []
    for n in cnames:
        columns.append(n.lower())
    qmarks = np.repeat('?', dln.size(cnames))
    c.executemany('INSERT INTO ' + table + '(' + ','.join(columns)
                  + ') VALUES(' + ','.join(qmarks) + ')', list(cat))
    db.commit()
    db.close()
try:
    import cPickle as pickle
except ImportError:
    import pickle


# Problem: pickle.dumps takes unicode strings but returns binary strings.
def listAdapter(l):
    return buffer(pickle.dumps(l))


def listConverter(s):
    return pickle.loads(s)


sqlite3.register_adapter(list, listAdapter)
sqlite3.register_converter(str("list"), listConverter)

import glob

# db_files = glob.glob('/home/ryan/Dropbox/mbcat/mbcat.db.*')
db_files = [
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1401981167.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1402407797.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1404706835.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1404833080.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1405482875.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1409377728.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1410069776.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1410116048.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1410410368.bz2',
    #'/home/ryan/Dropbox/mbcat/mbcat.db.1411733272.bz2',
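# Added: a Python 3 variant of the list <-> pickle pattern above (`buffer`
# is Python 2 only; pickle.dumps already returns bytes, which bind as BLOB):
import pickle
import sqlite3

sqlite3.register_adapter(list, pickle.dumps)
sqlite3.register_converter("list", pickle.loads)

con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
con.execute("CREATE TABLE t (vals list)")
con.execute("INSERT INTO t VALUES (?)", ([1, "two", 3.0],))
assert con.execute("SELECT vals FROM t").fetchone()[0] == [1, "two", 3.0]
con.close()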
import flask, sqlite3, click, logging, json, datetime, uuid, sys, cmd
from flask import Flask, request, jsonify, Response, g
from flask_basicauth import BasicAuth

sqlite3.register_converter('GUID', lambda b: uuid.UUID(bytes_le=b))
sqlite3.register_adapter(uuid.UUID, lambda u: u.bytes_le)

app = flask.Flask(__name__)
app.config["DEBUG"] = True

DATABASE = 'forum1.db'
SHARD1 = 'forum2.db'
SHARD2 = 'forum3.db'
SHARD3 = 'forum4.db'


## Basic authentication
class dbAuth(BasicAuth):
    def check_credentials(this, username, password):
        app.config["username"] = username
        user_exists = existsInDB(
            "SELECT EXISTS (SELECT * FROM users WHERE username='******' AND password='******');"
            % (username, password))
        if not user_exists:
            raise InvalidUsage('401 UNAUTHORIZED', status_code=401)
        else:
            return True


basicAuth = dbAuth(app)
    return zlib.compress(molblock)


def convert_molblockgz(molgz):
    """Convert a compressed molblock to an RDKit molecule.

    Args:
        molgz: (str) zlib-compressed molblock

    Returns:
        rdkit.Chem.Mol: molecule
    """
    return MolFromMolBlock(zlib.decompress(molgz))


sqlite3.register_adapter(BitMap, adapt_BitMap)
sqlite3.register_converter('BitMap', convert_BitMap)
sqlite3.register_adapter(Mol, adapt_molblockgz)
sqlite3.register_converter('molblockgz', convert_molblockgz)


class FastInserter(object):
    """Use in a `with` block to make inserting faster, but less safe,
    by setting the journal mode to WAL and turning synchronous off.

    Args:
        cursor (sqlite3.Cursor): Sqlite cursor

    Examples:
def train_with_batch(self, info, batch_val):
    val_precision = []
    optimizer = torch.optim.Adam(model.parameters(), betas=(0.9, 0.999),
                                 eps=1e-9, lr=1e-4)
    Loss_len = 0
    num_examples_seen = 1
    # nepoch = nb_epoch()
    nepoch = 1000
    print('training epoch :', nepoch)
    print('length of batch :', int(len(info[0]) / SEN_LEN))

    ############################### database ################################
    sqlite3.register_adapter(np.ndarray, adapt_array)
    # Converts TEXT to np.array when selecting
    sqlite3.register_converter("array", convert_array)
    con = sqlite3.connect("./test.db", detect_types=sqlite3.PARSE_DECLTYPES)
    cur = con.cursor()
    #########################################################################

    for epoch in range(nepoch):
        print("epoch", epoch + 1)
        batch_train = generate_batch_train(info[0], info[1], info[2])
        for i in range(len(batch_train)):
            (batch_text, batch_img, batch_loc, batch_know, segment,
             batch_y, true_label, batch_mask) = batch_train[i]
            text = []
            start = time.time()
            cur.execute("select * from test where line in" + "{}".format(tuple(batch_text)))
            print('t1 :', time.time() - start)
            start = time.time()
            data_dic = dict(cur.fetchall())
            print('t2 :', time.time() - start)
            start = time.time()
            for j in batch_text:
                text.append(np.expand_dims(data_dic[j], axis=0))
            print('t3 :', time.time() - start)
            batch_text = np.concatenate(text, axis=0)
            # print(np.shape(batch_know))  # [batch_size, cat_num]
            # 3 factors or 4 factors
            lrate = math.pow(64, -0.5) * min(
                math.pow(num_examples_seen, -0.5),
                num_examples_seen * math.pow(4000, -1.5))  # warm-up step: default 4000
            loss = self.numpy_sdg_step((batch_text, batch_img, batch_loc,
                                        batch_know, segment, batch_y, batch_mask),
                                       optimizer, lrate, True)
            self.Loss += loss.item()
            Loss_len += 1
            if num_examples_seen % 1000 == 0:
                # origin = int(batch_len * nepoch / 100)
                time_ = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                print(time_, ' ',
                      int(100 * num_examples_seen / (len(batch_train) * nepoch)),
                      end='')
                print('% complete!!!', end='')
                print(' loss :', self.Loss / Loss_len)  # , ' lr :', lrate)
                self.Loss = 0
                Loss_len = 0
            num_examples_seen += 1
        print('Epoch', epoch + 1, 'completed out of', nepoch)

        # Validation set: 8104 examples.
        val_pred = []
        val_truth = []
        for i in range(len(batch_val)):
            model.eval()
            (batch_text, batch_img, batch_loc, batch_know, segment,
             batch_y, true_label, batch_mask) = batch_val[i]
            val_prob = self.softmax(
                self.forward(torch.tensor(batch_text).to(device).float(),
                             torch.tensor(batch_img).to(device).float(),
                             torch.tensor(batch_loc).to(device).long(),
                             torch.tensor(batch_know).to(device).long(),
                             torch.tensor(segment).to(device).long(),
                             torch.tensor(batch_mask).to(device).float(),
                             False))
            # if last_function() == "softmax":
            y_pred = np.argsort(val_prob.detach().cpu().numpy(), axis=1)
            for i in range(y_pred.shape[0]):
                val_pred.append(y_pred[i])
                val_truth.append(true_label[i])
        precision = top1_acc(val_truth, val_pred)
        print("Epoch:", (epoch + 1), "val_precision:", precision)
        val_precision.append(precision)
        torch.save({
            'epoch': epoch,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
            'num_examples_seen': num_examples_seen,
        }, 'project3_bert14_checkpoint/epoch=' + str(epoch))
    return val_precision
def main(args=None):
    """
    The 'main' method parses the database name along with the path as
    script arguments and loads the data into the database.
    """
    # Parse the arguments
    if args is None:
        args = sys.argv[1:]
    parsed_args = parse_arguments(args=args)

    # Get the database path
    db_path = parsed_args.database
    if not os.path.isfile(db_path):
        raise OSError("The database file {} was not found. Did you mean to "
                      "specify a different database?".format(
                          os.path.abspath(db_path)))

    # Get the CSV directory
    csv_path = parsed_args.csv_location
    if not os.path.isdir(csv_path):
        raise OSError("The csv folder {} was not found. Did you mean to "
                      "specify a different csv folder?".format(
                          os.path.abspath(csv_path)))

    #### MASTER CSV DATA ####
    csv_data_master = pd.read_csv(os.path.join(csv_path, 'csv_data_master.csv'))

    # Register numpy types with sqlite, so that they are properly inserted
    # from pandas dataframes
    # https://stackoverflow.com/questions/38753737/inserting-numpy-integer-types-into-sqlite-with-python3
    sqlite3.register_adapter(np.int64, lambda val: int(val))
    sqlite3.register_adapter(np.float64, lambda val: float(val))

    # Connect to database
    conn = connect_to_database(db_path=db_path)

    # Load all data in directory
    if parsed_args.subscenario is None and parsed_args.subscenario_id is None \
            and parsed_args.project is None:
        load_all_from_master_csv(conn=conn, csv_path=csv_path,
                                 csv_data_master=csv_data_master,
                                 quiet=parsed_args.quiet)
    elif parsed_args.subscenario is not None and parsed_args.subscenario_id is None:
        # Load all IDs for a subscenario-table
        load_all_subscenario_ids_from_directory(
            conn, csv_path, csv_data_master, parsed_args.subscenario,
            parsed_args.delete, parsed_args.quiet
        )
    else:
        # Load single subscenario ID (or project-subscenario ID)
        load_single_subscenario_id_from_directory(
            conn=conn,
            csv_path=csv_path,
            csv_data_master=csv_data_master,
            subscenario=parsed_args.subscenario,
            subscenario_id_to_load=parsed_args.subscenario_id,
            project=parsed_args.project,
            delete_flag=parsed_args.delete,
            quiet=parsed_args.quiet
        )

    # Close connection
    conn.close()
from arrow import Arrow
from sqlite3 import adapt, register_adapter

# https://docs.python.org/3.7/library/sqlite3.html#sqlite3.register_adapter
register_adapter(Arrow, lambda x: adapt(x.to('UTC').naive))
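# Added usage note (a sketch, assuming the `arrow` package is installed):
# sqlite3.adapt() runs the naive UTC datetime through the stock datetime
# adapter, so an Arrow value binds as an ISO-8601 string. (Recent Python
# versions deprecate the built-in datetime adapter, so this chaining trick
# may warn there.)
import sqlite3
from arrow import Arrow

sqlite3.register_adapter(Arrow, lambda x: sqlite3.adapt(x.to('UTC').naive))

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE events (ts TIMESTAMP)")
con.execute("INSERT INTO events VALUES (?)", (Arrow(2020, 1, 1, 12, 0),))
print(con.execute("SELECT ts FROM events").fetchone()[0])  # '2020-01-01 12:00:00'
con.close()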
def construct_python_tuple(self, node):
    return tuple(self.construct_sequence(node))


def represent_python_tuple(self, data):
    return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)


yaml.SafeLoader.add_constructor('tag:yaml.org,2002:python/tuple',
                                construct_python_tuple)
yaml.SafeDumper.add_representer(tuple, represent_python_tuple)


# For some reason, sqlite doesn't parse to int before this, despite the
# column affinity: it gives the register_converter function a bytestring :/
def integer_boolean_to_bool(integer_boolean):
    return bool(int(integer_boolean))


# sqlite mod
sqlite3.register_adapter(dict, yaml.safe_dump)
sqlite3.register_adapter(list, yaml.safe_dump)
sqlite3.register_adapter(tuple, yaml.safe_dump)
sqlite3.register_adapter(bool, int)
sqlite3.register_converter('INTEGER_BOOLEAN', integer_boolean_to_bool)
sqlite3.register_converter('TEXT_YAML', yaml.safe_load)
import numpy as np
import multiprocessing
import os
import sqlite3 as sql
import pandas as pd
from itertools import repeat
from contextlib import closing
from tqdm import tqdm

import interaction3.abstract as abstract
from interaction3.mfield.simulations import TransmitBeamplot
from interaction3.mfield.simulations import sim_functions as sim

# register adapters for sqlite to convert numpy types
sql.register_adapter(np.float64, float)
sql.register_adapter(np.float32, float)
sql.register_adapter(np.int64, int)
sql.register_adapter(np.int32, int)


## PROCESS FUNCTIONS ##

def init_process(_write_lock, sim, array):
    global write_lock, simulation
    write_lock = _write_lock
    sim = abstract.loads(sim)
    array = abstract.loads(array)
# -*- coding: utf-8 -*-
#
import json
import sqlite3
import pickle

import discord

sqlite3.register_converter('pickle', pickle.loads)
sqlite3.register_converter('json', json.loads)
sqlite3.register_adapter(dict, json.dumps)
sqlite3.register_adapter(list, pickle.dumps)

db = sqlite3.connect("sina_datas.db",
                     detect_types=sqlite3.PARSE_DECLTYPES,
                     isolation_level=None)
db.row_factory = sqlite3.Row
cursor = db.cursor()


def textto(k: str, user):
    if type(user) == str:
        lang = user
        try:
            with open(f"lang/{lang}.json", "r", encoding="utf-8") as j:
                f = json.load(j)
        except:
            return f"Not found language:`{lang}`(key:`{k}`)"
        try:
            return f[k]
        except:
            return f"Not found key:`{k}`"
    elif isinstance(user, discord.Guild):
def _updateChannelAndProgramListCaches(self, date, progress_callback,
                                       clearExistingProgramList):
    sqlite3.register_adapter(datetime.datetime, self.adapt_datetime)
    sqlite3.register_converter('timestamp', self.convert_datetime)
    return
def connect(name: str, debug: bool = False,
            version: int = -1) -> ConnectionPlus:
    """
    Connect or create database. If debug the queries will be echoed back.
    This function takes care of registering the numpy/sqlite type
    converters that we need.

    Args:
        name: name or path to the sqlite file
        debug: whether or not to turn on tracing
        version: which version to create. We count from 0. -1 means 'latest'.
            Should always be left at -1 except when testing.

    Returns:
        conn: connection object to the database (note, it is
            `ConnectionPlus`, not `sqlite3.Connection`)
    """
    # register numpy->binary(TEXT) adapter
    # the typing here is ignored due to what we think is a flaw in typeshed
    # see https://github.com/python/typeshed/issues/2429
    sqlite3.register_adapter(np.ndarray, _adapt_array)
    # register binary(TEXT) -> numpy converter
    # for some reasons mypy complains about this
    sqlite3.register_converter("array", _convert_array)

    sqlite3_conn = sqlite3.connect(name, detect_types=sqlite3.PARSE_DECLTYPES)
    conn = ConnectionPlus(sqlite3_conn)

    latest_supported_version = _latest_available_version()
    db_version = get_user_version(conn)

    if db_version > latest_supported_version:
        raise RuntimeError(f"Database {name} is version {db_version} but this "
                           f"version of QCoDeS supports up to "
                           f"version {latest_supported_version}")

    # sqlite3 options
    conn.row_factory = sqlite3.Row

    # Make sure numpy ints and floats types are inserted properly
    for numpy_int in [np.int, np.int8, np.int16, np.int32, np.int64,
                      np.uint, np.uint8, np.uint16, np.uint32, np.uint64]:
        sqlite3.register_adapter(numpy_int, int)

    sqlite3.register_converter("numeric", _convert_numeric)

    for numpy_float in [np.float, np.float16, np.float32, np.float64]:
        sqlite3.register_adapter(numpy_float, _adapt_float)

    for complex_type in complex_types:
        sqlite3.register_adapter(complex_type, _adapt_complex)
    sqlite3.register_converter("complex", _convert_complex)

    if debug:
        conn.set_trace_callback(print)

    init_db(conn)
    perform_db_upgrade(conn, version=version)
    return conn