def _connectToDb(self):
    """ Opens a db connection """
    self.con = sqlite3.connect(self.db_path,
                               detect_types=sqlite3.PARSE_DECLTYPES)
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
    self.con.row_factory = self._dictFactory
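# A minimal round-trip sketch (separate from the snippet above; the table name
# `flags` is illustrative) showing why this bool/BOOLEAN pair needs both a
# declared column type and detect_types=sqlite3.PARSE_DECLTYPES to fire:
import sqlite3

sqlite3.register_adapter(bool, int)
sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))

con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
con.execute("CREATE TABLE flags (done BOOLEAN)")
con.execute("INSERT INTO flags VALUES (?)", (True,))  # adapter stores 1
row = con.execute("SELECT done FROM flags").fetchone()
print(row[0], type(row[0]))  # True <class 'bool'> -- converter restored it
con.close()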
def get_db(config):
    db = sqlite3.connect(config.get("CONNECTION", "SQLITE_DB"))
    db.row_factory = sqlite3.Row
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
    return db
def __init__(self):
    # Boolean to record whether the database exists. We need this because
    # we need to check if the db file exists in the file system before we
    # connect to the database.
    exists = False
    # Check if the database exists in the file system.
    if os.path.isfile('spellingaid.db'):
        exists = True
    # Connect to the database and create a cursor.
    self.db = sqlite.connect('spellingaid.db',
                             detect_types=sqlite.PARSE_DECLTYPES)
    self.db.text_factory = str
    self.c = self.db.cursor()
    # If the database didn't exist, initialise the tables.
    if not exists:
        self.db.executescript(INIT)
    # Register adapters and converters to let the database work with User
    # and Word objects.
    sqlite.register_adapter(User, lambda u: u.serialise())
    sqlite.register_adapter(Word, lambda w: w.serialise())
    sqlite.register_converter('User', User.deserialise)
    sqlite.register_converter('Word', Word.deserialise)
    self.listeners = []
def __init__(self, DBfn=None, DBcolumns=None, DB_DEBUG=False):
    self.DB_DEBUG = DB_DEBUG
    self.DBfn = DBfn
    self.DBcolumns = DBcolumns
    if self.DBfn is None:
        self.DBfn = os.path.join(os.path.expanduser('~'), 'Desktop',
                                 "MagicDB", __sqlext__)
        print("WARNING, creating/using a default database: {}".format(self.DBfn))
    if not os.path.isdir(os.path.dirname(self.DBfn)):
        os.makedirs(os.path.dirname(self.DBfn))
    sqlite3.register_converter("json", json.loads)
    sqlite3.register_adapter(list, json.dumps)
    sqlite3.register_adapter(dict, json.dumps)
    self.con = sqlite3.connect(self.DBfn,
                               detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    self.con.row_factory = sqlite3.Row
    self.con.text_factory = sqlite3.OptimizedUnicode
    self.cur = self.con.cursor()
    self.newDB = False
    # check that tables exist. if not, make them
    for t, v in self.DBcolumns.viewitems():
        if not self.cur.execute('''PRAGMA table_info ('{}')'''.format(t)).fetchall():
            self.cur.execute(v)
            self.con.commit()
            print("Created new table: {}".format(t))
            self.newDB = True
        else:
            print("using existing table: {} ".format(t))
    print("in file: {}".format(self.DBfn))
    self.tables = [a[0] for a in self.cur.execute(
        '''SELECT name FROM sqlite_master WHERE type='table' ''').fetchall()]
def classify(eigvects, name):
    print 'eigvects: ', eigvects
    # Converts np.array to TEXT when inserting
    sqlite3.register_adapter(np.ndarray, cvtNparr.adapt_array)
    # Converts TEXT to np.array when selecting
    sqlite3.register_converter("array", cvtNparr.convert_array)
    conn = sqlite3.connect("/home/wlw/oliverProjects/3DClassification/classification.db",
                           detect_types=sqlite3.PARSE_DECLTYPES)
    cur = conn.cursor()
    cur.execute("select eigvects, id from model where type='flat'")
    lists = cur.fetchall()
    for lis in lists:  # lis is a tuple
        #print 'lis[0]: ', lis[0]
        #print type(lis[0])
        res = lis[0] - eigvects
        summ = 0
        for r in res:
            d = math.sqrt(sum(math.pow(value, 2) for value in r))
            summ += d
        similarity = summ / 3.0
        print '%s\'s similarity with %s is %f ' % (lis[1], name, similarity)
    conn.close()
def custom_sort(db_filename):
    # register the functions for manipulating the type
    sqlite3.register_adapter(MyDataObject, quiet_adapter_function)
    sqlite3.register_converter("MyDataObject", quiet_converter_function)
    with closing(sqlite3.connect(db_filename,
                                 detect_types=sqlite3.PARSE_DECLTYPES)) as conn:
        # define the collation
        conn.create_collation('unpickle', collation_function)
        # clear the table and insert new values
        conn.execute("""delete from obj""")
        conn.executemany("""insert into obj (data) values (?)""",
                         [(MyDataObject(i),) for i in xrange(5, 0, -1)])
        # query the db for the objects just saved
        print "Querying:"
        cursor = conn.cursor()
        cursor.execute("""select id, data from obj
                          order by data collate unpickle""")
        for obj_id, obj in cursor.fetchall():
            print obj_id, obj
def __prepare_db(self, dbfile):
    (DB_SOURCE, DB_HOST, DB_PORT, DB_USERNAME, DB_PASSWORD, DB_DATABASE) = dbfile
    self.DB_SOURCE = DB_SOURCE
    if DB_SOURCE == "sqlite":
        self.IntegrityError = sqlite3.IntegrityError
        sqlite3.register_converter("pickle", cPickle.loads)
        self.dbcon = sqlite3.connect(DB_HOST,
                                     detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
        self.dbcon.text_factory = unicode  # This is the default, but we set it explicitly, just to be sure.
        self.dbcur = self.dbcon.cursor()
        self.dbcur.execute(
            "CREATE TABLE IF NOT EXISTS %s(id INTEGER PRIMARY KEY AUTOINCREMENT, item pickle, item_key CHAR(32))"
            % (self.table)
        )
        self.dbcur.execute("CREATE UNIQUE INDEX IF NOT EXISTS unique_key ON %s (item_key)" % (self.table))
    elif DB_SOURCE == "mysql":
        self.IntegrityError = MySQLdb.IntegrityError
        self.dbcon = MySQLdb.connect(
            host=DB_HOST, port=DB_PORT, user=DB_USERNAME,
            passwd=DB_PASSWORD, db=DB_DATABASE, charset="utf8"
        )
        self.dbcur = self.dbcon.cursor()
        self.dbcur.execute(
            "CREATE TABLE IF NOT EXISTS %s(id INT NOT NULL AUTO_INCREMENT, item TEXT, item_key VARCHAR(32), PRIMARY KEY (id), UNIQUE INDEX unique_key (item_key))"
            % (self.table)
        )
    else:
        self.logger.error("Invalid DB_SOURCE detected")
    self.dbcon.commit()
def __init__(self, dbpath):
    self.dbpath = dbpath
    # !!!: Remember that you must update the self._format_parameters method
    # if you update the self.type property.
    self.type = {
        'int': 'INTEGER',
        'float': 'REAL',
        'str': 'TEXT',
        'bytes': 'BLOB',
        'prim': 'PRIMARY KEY',
        'intPrim': 'INTEGER PRIMARY KEY',
        'bool': 'BOOLEAN',
        'date': 'DATE',
        'datetime': 'TIMESTAMP',
    }
    self.validTypes = set(self.type.keys())
    self.bindingDict = {}
    # Adapters and converters for the bool type
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
    self.connection = Connection(self.dbpath,
                                 check_same_thread=False,
                                 detect_types=sqlite3.PARSE_DECLTYPES)
    atexit.register(self._finalize)
def __init__(self, database_name):
    sql.register_adapter(np.ndarray, self.adapt_array)
    sql.register_converter("array", self.convert_array)
    self.conn = sql.connect('database/' + str(database_name) + '.db',
                            isolation_level=None,
                            detect_types=sql.PARSE_DECLTYPES,
                            check_same_thread=False)
    if TYPE == 1:
        self.conn.execute("CREATE TABLE IF NOT EXISTS `files` ("
                          "`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                          "`file_path` TEXT NOT NULL)")
        self.conn.execute("CREATE TABLE IF NOT EXISTS `features` ("
                          "`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                          "`file_id` INTEGER NOT NULL,"
                          "`frame` INTEGER NOT NULL,"
                          "`feature` array NOT NULL,"
                          "`class` TEXT NOT NULL)")
        self.conn.execute("CREATE TABLE IF NOT EXISTS `final_weight` ("
                          "`vectors` array NOT NULL,"
                          "`class` TEXT NOT NULL)")
    else:
        self.conn.execute("CREATE TABLE IF NOT EXISTS `output_classes` ("
                          "`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                          "`file_path` TEXT NOT NULL,"
                          "`class` TEXT NOT NULL)")
        self.conn.execute("CREATE TABLE IF NOT EXISTS `feature_sets` ("
                          "`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                          "`output_class_id` INTEGER NOT NULL,"
                          "`frame` INTEGER NOT NULL,"
                          "`features` array NOT NULL)")
def main():
    # 2012 Illinois ACS PUMS file with NO pre-processing.
    filename = 'ss12pil.csv'
    # OnePerson class from week 3, store the 100th row in the object.
    p = OnePerson([])
    p.read_line(filename, 100)
    # tell sqlite3 library that we will be using our custom functions
    # adapt_person and convert_person as adapter and converter, respectively.
    sqlite3.register_adapter(OnePerson, adapt_person)
    sqlite3.register_converter('person', convert_person)
    # use memory for testing, you can change it to a filename and store it
    # note that we are using DECLARED TYPES.
    conn = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
    cur = conn.cursor()
    # call our custom functions.
    create_table(cur)
    insert_person(cur, p)
    print_head(cur)
    # close connection.
    cur.close()
    conn.close()
def create_db(self):
    """ Check if the database is stored in the path """
    if config.create_config_dir(self.user_path):
        self.conn = sqlite3.connect(self.user_path + self.filename,
                                    detect_types=sqlite3.PARSE_DECLTYPES)
        sqlite3.register_adapter(bool, int)
        sqlite3.register_converter("BOOLEAN", lambda v: int(v) != 0)
def __init__(self, fname):
    import sqlite3
    self.db = sqlite3.connect(fname, detect_types=sqlite3.PARSE_DECLTYPES)
    sqlite3.register_converter('timestamp', convert_datetime)
    ## @todo remove self.cursor
    self.cursor = self.db.cursor()
def _sqlite3(self, name):
    """Open/create a sqlite3 DB file"""
    def dict_factory(cursor, row):
        d = {}
        for idx, col in enumerate(cursor.description):
            d[col[0]] = row[idx]
        return d

    def converter(data):
        return json.loads(data.decode('utf-8'))

    sqlite3.register_adapter(list, json.dumps)
    sqlite3.register_adapter(dict, json.dumps)
    sqlite3.register_converter("json", converter)
    conn = sqlite3.connect(
        self.name,
        detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
        isolation_level=None)
    conn.row_factory = dict_factory
    sqlscript = """
        create table if not exists doit (
            task_id text not null primary key,
            task_data json
        );"""
    try:
        conn.execute(sqlscript)
    except sqlite3.DatabaseError as exception:
        new_message = (
            'Dependencies file in %(filename)s seems to use '
            'a bad format or is corrupted.\n'
            'To fix the issue you can just remove the database file(s) '
            'and a new one will be generated.\n'
            'Original error: %(msg)s'
            % {'filename': repr(self.name), 'msg': str(exception)})
        raise DatabaseException(new_message)
    return conn
def __prepare_db(self, dbfile):
    sqlite3.register_converter("pickle", cPickle.loads)
    self.dbcon = sqlite3.connect(dbfile,
                                 detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    self.dbcon.text_factory = unicode  # This is the default, but we set it explicitly, just to be sure.
    self.dbcur = self.dbcon.cursor()
    self.dbcur.execute("CREATE TABLE IF NOT EXISTS %s(id INTEGER PRIMARY KEY AUTOINCREMENT, item pickle)" % (self.table))
    self.dbcon.commit()
def add_to_base():
    """
    add_to_base()
    This function gets the news array and forms the database.
    The main function.
    """
    r = get_news()
    if r.status_code == requests.codes.OK:
        news = json.loads(r.content)
    else:
        print r.headers
        return -1
    sqlite3.register_converter("json", json.loads)
    db = sqlite3.connect(db_name,
                         detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    cur = db.cursor()
    cur.execute('''create table if not exists RSS
                   (id INTEGER PRIMARY KEY AUTOINCREMENT,
                    Date, Author, EventType, Summary json)''')
    cur.execute('''SELECT max(id),Date FROM RSS''')
    record = cur.fetchall()
    if not record[0][0]:
        last_date_from_db = ""
    else:
        last_date_from_db = record[0][1]
    count = 0
    while count < len(news) and news[count]['created_at'] > last_date_from_db:
        count += 1
    while count > 0:
        count -= 1
        summary = json.dumps(news[count])
        cur.execute('''insert into RSS (id, Date, Author, EventType, Summary)
                       VALUES (NULL,?,?,?,?)''',
                    (news[count]['created_at'], news[count]['actor']['login'],
                     news[count]['type'], summary))
    db.commit()
    db.close()
def prepdb(self, dbname, debug=False):
    self.database = sqlite3.connect(database=dbname, timeout=1.0,
                                    detect_types=sqlite3.PARSE_DECLTYPES)
    sqlite3.register_converter('BOOL', convert_bool)
    self.database.create_function('distance', 2, absDist)
    self.database.create_function('bearing', 2, absBear)
    self.database.row_factory = sqlite3.Row
    cur = self.database.cursor()
    try:
        cur.execute("select version from version")
        row = cur.fetchone()
        vnum = row[0]
        vname = 'statements_v%03d' % vnum
        logging.debug('Database version name is: %s' % vname)
        for stgrp in self.allstatements[self.allstatements.index(vname) + 1:]:
            stmts = Database.__dict__[stgrp]
            self.sqlexec(stmts)
            vnum = int(stgrp[-3:])
            cur.execute("UPDATE version SET version=?", (vnum,))
            self.database.commit()
            logging.debug('Database version is now %i' % vnum)
    except sqlite3.OperationalError:
        logging.debug('Trying database build from scratch')
        for stgrp in self.allstatements:
            stmts = Database.__dict__[stgrp]
            self.sqlexec(stmts)
            vnum = int(stgrp[-3:])
            cur.execute("UPDATE version SET version=?", (vnum,))
            self.database.commit()
            logging.debug('Database version is now %i' % vnum)
def _new_database(self):
    """
    create and connect to a new sqlite database.
    raise an error if there already is a database in place, asking the user
    to manually remove the database (for safety reasons)
    """
    # TODO: remove next two lines after testing -> don't automatically remove
    if os.path.exists(self.database):
        os.remove(self.database)
    if os.path.exists(self.database):
        message = "Database already exists, please remove manually: %s" % self.database
        logger.error(message)
        raise IOError(message)
    else:
        logger.info("Database not found, creating database %s" % self.database)
        try:
            self.connection = sqlite3.connect(
                self.database,
                detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES
            )
        except:
            message = "Failed to create database: %s" % self.database
            logger.error(message)
            raise sqlite3.OperationalError(message)  # re-raise error
        self._create_dbstructure()
        sqlite3.register_adapter(bool, int)
        sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
        # tuples
        self.connection.row_factory = sqlite3.Row
def _connect(self):
    self._database = self._db_args["db"]
    sqlite3.register_converter("DATE", adapt_converter)
    self._conn = sqlite3.connect(self._database,
                                 detect_types=sqlite3.PARSE_DECLTYPES)
    self._conn.isolation_level = None
    # self._conn.row_factory = dict_factory
    self._cursor = self._conn.cursor()
def __init__(self, path):
    # Constants
    self.FORMATS = {}
    self.FORMATS['runtime'] = [('item', 'TEXT'), ('value', 'TEXT')]
    self.FORMATS['conf'] = [('sec', 'TEXT'), ('opt', 'TEXT'), ('val', 'TEXT')]
    self.FORMATS['io'] = [('hid', 'INTEGER'), ('pid', 'INTEGER'),
                          ('tid', 'INTEGER'), ('fsize', 'INTEGER'),
                          ('bsize', 'INTEGER'), ('elapsed', 'BLOB'),
                          ('sync', 'REAL'), ('agg', 'REAL'),
                          ('aggnoclose', 'REAL'), ('opavg', 'REAL'),
                          ('opmin', 'REAL'), ('opmax', 'REAL'),
                          ('opstd', 'REAL')]
    self.FORMATS['meta'] = [('hid', 'INTEGER'), ('pid', 'INTEGER'),
                            ('tid', 'INTEGER'), ('opcnt', 'INTEGER'),
                            ('factor', 'INTEGER'), ('elapsed', 'BLOB'),
                            ('sync', 'REAL'), ('agg', 'REAL'),
                            ('opavg', 'REAL'), ('opmin', 'REAL'),
                            ('opmax', 'REAL'), ('opstd', 'REAL')]
    self.FORMATS['aggdata'] = [('hostid', 'INTEGER'), ('pid', 'INTEGER'),
                               ('tid', 'INTEGER'), ('oper', 'TEXT'),
                               ('optype', 'INTEGER'), ('min', 'REAL'),
                               ('max', 'REAL'), ('avg', 'REAL'),
                               ('agg', 'REAL'), ('std', 'REAL'),
                               ('time', 'REAL')]
    self.FORMATS_LEN = {}
    for k, v in self.FORMATS.items():
        self.FORMATS_LEN[k] = len(self.FORMATS[k])
    sqlite3.register_converter("BLOB", lambda s: cPickle.loads(str(s)))
    sqlite3.register_adapter(list, cPickle.dumps)
    sqlite3.register_adapter(dict, cPickle.dumps)
    self.db = sqlite3.connect(path, detect_types=sqlite3.PARSE_DECLTYPES)
    self.cur = self.db.cursor()
    self.tables = []  # all tables in database
def register_types(self):
    sqlite.register_adapter(dt.datetime, self.adapt_datetime)
    sqlite.register_adapter(dict, self.adapt_json)
    sqlite.register_adapter(list, self.adapt_json)
    sqlite.register_adapter(tuple, self.adapt_json)
    sqlite.register_converter('datetime', self.convert_datetime)
    sqlite.register_converter('json', self.convert_json)
def __init__(self):
    sqlite3.register_adapter(datetime.datetime, self.adapt_datetime)
    sqlite3.register_converter('DATETIME', self.convert_datetime)
    self.db_name = os.path.abspath('../../db/versions.db')
    self.connection = sqlite3.connect(self.db_name,
                                      detect_types=sqlite3.PARSE_DECLTYPES,
                                      isolation_level=None)
    self.connection.row_factory = sqlite3.Row
    self.cursor = self.connection.cursor()
def __init__(self):
    # initialise database connection
    sqlite3.register_adapter(datetime.datetime, self.adapt_datetime)
    sqlite3.register_converter('timestamp', self.convert_datetime)
    path = os.path.join(dixie.PROFILE, PROGRAM_DB)
    self.conn = sqlite3.connect(path, timeout=10,
                                detect_types=sqlite3.PARSE_DECLTYPES,
                                check_same_thread=False)
    self.conn.row_factory = sqlite3.Row
def sqlite2json(exclude_tables=None, db_file=None):
    # Make sure blobs are base64 encoded
    sqlite3.register_converter('BLOB', base64.b64encode)
    conn = sqlite3.connect(db_file, detect_types=sqlite3.PARSE_DECLTYPES)
    cursor = conn.cursor()
    return json.dumps(get_tables(cursor))
def unit_test2():
    sqlite3.register_adapter(dict, adapt_any)  # register the adapter
    sqlite3.register_converter("STRSET", convert_any)  # define the new data type
    conn = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
    c = conn.cursor()
    c.execute("CREATE TABLE test (id_set STRSET)")
    c.execute("INSERT INTO test (id_set) VALUES (?)", ({1: "a", 2: "b", 3: "c"},))
    print(c.execute("SELECT * FROM test").fetchall())
def connect_db():
    """Connects to the specific database."""
    rv = sqlite3.connect(app.config['DATABASE'],
                         detect_types=sqlite3.PARSE_DECLTYPES)
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("boolean", lambda v: bool(int(v)))
    rv.row_factory = sqlite3.Row
    return rv
def __init__(self):
    sqlite3.register_converter('GUID', lambda b: uuid.UUID(bytes_le=b))
    sqlite3.register_adapter(uuid.UUID, lambda u: buffer(u.bytes_le))
    self.conn = sqlite3.connect('Feeds.db', detect_types=sqlite3.PARSE_DECLTYPES)
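# The snippet above is Python 2 (`buffer` was removed in Python 3). A minimal
# Python 3 sketch of the same GUID round trip, assuming a table (here the
# illustrative `feeds`) declares a column of type GUID:
import sqlite3
import uuid

sqlite3.register_converter('GUID', lambda b: uuid.UUID(bytes_le=b))
sqlite3.register_adapter(uuid.UUID, lambda u: u.bytes_le)  # bytes, not buffer

conn = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
conn.execute("CREATE TABLE feeds (id GUID)")
guid = uuid.uuid4()
conn.execute("INSERT INTO feeds VALUES (?)", (guid,))
print(conn.execute("SELECT id FROM feeds").fetchone()[0] == guid)  # True
conn.close()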
def sqlite_adapt(types=("JSON", "TIMESTAMP")):
    """Adds autoconversion support to SQLite for JSON and TIMESTAMP columns."""
    types = [x.upper() for x in types]
    if "JSON" in types:
        for x in (dict, list, tuple):
            sqlite3.register_adapter(x, json_dumps)
        for x in ("json", "JSON"):
            sqlite3.register_converter(x, json_loads)
    if "TIMESTAMP" in types:
        sqlite3.register_converter("timestamp", parse_datetime)
        sqlite3.register_converter("TIMESTAMP", parse_datetime)
def start(db_path='library.db', create_script='library.ddl'):
    sqlite3.register_adapter(bool, int)
    sqlite3.register_converter("BOOLEAN", lambda v: v != '0')
    if os.path.isfile(db_path):
        conn = sqlite3.connect(db_path)
    else:
        conn = sqlite3.connect(db_path)
        create_tables(conn, create_script)
    conn.execute('PRAGMA FOREIGN_KEYS = 1;')
    return conn
def db_cur(source = ":memory:"): # Register the adapter sqlite3.register_adapter(decimal.Decimal, adapt_decimal) # Register the converter sqlite3.register_converter("DECTEXT", convert_decimal) conn = sqlite3.connect(source, detect_types=sqlite3.PARSE_DECLTYPES) #conn.row_factory = sqlite3.Row cur = conn.cursor() return conn, cur
def __init__(self, name):
    self.name = name
    profilePath = xbmc.translatePath(__settings__.getAddonInfo('profile'))
    if not os.path.exists(profilePath):
        os.makedirs(profilePath)
    self.databasePath = os.path.join(profilePath, name)
    sqlite3.register_adapter(datetime.datetime, self.adapt_datetime)
    sqlite3.register_converter('timestamp', self.convert_datetime)
    self.conn = sqlite3.connect(self.databasePath,
                                detect_types=sqlite3.PARSE_DECLTYPES)
    self.create_tables()
def _updateChannelAndProgramListCaches(self, date, progress_callback,
                                       clearExistingProgramList):
    sqlite3.register_adapter(datetime.datetime, self.adapt_datetime)
    sqlite3.register_converter('timestamp', self.convert_datetime)
    if not self._isCacheExpired(date):
        return
    dixie.BackupChannels()
    self.updateInProgress = True
    self.updateFailed = False
    dateStr = date.strftime('%Y-%m-%d')
    if len(self.channelDict) == 0:
        channels = self.getAllChannels()
        for channel in channels:
            theChannel = self.getChannelFromFile(channel)
            if theChannel:
                self.channelDict[channel] = theChannel
    try:
        dixie.log('Updating caches...')
        if progress_callback:
            progress_callback(0)
        dixie.GetCats()
        if self.settingsChanged:
            self.source.doSettingsChanged()
        self.settingsChanged = False  # only want to update once due to changed settings
        toDelete = self.getAllChannels()
        weight = 0
        imported = imported_channels = imported_programs = 0
        for item in self.source.getDataFromExternal(date, progress_callback):
            imported += 1
            if isinstance(item, Channel):
                imported_channels += 1
                channel = item
                clean = CleanFilename(channel.id)
                if clean in toDelete:
                    toDelete.remove(clean)
                weight += 1
                channel.weight = weight
                self.createChannel(channel)
        # channels updated
        try:
            settings.set('ChannelsUpdated',
                         self.adapt_datetime(datetime.datetime.now()),
                         settingsFile)
        except:
            pass
        self.deleteOldChannels(toDelete)
        if imported_channels == 0:
            self.updateFailed = True
        if imported_programs == 0:
            self.updateFailed = (not USE_DB_FILE)
    except SourceUpdateCanceledException:
        # force source update on next load
        try:
            settings.set('ChannelsUpdated', 0, settingsFile)
        except:
            pass
    except Exception:
        import traceback as tb
        import sys
        (etype, value, traceback) = sys.exc_info()
        tb.print_exception(etype, value, traceback)
        try:
            # invalidate cached data
            settings.set('ChannelsUpdated', 0, settingsFile)
        except:
            pass
        self.updateFailed = True
    update = dixie.GetSetting('updated.channels')
    if int(update) < 0:
        dixie.SetSetting('updated.channels', 0)
        dixie.SetSetting('current.channels', 0)
    else:
        dixie.SetSetting('current.channels', update)
    self.channelDict = {}
    self.updateInProgress = False
def __init__(self, name: str):
    sqlite3.register_adapter(Decimal, adapt_decimal)
    sqlite3.register_converter("decimal", convert_decimal)
    self.name = name
    self.Initialise()
import sqlite3
import numpy as np
np.set_printoptions(threshold=np.inf)
import utils
import operator

# Converts np.array to TEXT when inserting
sqlite3.register_adapter(np.ndarray, utils.adapt_array)
# Converts TEXT to np.array when selecting
sqlite3.register_converter("array", utils.convert_array)

conn = sqlite3.connect('../database.db', detect_types=sqlite3.PARSE_DECLTYPES)

sql = "SELECT DISTINCT t.movielensid FROM movielens_tag t JOIN movielens_training_dataset mtd ON mtd.movielensid = t.movielensid"
# sql_tags = "SELECT tag, COUNT(*) FROM movielens_tag WHERE movielensid = ? GROUP BY tag"
sql_all_tags = "SELECT tag FROM movielens_all_tags"
sql_tags = "SELECT tag, COUNT(*) FROM movielens_tag t " \
           "JOIN movielens_rating r ON r.userid = t.userid AND r.movielensid = t.movielensid " \
           "JOIN movielens_training_dataset mtd ON mtd.movielensid = r.movielensid " \
           "WHERE t.movielensid = ? AND r.rating > 3 " \
           "GROUP BY tag"

# TOTAL_MOVIES = 2903 - 580
TOTAL_MOVIES = float(2903)
TOTAL_TAGS = float(17508)
k1 = 0.75
b = float(2)
avg_tags = TOTAL_TAGS / TOTAL_MOVIES  # pre-computed


def getUsersByTag(conn, tag):
import sqlite3  # needed for the register_* calls below

import numpy as np
import os

# defining new SQLITE datatypes to be able to store numpy array types

# ------------------------------------------------------------ FLOAT ARRAY
def adapt_array(arr):
    return arr.tobytes()

def convert_array(text):
    return np.frombuffer(text, dtype=np.float32)

# register for np.ndarray (the type), not np.array (the factory function)
sqlite3.register_adapter(np.ndarray, adapt_array)
sqlite3.register_converter("array", convert_array)

# ------------------------------------------- SDR (SPARSE, contains active indices)
def adapt_sdr(arr):
    return arr.tobytes()

def convert_sdr(text):
    return np.frombuffer(text, dtype=np.uint32)

# note: this second registration replaces adapt_array for all ndarrays,
# since sqlite3 keeps a single adapter per Python type
sqlite3.register_adapter(np.ndarray, adapt_sdr)
sqlite3.register_converter("sdr", convert_sdr)
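# A minimal round-trip sketch continuing the snippet above (the table name
# `sdrs` is illustrative). With PARSE_DECLTYPES, a column declared as `sdr`
# routes SELECT results through convert_sdr, while INSERTs of any ndarray go
# through the last-registered adapter (adapt_sdr):
con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
con.execute("CREATE TABLE sdrs (active sdr)")
con.execute("INSERT INTO sdrs VALUES (?)",
            (np.array([1, 5, 42], dtype=np.uint32),))
restored = con.execute("SELECT active FROM sdrs").fetchone()[0]
print(restored)  # [ 1  5 42] -- rebuilt by convert_sdr from the stored bytes
con.close()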
        try:
            return json.loads(value) if value is not None else None
        except Exception:
            raise psycopg2.InterfaceError("bad JSON representation: %r" % value)

    with gripe.db.Tx.begin() as tx:
        cur = tx.get_cursor()
        cur.execute("SELECT NULL::jsonb")
        jsonb_oid = cur.description[0][1]
    JSONB = psycopg2.extensions.new_type((jsonb_oid,), "JSONB", cast_jsonb)
    psycopg2.extensions.register_type(JSONB, None)
elif gripe.db.Tx.database_type == "sqlite3":
    import sqlite3

    def adapt_json(d):
        return json.dumps(d)

    def convert_json(value):
        return json.loads(value) if value is not None else None

    sqlite3.register_adapter(dict, adapt_json)
    sqlite3.register_converter("JSONB", convert_json)

    def adapt_key(k):
        return str(k)

    sqlite3.register_adapter(grumble.key.Key, adapt_key)
def setUp(self):
    self.con = sqlite.connect(':memory:', detect_types=sqlite.PARSE_COLNAMES)
    sqlite.register_converter('bin', BinaryConverterTests.convert)
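# A minimal sketch (separate from the test above) of how PARSE_COLNAMES drives
# a converter: the type is named in the column alias as "name [type]" rather
# than in the table schema. The 'upper' converter name is illustrative only.
import sqlite3

sqlite3.register_converter('upper', lambda b: b.decode().upper())
con = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_COLNAMES)
con.execute("CREATE TABLE t (word TEXT)")
con.execute("INSERT INTO t VALUES ('hello')")
row = con.execute('SELECT word AS "word [upper]" FROM t').fetchone()
print(row[0])  # HELLO -- converted because of the [upper] column annotation
con.close()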
def __init__(cls, name, bases, dct):
    super(MappingMeta, cls).__init__(name, bases, dct)
    sqlite3.register_converter(name, cls)
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================


import sqlite3

from eos.util.repr import make_repr_str
from .abc import BaseDataHandler


# SQLite stores bools as 0 or 1, convert them to python bool
sqlite3.register_converter('BOOLEAN', lambda v: int(v) == 1)


class SQLiteDataHandler(BaseDataHandler):
    """
    Handler for loading data from SQLite database.
    Data should be in Phobos-like format, for details on it
    refer to JSON data handler doc string.
    """

    def __init__(self, dbpath):
        conn = sqlite3.connect(dbpath, detect_types=sqlite3.PARSE_DECLTYPES)
        conn.row_factory = sqlite3.Row
        self.cursor = conn.cursor()

    def get_evetypes(self):
        return self.__fetch_table('evetypes')
def converter_func(data):
    print('converter_func({})\n'.format(data))
    return pickle.loads(data)


class MyObj:

    def __init__(self, arg):
        self.arg = arg

    def __str__(self):
        return 'MyObj({!r})'.format(self.arg)


sqlite3.register_adapter(MyObj, adapter_func)
sqlite3.register_converter("MyObj", converter_func)

to_save = [
    (MyObj('this is a value to save'),),
    (MyObj(42),),
]

with sqlite3.connect(db_filename, detect_types=sqlite3.PARSE_COLNAMES) as conn:
    conn.execute("""
        CREATE TABLE if NOT EXISTS obj2 (
            id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
            data text
        )
    """)
    cursor = conn.cursor()
def __init__(self, db_path):
    # SQLite stores bools as 0 or 1, convert them to python bool
    sqlite3.register_converter('BOOLEAN', lambda v: int(v) == 1)
    conn = sqlite3.connect(db_path, detect_types=sqlite3.PARSE_DECLTYPES)
    conn.row_factory = sqlite3.Row
    self.cursor = conn.cursor()
import sqlite3, MySQLdb
from datetime import datetime

def adapt_datetime(dt):
    return int((dt - datetime(1970, 1, 1)).total_seconds())

def convert_datetime(b):
    return datetime.fromtimestamp(int(b))

sqlite3.register_adapter(datetime, adapt_datetime)
sqlite3.register_converter("DATETIME", convert_datetime)

connsqlite3 = sqlite3.connect('EnergyDataPortal.db',
                              detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
lite = connsqlite3.cursor()
connMySQL = MySQLdb.connect("SERVER", "USERNAME", "PASSWORD", "db_EnergyDataPortal")
my = connMySQL.cursor()

lite.executescript('''
    DROP TABLE IF EXISTS RTWeather;
    DROP TABLE IF EXISTS WifiData;
    DROP TABLE IF EXISTS WifiLocations;
    DROP INDEX IF EXISTS idx_WifiData_locId_dateTime;
    CREATE TABLE IF NOT EXISTS WifiLocations (
        id   INTEGER PRIMARY KEY,
        name TEXT NOT NULL
    );
    CREATE TABLE IF NOT EXISTS WifiData (
        dateTime INTEGER,
# Get DOIs from ezproxy
import sqlite3
import json  # used by the adapter/converter below

def adapt_json(data):
    return (json.dumps(data, sort_keys=True)).encode()

def convert_json(blob):
    return json.loads(blob.decode())

sqlite3.register_adapter(dict, adapt_json)
sqlite3.register_adapter(list, adapt_json)
sqlite3.register_adapter(tuple, adapt_json)
sqlite3.register_converter('JSON', convert_json)

conn = sqlite3.connect("../ezproxy-DOI.db")
sqlite_cursor = conn.cursor()
sqlite_cursor.execute("SELECT doi FROM ezproxy_doi WHERE doi IS NOT NULL")
DOIs = [item[0] for item in sqlite_cursor.fetchall()]

# Build a Graph Tools Representation of DOI Metadata Structure
# Need a list of valid DOIs
from halo import Halo
from graph_tool.all import *
from habanero import Crossref
from requests.exceptions import HTTPError, ConnectionError
from pprint import pprint
from time import sleep

# AUTHOR ROUTINE
# See the file COPYING for details.

import os, sys, time, traceback, logging
import shutil
#import dataset
import sqlite3
import uuid as uuidlib
import glob

import timer
from class_item import *
from utils import *

sqlite3.register_converter('PUID', lambda b: uuidlib.UUID(b))
sqlite3.register_adapter(uuidlib.UUID, lambda u: str(u))

dlog = None
clog = None


class Database:

    def __init__(self):
        while not glob.ready:
            print '.'
            time.sleep(1)
        self.connect()

    def connect(self):
import sqlite3
import re

dbname = 'fasolaminutes_parsing/minutes.db'

newline_re = re.compile(r"\s*\\n+\s*")

def parse_text(text):
    """Parse as either UTF-8 or Mac Roman"""
    try:
        text = text.decode('utf-8')
    except UnicodeDecodeError:
        text = text.decode('mac-roman')
    # Some text contains literal newlines (that should just be spaces)
    if '\\n' in text:
        text = newline_re.sub(' ', text)
    # Minutes text is separated on vertical newlines
    text = text.replace('\v', '\n')
    return text

sqlite3.register_converter("TEXT", parse_text)

def open():
    """Open and return the minutes db"""
    db = sqlite3.connect(dbname, detect_types=sqlite3.PARSE_DECLTYPES)
    db.row_factory = sqlite3.Row
    return db
        mime = process.communicate()[0].strip()
        self.hash, self.name, self.mime = hash, name, mime

    def __repr__(self):
        return "<File {0} {1}: {2}>".format(self.mime, self.hash, repr(self.name))


def adapt_file(file):
    return u"{0}:{1}:{2}".format(file.hash, file.mime, file.name)

def convert_file(s):
    hash, mime, name = s.split(":", 2)
    return File(hash=hash, name=name, mime=mime)


sqlite3.enable_callback_tracebacks(True)

sqlite3.register_adapter(Coordinates, adapt_coordinates)
sqlite3.register_converter("coordinates", convert_coordinates)
sqlite3.register_adapter(File, adapt_file)
sqlite3.register_converter("file", convert_file)

db = sqlite3.connect(os.path.join(os.path.dirname(os.path.realpath(__file__)), "db.sqlite"),
                     detect_types=sqlite3.PARSE_DECLTYPES)
db.row_factory = sqlite3.Row
db.execute("pragma foreign_keys = ON")
def construct_python_tuple(self, node):
    return tuple(self.construct_sequence(node))

def represent_python_tuple(self, data):
    return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)

yaml.SafeLoader.add_constructor('tag:yaml.org,2002:python/tuple', construct_python_tuple)
yaml.SafeDumper.add_representer(tuple, represent_python_tuple)

# for some reason, sqlite doesn't parse to int before this, despite the column affinity
# it gives the register_converter function a bytestring :/
def integer_boolean_to_bool(integer_boolean):
    return bool(int(integer_boolean))

# sqlite mod
sqlite3.register_adapter(dict, yaml.safe_dump)
sqlite3.register_adapter(list, yaml.safe_dump)
sqlite3.register_adapter(tuple, yaml.safe_dump)
sqlite3.register_adapter(bool, int)
sqlite3.register_converter('INTEGER_BOOLEAN', integer_boolean_to_bool)
sqlite3.register_converter('TEXT_YAML', yaml.safe_load)
def register_custom_types(self, name, ty, con, decon):
    sqlite3.register_adapter(ty, con)
    sqlite3.register_converter(name, decon)
@author: dbad004
"""

import sqlite3
from numpy import ndarray
import zlib
from six.moves import cPickle as pickle
import six
import numpy as np


# teach sqlite about numpy arrays
def adapt_numarray(array):
    return sqlite3.Binary(zlib.compress(array.dumps()))

def convert_numarray(s):
    #print(type(s))
    try:
        # assume data is zipped
        uz = zlib.decompress(s)
        #print(uz)
        if six.PY2:
            return np.loads(uz)
        else:
            return np.loads(uz, encoding='bytes')
    except:
        # fall back and just try unpickling
        return pickle.loads(s)

sqlite3.register_adapter(ndarray, adapt_numarray)
sqlite3.register_converter("ndarray", convert_numarray)
def __init__(self, filename=None, clean_n_entries=30):
    """
    :param filename: path to database file
    :param clean_n_entries: during cleaning delete n % entries.
    """
    self.clean_n_entries = clean_n_entries
    import sqlite3

    # register numpy array conversion functions
    # uses "np_array" type in sql tables
    def adapt_array(arr):
        out = BytesIO()
        np.savez_compressed(out, offsets=arr)
        out.seek(0)
        return out.read()

    def convert_array(text):
        out = BytesIO(text)
        out.seek(0)
        npz = np.load(out)
        arr = npz['offsets']
        npz.close()
        return arr

    # Converts np.array to TEXT when inserting
    sqlite3.register_adapter(np.ndarray, adapt_array)
    # Converts TEXT to np.array when selecting
    sqlite3.register_converter("NPARRAY", convert_array)

    self._database = sqlite3.connect(
        filename if filename is not None else ":memory:",
        detect_types=sqlite3.PARSE_DECLTYPES,
        timeout=5, isolation_level=None)
    self.filename = filename
    self.lru_timeout = 5.0  # python sqlite3 specifies timeout in seconds instead of milliseconds.

    try:
        cursor = self._database.execute("select num from version")
        row = cursor.fetchone()
        if not row:
            self.db_version = TrajectoryInfoCache.DB_VERSION
            version = self.db_version
        else:
            version = row[0]
        if version != TrajectoryInfoCache.DB_VERSION:
            # drop old db? or try to convert?
            self._create_new_db()
    except sqlite3.OperationalError as e:
        if "no such table" in str(e):
            self._create_new_db()
            self.db_version = TrajectoryInfoCache.DB_VERSION
    except sqlite3.DatabaseError:
        bak = filename + ".bak"
        warnings.warn(
            "TrajInfo database corrupted. Backing up file to %s and start with new one." % bak)
        self._database.close()
        import shutil
        shutil.move(filename, bak)
        SqliteDB.__init__(self, filename)
def __repr__(self):
    return "Name: %s, Price: %s " % (self.name, self.price)


def adapt_car(car):
    return "%d;%s;%d" % (car.cid, car.name, car.price)

def convert_car(s):
    cid, name, price = s.split(";")
    return Car(cid, name, price)


lite.register_adapter(Car, adapt_car)
lite.register_converter("Car", convert_car)

con = lite.connect(':memory:', detect_types=lite.PARSE_DECLTYPES)

c1 = Car(1, 'Audi', 52642)
c2 = Car(2, 'Mercedes', 57127)
c3 = Car(3, 'Skoda', 9000)

with con:
    cur = con.cursor()
    cur.execute("create table Cars(c Car)")
    cur.execute("insert into Cars values(?)", (c1,))
    cur.execute("insert into Cars values(?)", (c2,))
    cur.execute("insert into Cars values(?)", (c3,))
import sqlite3
import decimal

D = decimal.Decimal

def adapt_decimal(d):
    return str(d)

def convert_decimal(s):
    return D(s)

# Register the adapter
sqlite3.register_adapter(D, adapt_decimal)
# Register the converter
sqlite3.register_converter("decimal", convert_decimal)

DATABASE = "myshop.db"

def query(sql, data):
    with sqlite3.connect(DATABASE) as db:
        cursor = db.cursor()
        cursor.execute("PRAGMA foreign_keys = ON")
        cursor.execute(sql, data)
        db.commit()
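# A minimal round-trip sketch (separate from the module above) exercising the
# same Decimal handler pattern. It assumes Python 3, where the converter
# receives bytes, so the value is decoded before Decimal() parses it; the
# table name `prices` is illustrative. The payoff of storing Decimals as text
# is exactness: "0.1" survives the trip with no float rounding.
import sqlite3
from decimal import Decimal

sqlite3.register_adapter(Decimal, str)
sqlite3.register_converter("decimal", lambda b: Decimal(b.decode()))

db = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
db.execute("CREATE TABLE prices (amount decimal)")
db.execute("INSERT INTO prices VALUES (?)", (Decimal("0.1"),))
print(repr(db.execute("SELECT amount FROM prices").fetchone()[0]))  # Decimal('0.1')
db.close()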
def _updateChannelAndProgramListCaches(self, date, progress_callback,
                                       clearExistingProgramList):
    # todo workaround service.py 'forgets' the adapter and converter set in _initialize.. wtf?!
    sqlite3.register_adapter(datetime.datetime, self.adapt_datetime)
    sqlite3.register_converter('timestamp', self.convert_datetime)
    if not self._isCacheExpired(date):
        return
    self.updateInProgress = True
    self.updateFailed = False
    dateStr = date.strftime('%Y-%m-%d')
    c = self.conn.cursor()
    try:
        xbmc.log('[script.brguide] Updating caches...', xbmc.LOGDEBUG)
        if progress_callback:
            progress_callback(0)
        if self.settingsChanged:
            c.execute('DELETE FROM channels WHERE source=?', [self.source.KEY])
            c.execute('DELETE FROM programs WHERE source=?', [self.source.KEY])
            c.execute("DELETE FROM updates WHERE source=?", [self.source.KEY])
            self.settingsChanged = False  # only want to update once due to changed settings
        if clearExistingProgramList:
            # cascades and deletes associated programs records
            c.execute("DELETE FROM updates WHERE source=?", [self.source.KEY])
        else:
            # cascades and deletes associated programs records
            c.execute("DELETE FROM updates WHERE source=? AND date=?",
                      [self.source.KEY, dateStr])
        # programs updated
        c.execute("INSERT INTO updates(source, date, programs_updated) VALUES(?, ?, ?)",
                  [self.source.KEY, dateStr, datetime.datetime.now()])
        updatesId = c.lastrowid
        imported = imported_channels = imported_programs = 0
        for item in self.source.getDataFromExternal(date, progress_callback):
            imported += 1
            if imported % 10000 == 0:
                self.conn.commit()
            if isinstance(item, Channel):
                imported_channels += 1
                channel = item
                c.execute(
                    'INSERT OR IGNORE INTO channels(id, title, logo, stream_url, visible, weight, source) '
                    'VALUES(?, ?, ?, ?, ?, (CASE ? WHEN -1 THEN (SELECT COALESCE(MAX(weight)+1, 0) FROM channels WHERE source=?) ELSE ? END), ?)',
                    [channel.id, channel.title, channel.logo, channel.streamUrl,
                     channel.visible, channel.weight, self.source.KEY,
                     channel.weight, self.source.KEY])
                if not c.rowcount:
                    c.execute(
                        'UPDATE channels SET title=?, logo=?, stream_url=?, '
                        'visible=(CASE ? WHEN -1 THEN visible ELSE ? END), '
                        'weight=(CASE ? WHEN -1 THEN weight ELSE ? END) '
                        'WHERE id=? AND source=?',
                        [channel.title, channel.logo, channel.streamUrl,
                         channel.weight, channel.visible, channel.weight,
                         channel.weight, channel.id, self.source.KEY])
            elif isinstance(item, Program):
                imported_programs += 1
                program = item
                if isinstance(program.channel, Channel):
                    channel = program.channel.id
                else:
                    channel = program.channel
                c.execute(
                    'INSERT INTO programs(channel, title, start_date, end_date, description, image_large, image_small, source, updates_id) '
                    'VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)',
                    [channel, program.title, program.startDate, program.endDate,
                     program.description, program.imageLarge, program.imageSmall,
                     self.source.KEY, updatesId])
        # channels updated
        c.execute("UPDATE sources SET channels_updated=? WHERE id=?",
                  [datetime.datetime.now(), self.source.KEY])
        self.conn.commit()
        if imported_channels == 0 or imported_programs == 0:
            self.updateFailed = True
    except SourceUpdateCanceledException:
        # force source update on next load
        c.execute('UPDATE sources SET channels_updated=? WHERE id=?',
                  [0, self.source.KEY])
        # cascades and deletes associated programs records
        c.execute("DELETE FROM updates WHERE source=?", [self.source.KEY])
        self.conn.commit()
    except Exception:
        import traceback as tb
        import sys
        (etype, value, traceback) = sys.exc_info()
        tb.print_exception(etype, value, traceback)
        try:
            self.conn.rollback()
        except sqlite3.OperationalError:
            pass  # no transaction is active
        try:
            # invalidate cached data
            c.execute('UPDATE sources SET channels_updated=? WHERE id=?',
                      [0, self.source.KEY])
            self.conn.commit()
        except sqlite3.OperationalError:
            pass  # database is locked
        self.updateFailed = True
    finally:
        self.updateInProgress = False
        c.close()
start = sys.argv[2]

ADDON = xbmcaddon.Addon(id='script.tvguide.fullscreen')

def adapt_datetime(ts):
    # http://docs.python.org/2/library/sqlite3.html#registering-an-adapter-callable
    return time.mktime(ts.timetuple())

def convert_datetime(ts):
    try:
        return datetime.datetime.fromtimestamp(float(ts))
    except ValueError:
        return None

sqlite3.register_adapter(datetime.datetime, adapt_datetime)
sqlite3.register_converter('timestamp', convert_datetime)

path = xbmc.translatePath('special://profile/addon_data/script.tvguide.fullscreen/source.db')
try:
    conn = sqlite3.connect(path, detect_types=sqlite3.PARSE_DECLTYPES)
    conn.row_factory = sqlite3.Row
except Exception as detail:
    xbmc.log("EXCEPTION: (script.tvguide.fullscreen) %s" % detail, xbmc.LOGERROR)

# Get the Program Info from the database
c = conn.cursor()
startDate = datetime.datetime.fromtimestamp(float(start))
c.execute('SELECT DISTINCT * FROM programs WHERE channel=? AND start_date = ?',
          [channel, startDate])
for row in c:
    title = row["title"]
    endDate = row["end_date"]
    duration = endDate - startDate
    res = "☆%08d☆\n%s★%08d★\n%s\n" % (count, string + '\n', count, string + '\n')
    '''
    res = "○%08d○%s○\n%s●%08d●%s●\n%s\n" % (count, name, string + '\n', count, name, string + '\n')
    '''
    '''
    res = "%s\n" % (string + '\n')
    '''
    return res


# add a STRING type
def convert_string(bstring):
    return bstring.decode('utf8')

sqlite3.register_converter("STRING", convert_string)


# dump two columns of data into a list or a dict
def DumpCol(con, cmd, mode):
    c = con.cursor()
    c.execute(cmd)
    name_list = []
    for row in c:
        name_list.append([row[0], row[1]])
    if mode == 'l':
        return name_list
    elif mode == 'd':
        name_dic = {}
        for [i, name] in name_list:
            name_dic[i] = name
import re  # needed for re_sp below
import sqlite3
from sqlite3 import OperationalError, InterfaceError
from textwrap import dedent

from PIL import Image
import unidecode
import yaml
from bunch import Bunch
from datetime import datetime
from subprocess import DEVNULL, STDOUT, check_call
import tempfile
from urllib.request import urlretrieve

re_sp = re.compile(r"\s+")

sqlite3.register_converter("BOOLEAN", lambda x: int(x) > 0)
#sqlite3.register_converter("DATE", lambda x: datetime.strptime(str(x), "%Y-%m-%d").date())
sqlite3.enable_callback_tracebacks(True)

def dict_factory(cursor, row):
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d

def bunch_factory(cursor, row):
    d = dict_factory(cursor, row)
    return Bunch(**d)

def one_factory(cursor, row):
    return row[0]
SCHEMA_FILENAME = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    'schema.sql',
)

EVENT_ID_TYPE_MAP = {
    0: ActionMonitoringTriggeredEvent,
    1: ActionClaimRewardTriggeredEvent,
}
EVENT_TYPE_ID_MAP = {v: k for k, v in EVENT_ID_TYPE_MAP.items()}


def convert_hex(raw: bytes) -> int:
    return int(raw, 16)


sqlite3.register_converter('HEX_INT', convert_hex)


class SharedDatabase:
    """ DB shared by MS and request collector """

    def __init__(self, filename: str, allow_create: bool = False):
        log.info('Opening database', filename=filename)
        if filename != ':memory:' and os.path.dirname(filename):
            os.makedirs(os.path.dirname(filename), exist_ok=True)
        mode = 'rwc' if allow_create else 'rw'
        self.conn = sqlite3.connect(
            f'file:{filename}?mode={mode}',
            detect_types=sqlite3.PARSE_DECLTYPES,
            uri=True,
            isolation_level=None,  # Disable sqlite3 module's implicit transaction management
@memoized
def adapt_datetype(ts):
    return int(time.mktime(ts.timetuple()))

@memoized
def convert_datetype(klass, s):
    return klass.fromtimestamp(float(s))

for klass, prefix in [(datetime.datetime, 'datetime'), (datetime.date, 'date')]:
    convert = functools.partial(convert_datetype, klass)
    sqlite3.register_adapter(klass, adapt_datetype)
    sqlite3.register_converter('py%s' % prefix, convert)

conn = sqlite3.connect('mining.sqlite', detect_types=sqlite3.PARSE_DECLTYPES)
conn.row_factory = sqlite3.Row

def get_cursor():
    return conn.cursor()

c = get_cursor()

import_schema = [
    "DROP TABLE IF EXISTS sections;",
    """CREATE TABLE sections (
        name CHARACTER VARYING(20) PRIMARY KEY
def connect(name: str, debug: bool = False,
            version: int = -1) -> ConnectionPlus:
    """
    Connect or create database. If debug the queries will be echoed back.
    This function takes care of registering the numpy/sqlite type
    converters that we need.

    Args:
        name: name or path to the sqlite file
        debug: whether or not to turn on tracing
        version: which version to create. We count from 0. -1 means 'latest'.
            Should always be left at -1 except when testing.

    Returns:
        conn: connection object to the database (note, it is
            `ConnectionPlus`, not `sqlite3.Connection`)
    """
    # register numpy->binary(TEXT) adapter
    # the typing here is ignored due to what we think is a flaw in typeshed
    # see https://github.com/python/typeshed/issues/2429
    sqlite3.register_adapter(np.ndarray, _adapt_array)
    # register binary(TEXT) -> numpy converter
    # for some reasons mypy complains about this
    sqlite3.register_converter("array", _convert_array)

    sqlite3_conn = sqlite3.connect(name, detect_types=sqlite3.PARSE_DECLTYPES,
                                   check_same_thread=True)
    conn = ConnectionPlus(sqlite3_conn)

    latest_supported_version = _latest_available_version()
    db_version = get_user_version(conn)

    if db_version > latest_supported_version:
        raise RuntimeError(f"Database {name} is version {db_version} but this "
                           f"version of QCoDeS supports up to "
                           f"version {latest_supported_version}")

    # sqlite3 options
    conn.row_factory = sqlite3.Row

    # Make sure numpy ints and floats types are inserted properly
    for numpy_int in [
            np.int, np.int8, np.int16, np.int32, np.int64,
            np.uint, np.uint8, np.uint16, np.uint32, np.uint64
    ]:
        sqlite3.register_adapter(numpy_int, int)

    sqlite3.register_converter("numeric", _convert_numeric)

    for numpy_float in [np.float, np.float16, np.float32, np.float64]:
        sqlite3.register_adapter(numpy_float, _adapt_float)

    for complex_type in complex_types:
        sqlite3.register_adapter(complex_type, _adapt_complex)
    sqlite3.register_converter("complex", _convert_complex)

    if debug:
        conn.set_trace_callback(print)

    init_db(conn)
    perform_db_upgrade(conn, version=version)
    return conn
def setup_decimal_support():
    """Setup sqlite3 to support conversions to/from Decimal numbers."""
    dbapi.register_adapter(Decimal, adapt_decimal)
    dbapi.register_converter("decimal", convert_decimal)
# Brendan Albert
# CPSC 449 - Backend Engineering

import flask
import json
import uuid
from flask import request, jsonify, g, make_response, render_template
import sqlite3

app = flask.Flask(__name__)
app.config.from_envvar('APP_CONFIG')

track_shard_db_names = ['TRACKS_SHARD1', 'TRACKS_SHARD2', 'TRACKS_SHARD3']
db_context_names = ['_trackshard1', '_trackshard2', '_trackshard3', '_database']

sqlite3.register_converter('GUID', lambda b: uuid.UUID(bytes_le=b))

# called right before any request to establish db connection
# connection saved globally in 'g'
@app.before_request
def connect_to_db():
    g._trackshard1 = get_db(track_shard_db_names[0])
    g._trackshard2 = get_db(track_shard_db_names[1])
    g._trackshard3 = get_db(track_shard_db_names[2])
    g.db = get_db('users_playlists_descriptions')

def make_dicts(cursor, row):
    return dict((cursor.description[idx][0], value)
                for idx, value in enumerate(row))

def get_db(db_name):