def connect(self):
    """Lazily open the SQLite connection and cursor for this backend.

    Reuses ``self._connection`` / ``self._cursor`` when they already exist.
    Rows are returned as dicts (column name -> value) via a row trace.

    :return: ``(connection, cursor)`` tuple.
    :raises Exception: re-raises whatever the connection attempt raised,
        after logging a warning.
    """
    try:
        logger.debug2(u"Connecting to sqlite db '%s'" % self._database)
        if not self._connection:
            # NOTE(review): SQLITE_CONFIG_MULTITHREAD is a sqlite3_config()
            # option, not an open flag — OR-ing it into `flags` is almost
            # certainly unintended (its numeric value may alias a real open
            # flag). Left as-is to preserve behavior; confirm against the
            # apsw/SQLite docs before changing.
            self._connection = Connection(
                filename=self._database,
                flags=SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE | SQLITE_CONFIG_MULTITHREAD,
                vfs=None,
                statementcachesize=100
            )
        if not self._cursor:
            def rowtrace(cursor, row):
                # Zip column descriptions with values so every row comes
                # back as a {column_name: value} dict instead of a tuple.
                valueSet = {}
                for rowDescription, current in izip(cursor.getdescription(), row):
                    valueSet[rowDescription[0]] = current
                return valueSet

            self._cursor = self._connection.cursor()
            if not self._synchronous:
                # Favor speed over durability when synchronous mode is off.
                self._cursor.execute('PRAGMA synchronous=OFF')
                self._cursor.execute('PRAGMA temp_store=MEMORY')
                self._cursor.execute('PRAGMA cache_size=5000')
            if self._databaseCharset.lower() in ('utf8', 'utf-8'):
                self._cursor.execute('PRAGMA encoding="UTF-8"')
            self._cursor.setrowtrace(rowtrace)
        return (self._connection, self._cursor)
    except Exception as connectionError:
        # Fixed typo in the log message ("databse" -> "database").
        logger.warning("Problem connecting to SQLite database: {!r}", connectionError)
        # Bare `raise` preserves the original traceback; `raise connectionError`
        # would reset it on Python 2.
        raise
def test_upgrade_dispersy(self):
    """After the dispersy upgrade, the obsolete communities must be gone."""
    self.torrent_upgrader._update_dispersy()

    dispersy_db_path = os.path.join(self.sqlite_path, u"dispersy.db")
    db_connection = Connection(dispersy_db_path)
    db_cursor = db_connection.cursor()

    # Neither of these community classifications may survive the upgrade.
    for query in (u"SELECT * FROM community WHERE classification == 'SearchCommunity'",
                  u"SELECT * FROM community WHERE classification == 'MetadataCommunity'"):
        self.assertFalse(list(db_cursor.execute(query)))

    db_cursor.close()
    db_connection.close()
def match_character_extracts(anime_database, character_extracts):
    """Match and merge characters using an anime database.

    Generator: loads every character-extract JSON file, inserts each
    (character name, anime name) pair into the ``unmatched_character``
    table keyed by a fresh ``reference_id``, then runs ``MATCH_SQL`` to
    group matching reference ids and yields one merged character per
    group (skipping groups flagged as sensitive).
    """
    # Monotonic key linking DB rows back to the in-memory character dicts.
    next_reference_id = 1
    reference_id_to_character = {}
    with Connection(anime_database) as connection:
        # MATCH_SQL presumably relies on this fuzzy-similarity SQL
        # function — TODO confirm against MATCH_SQL's definition.
        connection.createscalarfunction("lv_jaro", jaro, numargs=2,
                                        deterministic=True)
        cursor = connection.cursor()
        # Start from a clean slate; the table may hold rows from a prior run.
        cursor.execute("delete from unmatched_character")
        for filename in tqdm(character_extracts):
            # pylint: disable=line-too-long
            with open_transcoded(filename, "r", errors="ignore") as character_fileobj:
                character_json = json.loads(
                    character_fileobj.read().decode("utf8"))
                for character in tqdm(character_json):
                    reference_id = next_reference_id
                    reference_id_to_character[reference_id] = character
                    # All aliases across every language/name category.
                    character_names = \
                        list(chain.from_iterable(character["names"].values()))
                    anime_names = [a["name"] for a in character["anime_roles"]]
                    # One row per (alias, anime) combination so matching can
                    # hit on any pairing.
                    for character_name, anime_name in product(
                            character_names, anime_names):
                        cursor.execute(
                            "insert into unmatched_character ("
                            "character_name, normalized_character_name,"
                            "anime_name, normalized_anime_name, "
                            "reference_id) values (?, ?, ?, ?, ?)",
                            (character_name,
                             normalize_character_name(character_name),
                             anime_name,
                             normalize_anime_name(anime_name),
                             reference_id))
                    next_reference_id += 1
        # Rebuild indexes after the bulk insert before the heavy match query.
        cursor.execute("REINDEX")
        # MATCH_SQL yields one CSV of reference ids per matched group.
        for (reference_id_csv, ) in cursor.execute(MATCH_SQL):
            reference_ids = list(map(int, reference_id_csv.split(",")))
            characters = map(reference_id_to_character.get, reference_ids)
            # Fold the whole group into a single merged character record.
            merged_character = reduce(merge_character_metadata, characters)
            if is_sensitive_metadata(merged_character):
                print(
                    f"Skipping character {merged_character['names']['en']} "
                    f"due to sensitive metadata.",
                    file=sys.stderr)
                continue
            yield merged_character
def create_anime_db(database, anime_extract):
    """Create an anime database from an anime extract."""
    # Read and parse the extract up front; the file handle is not needed
    # once the JSON payload is in memory.
    with open_transcoded(anime_extract, "r", errors="ignore") as extract_file:
        payload = extract_file.read().decode("utf8")
    all_anime = json.loads(payload)

    with Connection(database) as connection:
        connection.createscalarfunction("lv_jaro", jaro, numargs=2,
                                        deterministic=True)
        cursor = connection.cursor()
        cursor.execute(SCHEMA_SQL)

        # Anime ids are assigned sequentially starting at 1.
        for anime_id, entry in enumerate(tqdm(all_anime), start=1):
            cursor.execute("insert into anime values (:anime_id)",
                           {"anime_id": anime_id})

            # One row per known alias of this anime.
            for alias in entry["names"]:
                cursor.execute(
                    "insert into anime_name (is_primary, anime_id, "
                    "anime_name, normalized_anime_name) values (:is_primary, "
                    ":anime_id, :anime_name, :normalized_anime_name)",
                    {
                        "is_primary": alias["is_primary"],
                        "anime_id": anime_id,
                        "anime_name": alias["name"],
                        "normalized_anime_name":
                            normalize_anime_name(alias["name"]),
                    })

            # One row per character appearing in this anime.
            for role in entry["characters"]:
                cursor.execute(
                    "insert into character_name (anime_id, character_name, "
                    "normalized_character_name) values (:anime_id, "
                    ":character_name, :normalized_character_name)",
                    {
                        "anime_id": anime_id,
                        "character_name": role["name"],
                        "normalized_character_name":
                            normalize_character_name(role["name"]),
                    })

        # Rebuild indexes once the bulk load is complete.
        cursor.execute("REINDEX")
def __init__(self, filename):
    """Open *filename* as a read-only SQLite connection.

    ``version`` is a fixed identification string reported by this wrapper.
    """
    self.version = "4.1.25-SQLite"
    self.inst = Connection(filename, SQLITE_ACCESS_READ)
def setUp(self):
    """Build a fresh in-memory database and a Generic engine per test."""
    memory_connection = Connection(":memory:")
    self.connection = memory_connection
    self.engine = Generic(memory_connection, self.dir_path, False, True)
    Base.setUp(self)
def setUp(self):
    """Build a fresh in-memory database and a factory-made engine per test."""
    memory_connection = Connection(":memory:")
    self.connection = memory_connection
    self.engine = driver_factory(memory_connection, self.dir_path, True, True)
    TestFactory.setUp(self)
def test_basic_fts(self):  # {{{
    # Local imports keep apsw/calibre off the module import path until
    # this test actually runs.
    from apsw import Connection
    from calibre.constants import plugins
    # Scratch in-memory database with calibre's FTS sqlite extension loaded.
    conn = Connection(':memory:')
    plugins.load_apsw_extension(conn, 'sqlite_extension')
    # NOTE(review): the vim fold marker "{{{" suggests the test body
    # continues beyond this chunk — the assertions are likely outside
    # the visible source.