def getLocalDb():
    """Return the process-wide DB handle, opening it lazily on first use."""
    global localDb
    if localDb is None:
        handle = DB(f"{path}/clearer.{name}.sqlite3")
        if doDebug:
            # echo SQL statements while debugging
            handle.echo = True
        localDb = handle
    return localDb
def download(self):
    """Download a database from the server, replacing the collection.

    Emits the "sync"/"download" hook first. If the server answers
    "upgradeRequired", the matching hook is emitted and the caller
    should direct the user to AnkiWeb. If the downloaded db has no
    card while the current collection does, "downloadClobber" is
    returned and the local file is kept. Otherwise the downloaded
    database replaces the collection's database.
    """
    runHook("sync", "download")
    # whether the collection has at least one card.
    localNotEmpty = self.col.db.scalar("select 1 from cards")
    self.col.close()
    cont = self.req("download")
    tpath = self.col.path + ".tmp"
    if cont == "upgradeRequired":
        runHook("sync", "upgradeRequired")
        return
    # Fix: close the temp file deterministically instead of leaking the
    # anonymous file object until garbage collection.
    with open(tpath, "wb") as tfile:
        tfile.write(cont)
    # check the received file is ok
    d = DB(tpath)
    assert d.scalar("pragma integrity_check") == "ok"
    remoteEmpty = not d.scalar("select 1 from cards")
    d.close()
    # accidental clobber?
    if localNotEmpty and remoteEmpty:
        os.unlink(tpath)
        return "downloadClobber"
    # overwrite existing collection
    os.unlink(self.col.path)
    os.rename(tpath, self.col.path)
    self.col = None
def _loadMeta(self):
    # Open (or create) prefs.db and unpickle the '_global' profile row
    # into self.meta.  Returns True only when a fresh profile store was
    # created; on a corrupt file, renames it aside and retries once via
    # recursion (the renamed file no longer exists, so the retry takes
    # the "new" path).
    path = os.path.join(self.base, "prefs.db")
    new = not os.path.exists(path)
    self.db = DB(path, text=str)
    self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
    if not new:
        # load previously created
        try:
            self.meta = cPickle.loads(
                self.db.scalar(
                    "select data from profiles where name = '_global'"))
            return
        except:
            # if we can't load profile, start with a new one
            # NOTE(review): self.db is still open while the file is
            # renamed — confirm this is safe on Windows.
            os.rename(path, path + ".broken")
            return self._loadMeta()
    # create a default global profile
    self.meta = metaConf.copy()
    self.db.execute(
        "insert or replace into profiles values ('_global', ?)",
        cPickle.dumps(metaConf))
    self._setDefaultLang()
    return True
def download(self) -> Optional[str]:
    """Replace the local collection with the server's copy.

    Returns "downloadClobber" when the remote db has no cards but the
    local one does, and None otherwise (including the upgradeRequired
    case, which only fires the corresponding sync-stage hook).
    """
    hooks.sync_stage_did_change("download")
    local_has_cards = self.col.db.scalar("select 1 from cards")
    self.col.close(downgrade=False)
    payload = self.req("download")
    tmp_path = self.col.path + ".tmp"
    if payload == "upgradeRequired":
        hooks.sync_stage_did_change("upgradeRequired")
        return None
    with open(tmp_path, "wb") as out:
        out.write(payload)
    # make sure the file we received is intact
    check = DB(tmp_path)
    assert check.scalar("pragma integrity_check") == "ok"
    remote_has_cards = check.scalar("select 1 from cards")
    check.close()
    # refuse to clobber a non-empty collection with an empty one
    if local_has_cards and not remote_has_cards:
        os.unlink(tmp_path)
        return "downloadClobber"
    # swap the downloaded file into place
    os.unlink(self.col.path)
    os.rename(tmp_path, self.col.path)
    self.col = None
    return None
def operation_upload(self, col, data, session):
    """Replace the session's collection db with the uploaded bytes.

    The upload is written to a temp path and integrity-checked before
    it replaces the existing database; a corrupt upload raises
    HTTPBadRequest and leaves the current collection untouched.
    Returns True on success.
    """
    # Verify integrity of the received database file before replacing our
    # existing db.
    temp_db_path = session.get_collection_path() + ".tmp"
    with open(temp_db_path, 'wb') as f:
        f.write(data)
    try:
        test_db = DB(temp_db_path)
        try:
            ok = test_db.scalar("pragma integrity_check") == "ok"
        finally:
            # Fix: always release the handle, even when the check fails
            # or raises.
            test_db.close()
    except sqlite.Error:
        # Fix: don't leave the corrupt temp file behind.
        os.unlink(temp_db_path)
        raise HTTPBadRequest("Uploaded collection database file is "
                             "corrupt.")
    if not ok:
        os.unlink(temp_db_path)
        raise HTTPBadRequest("Integrity check failed for uploaded "
                             "collection database file.")

    # Overwrite existing db.
    col.close()
    try:
        os.rename(temp_db_path, session.get_collection_path())
    finally:
        col.reopen()

    # If everything went fine, run hook_upload if one is defined.
    if self.hook_upload is not None:
        self.hook_upload(col, session)

    return True
def upload(self, col: Collection, data: bytes, session) -> str: """ Uploads a sqlite database from the client to the sync server. :param anki.collection.Collectio col: :param bytes data: The binary sqlite database from the client. :param .sync_app.SyncUserSession session: The current session. """ # Verify integrity of the received database file before replacing our # existing db. temp_db_path = session.get_collection_path() + ".tmp" with open(temp_db_path, 'wb') as f: f.write(data) try: with DB(temp_db_path) as test_db: self.test_db(test_db) except sqlite.Error as e: raise HTTPBadRequest("Uploaded collection database file is " "corrupt.") # Overwrite existing db. col.close() try: shutil.copyfile(temp_db_path, session.get_collection_path()) finally: col.reopen() # Reopen the media database col.media.connect() return "OK"
def _loadMeta(self):
    # Load the pickled '_global' profile from prefs.db into self.meta,
    # warning the user and recreating the store from scratch when the
    # file is corrupt (the broken file is renamed aside, then the load
    # is retried via recursion).
    path = os.path.join(self.base, "prefs.db")
    new = not os.path.exists(path)
    self.db = DB(path, text=str)
    def recover():
        # if we can't load profile, start with a new one
        os.rename(path, path+".broken")
        QMessageBox.warning(
            None, "Preferences Corrupt", """\
Anki's prefs.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
    try:
        self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
    except:
        recover()
        return self._loadMeta()
    if not new:
        # load previously created
        try:
            self.meta = cPickle.loads(
                self.db.scalar(
                    "select data from profiles where name = '_global'"))
            return
        except:
            recover()
            return self._loadMeta()
    # create a default global profile
    self.meta = metaConf.copy()
    self.db.execute("insert or replace into profiles values ('_global', ?)",
                    cPickle.dumps(metaConf))
    self._setDefaultLang()
    return True
def Collection(path, lock=True, server=False, sync=True):
    "Open a new or existing collection. Path must be unicode."
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        # refuse path separators / drive characters in a new file's name
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    if sync:
        db.execute("pragma cache_size = 10000")
        db.execute("pragma journal_mode = wal")
    else:
        # durability traded for speed when syncing is disabled
        db.execute("pragma synchronous = off")
    # add db to col and do any remaining upgrades
    col = _Collection(db, server)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        col.lock()
    return col
def Deck(path, queue=True, lock=True):
    "Open a new or existing deck. Path must be unicode."
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        # refuse path separators / drive characters in a new file's name
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma cache_size = 20000")
    # add db to deck and do any remaining upgrades
    deck = _Deck(db)
    if ver < CURRENT_VERSION:
        _upgradeDeck(deck, ver)
    elif create:
        deck.addModel(BasicModel(deck))
        deck.addModel(ClozeModel(deck))
        # default to basic
        deck.conf['currentModelId'] = 1
        deck.save()
    if lock:
        deck.lock()
    if not queue:
        return deck
    # rebuild queue
    deck.reset()
    return deck
def _loadMeta(self, retrying=False) -> LoadMetaResult:
    # Load (or rebuild) prefs21.db and the pickled '_global' profile.
    # On a corrupt db the files are deleted and the load retried exactly
    # once; `retrying` doubles as the loadError flag and guards against
    # an infinite retry loop.
    result = LoadMetaResult()
    result.firstTime = False
    result.loadError = retrying
    opath = os.path.join(self.base, "prefs.db")
    path = os.path.join(self.base, "prefs21.db")
    # one-time migration of a 2.0-era prefs.db, skipped on the retry
    if not retrying and os.path.exists(opath) and not os.path.exists(path):
        shutil.copy(opath, path)
    result.firstTime = not os.path.exists(path)
    def recover():
        # if we can't load profile, start with a new one
        if self.db:
            try:
                self.db.close()
            except:
                pass
        for suffix in ("", "-journal"):
            fpath = path + suffix
            if os.path.exists(fpath):
                os.unlink(fpath)
    # open DB file and read data
    try:
        self.db = DB(path)
        assert self.db.scalar("pragma integrity_check") == "ok"
        self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = '_global'"
        )
    except:
        if result.loadError:
            # already failed, prevent infinite loop
            raise
        # delete files and try again
        recover()
        return self._loadMeta(retrying=True)
    # try to read data
    if not result.firstTime:
        try:
            self.meta = self._unpickle(data)
            return result
        except:
            print("resetting corrupt _global")
            result.loadError = True
            result.firstTime = True
    # if new or read failed, create a default global profile
    self.meta = metaConf.copy()
    self.db.execute(
        "insert or replace into profiles values ('_global', ?)",
        self._pickle(metaConf),
    )
    return result
def connect(self):
    """Attach self.db to the ".db" file beside this manager's folder,
    initialising the schema when the file is new.

    Does nothing when the collection runs in server mode.
    """
    if self.col.server:
        return
    db_path = self.dir() + ".db"
    is_new = not os.path.exists(db_path)
    self.db = DB(db_path)
    if is_new:
        self._initDB()
def connect(self):
    """Open the ".db2" database in this manager's directory, creating
    the schema for a fresh file and running any pending upgrades."""
    db_path = self.dir() + ".db2"
    freshly_created = not os.path.exists(db_path)
    # work relative to the manager's directory
    os.chdir(self._dir)
    self.db = DB(db_path)
    if freshly_created:
        self._initDB()
    self.maybeUpgrade()
def upgrade(self):
    # Upgrade the temp copy prepared earlier (self.tmppath) to the
    # current schema and return the resulting collection.  One-shot:
    # tmppath is cleared so a second call would fail the assert.
    assert self.tmppath
    self.db = DB(self.tmppath)
    self._upgradeSchema()
    self.col = _Collection(self.db)
    self._upgradeRest()
    self.tmppath = None
    return self.col
def _loadMeta(self):
    """
    Copy prefs.db to prefs21.db if only the 2.0-era file exists.
    Show an error message and recreate the store if prefs21.db is
    broken.  If no preference database exists, create one and store a
    default global profile built from metaConf.
    Leaves the preferences database in self.db and the '_global'
    preferences dict in self.meta.

    todo: explain call to _setDefaultLang
    """
    opath = os.path.join(self.base, "prefs.db")
    path = os.path.join(self.base, "prefs21.db")
    if os.path.exists(opath) and not os.path.exists(path):
        shutil.copy(opath, path)
    new = not os.path.exists(path)
    def recover():
        # if we can't load profile, start with a new one
        if self.db:
            try:
                self.db.close()
            except:
                pass
        for suffix in ("", "-journal"):
            fpath = path + suffix
            if os.path.exists(fpath):
                os.unlink(fpath)
        QMessageBox.warning(
            None, "Preferences Corrupt", """\
Anki's prefs21.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
    try:
        self.db = DB(path)
        assert self.db.scalar("pragma integrity_check") == "ok"
        self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = '_global'"
        )
    except:
        recover()
        return self._loadMeta()
    if not new:
        # load previously created data
        try:
            self.meta = self._unpickle(data)
            return
        except:
            print("resetting corrupt _global")
    # create a default global profile
    self.meta = metaConf.copy()
    self.db.execute(
        "insert or replace into profiles values ('_global', ?)",
        self._pickle(metaConf))
    self._setDefaultLang()
    return True
def connect(self) -> None:
    """Attach self.db to the ".db2" file in this manager's folder,
    creating and upgrading the schema as required.

    No-op when the collection runs in server mode.
    """
    if self.col.server:
        return
    db_file = self.dir() + ".db2"
    needs_init = not os.path.exists(db_file)
    os.chdir(self._dir)
    self.db = DB(db_file)
    if needs_init:
        self._initDB()
    self.maybeUpgrade()
def connect(self):
    """Ensure a database in the current format exists and is connected
    in self.db; creates and upgrades it as needed.  Skipped entirely in
    server mode."""
    if self.col.server:
        return
    target = self.dir() + ".db2"
    brand_new = not os.path.exists(target)
    os.chdir(self._dir)
    self.db = DB(target)
    if brand_new:
        self._initDB()
    self.maybeUpgrade()
def download(self):
    """Fetch the remote collection and replace the local file with it.

    Emits the "sync"/"download" hook, then asserts the downloaded db
    passes an integrity check before overwriting the collection.
    """
    runHook("sync", "download")
    self.col.close()
    cont = self.req("download")
    tpath = self.col.path + ".tmp"
    # Fix: close the temp file deterministically rather than leaking the
    # anonymous file object until garbage collection.
    with open(tpath, "wb") as tfile:
        tfile.write(cont)
    # check the received file is ok
    d = DB(tpath)
    assert d.scalar("pragma integrity_check") == "ok"
    d.close()
    # overwrite existing collection
    os.unlink(self.col.path)
    os.rename(tpath, self.col.path)
    self.col = None
def Collection(path: str, lock: bool = True, server: Optional[ServerData] = None,
               log: bool = False) -> _Collection:
    "Open a new or existing collection. Path must be unicode."
    assert path.endswith(".anki2")
    (media_dir, media_db) = media_paths_from_col_path(path)
    log_path = ""
    # no separate backend log file in server mode
    if not server:
        log_path = path.replace(".anki2", "2.log")
    backend = RustBackend(path, media_dir, media_db, log_path)
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        # refuse path separators / drive characters in a new file's name
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    # WAL journaling is skipped on Windows
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _Collection(db, backend=backend, server=server, log=log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        try:
            col.lock()
        except:
            # don't leak the db handle if locking fails
            col.db.close()
            raise
    return col
def Collection(path: str, lock: bool = True, server: Optional[ServerData] = None,
               log: bool = False) -> _Collection:
    "Open a new or existing collection. Path must be unicode."
    backend = Backend(path)
    # fixme: this call is temporarily here to ensure the bridge is working
    # on all platforms, and should be removed in a future beta
    assert backend.plus_one(5) == 6
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        # refuse path separators / drive characters in a new file's name
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    # WAL journaling is skipped on Windows
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _Collection(db, backend=backend, server=server, log=log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        try:
            col.lock()
        except:
            # don't leak the db handle if locking fails
            col.db.close()
            raise
    return col
def load(self):
    # Connect to the prefs database and load the pickled config row,
    # falling back to defaultConf (and seeding an empty row) on a fresh
    # install.
    path = self._dbPath()
    self.db = DB(path, text=str)
    self.db.executescript("""
create table if not exists decks (path text primary key);
create table if not exists config (conf text not null);
""")
    conf = self.db.scalar("select conf from config")
    if conf:
        self._conf.update(cPickle.loads(conf))
    else:
        self._conf.update(defaultConf)
        # ensure there's something to update
        self.db.execute("insert or ignore into config values ('')")
    self._addDefaults()
def __init__(self):
    """Open the changelog database (creating schema and indices on
    first run) and compute the next free changelog id."""
    module_dir = os.path.dirname(os.path.abspath(__file__))
    db_path = os.path.join(module_dir, "..", "user_files", "changelog.db")
    must_create = not os.path.exists(db_path)
    self.db = DB(db_path)
    # autocommit only while creating the schema
    self.db.setAutocommit(True)
    if must_create:
        self._create_tables()
        self._create_indices()
    self.db.setAutocommit(False)
    max_id = self.db.scalar("select max(id) from changelog")
    self.next_id = 0 if max_id is None else max_id + 1
def check(self, path):
    "Returns 'ok', 'invalid', or log of fixes applied."
    # operate on a scratch copy so the caller's file is never modified
    self.tmppath = tmpfile(suffix=".anki2")
    shutil.copy(path, self.tmppath)
    # first pass: inspect the copy
    with DB(self.tmppath) as handle:
        verdict = self._check(handle)
    # problems found: attempt repairs on the scratch copy
    if verdict not in ("ok", "invalid"):
        verdict = self._fix(self.tmppath)
    # an invalid file may not be .upgrade()d later
    if verdict == "invalid":
        os.unlink(self.tmppath)
        self.tmppath = None
    return verdict
def downgrade(self, profiles: List[str]) -> List[str]:
    """Downgrade all profiles. Return a list of profiles that couldn't be opened.

    Fix: the original signature read ``profiles=List[str]``, which made
    the typing object the parameter's *default value* rather than its
    annotation; calling without arguments would have iterated over
    ``typing.List[str]``.  The parameter is now properly annotated.
    """
    problem_profiles: List[str] = []
    for name in profiles:
        path = os.path.join(self.base, name, "collection.anki2")
        if not os.path.exists(path):
            continue
        with DB(path) as db:
            if db.scalar("select ver from col") == 11:
                # already at the downgraded schema version
                continue
        try:
            c = Collection(path)
            c.close(save=False, downgrade=True)
        except Exception as e:
            print(e)
            problem_profiles.append(name)
    return problem_profiles
def _loadMeta(self):
    # Open (or create) prefs.db; on first run store a default '_global'
    # profile and return True, otherwise unpickle the existing one into
    # self.meta (returning None).
    path = os.path.join(self.base, "prefs.db")
    new = not os.path.exists(path)
    self.db = DB(path, text=str)
    self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
    if new:
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute("insert into profiles values ('_global', ?)",
                        cPickle.dumps(metaConf))
        self._setDefaultLang()
        return True
    else:
        # load previously created
        self.meta = cPickle.loads(
            self.db.scalar(
                "select data from profiles where name = '_global'"))
def Collection(path, lock=True, server=False, log=False):
    """Open a new or existing collection. Path must be unicode.

    server -- always False in anki without add-on.
    log -- Boolean stating whether log must be made in the file, with same
    name than the collection, but ending in .log.
    """
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        # refuse path separators / drive characters in a new file's name
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    # WAL journaling is skipped on Windows
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _Collection(db, server, log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        col.lock()
    return col
def _loadMeta(self):
    # Migrate prefs.db -> prefs21.db on first 2.1 run, then load the
    # pickled '_global' profile into self.meta; on a corrupt store the
    # file is deleted, the user warned, and the load retried via
    # recursion.
    opath = os.path.join(self.base, "prefs.db")
    path = os.path.join(self.base, "prefs21.db")
    if os.path.exists(opath) and not os.path.exists(path):
        shutil.copy(opath, path)
    new = not os.path.exists(path)
    def recover():
        # if we can't load profile, start with a new one
        if self.db:
            try:
                self.db.close()
            except:
                pass
        os.unlink(path)
        QMessageBox.warning(
            None, "Preferences Corrupt", """\
Anki's prefs21.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
    try:
        self.db = DB(path)
        self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = '_global'")
    except:
        recover()
        return self._loadMeta()
    if not new:
        # load previously created data
        try:
            self.meta = self._unpickle(data)
            return
        except:
            print("resetting corrupt _global")
    # create a default global profile
    self.meta = metaConf.copy()
    self.db.execute("insert or replace into profiles values ('_global', ?)",
                    self._pickle(metaConf))
    self._setDefaultLang()
    return True
def Collection(path, lock=True, server=False, log=False):
    "Open a new or existing collection. Path must be unicode."
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        # refuse path separators / drive characters in a new file's name
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    elif db.scalar("select ver from col") > 11:
        # schema already upgraded past what this code understands;
        # bail out before attempting anything destructive
        db.setAutocommit(False)
        raise Exception("invalidColVersion")
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    # WAL journaling is skipped on Windows
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _ExtCollection(db, server, log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        col.lock()
    return col
def download(self):
    """Replace the local collection with the server's copy.

    Returns None on an "upgradeRequired" answer, "downloadClobber" when
    the remote db has no cards while the local one does, and None after
    a successful replacement.
    """
    localNotEmpty = self.col.db.scalar("select 1 from cards")
    self.col.close()
    cont = self.req("download")
    tpath = self.col.path + ".tmp"
    if cont == "upgradeRequired":
        return
    # Fix: close the temp file deterministically via a context manager
    # instead of leaking the anonymous file object.
    with open(tpath, "wb") as tfile:
        tfile.write(cont)
    # check the received file is ok
    d = DB(tpath)
    assert d.scalar("pragma integrity_check") == "ok"
    remoteEmpty = not d.scalar("select 1 from cards")
    d.close()
    # accidental clobber?
    if localNotEmpty and remoteEmpty:
        os.unlink(tpath)
        return "downloadClobber"
    # overwrite existing collection
    os.unlink(self.col.path)
    os.rename(tpath, self.col.path)
    self.col = None
def run(self):
    # Import a Mnemosyne sqlite export: gather facts into dicts,
    # categorise each card by its fact_view_id, merge tags and
    # scheduling info, then hand each group to the per-type adders.
    db = DB(self.file)
    ver = db.scalar(
        "select value from global_variables where key='version'")
    if not ver.startswith("Mnemosyne SQL 1") and ver not in ("2", "3"):
        self.log.append(_("File version unknown, trying import anyway."))
    # gather facts into temp objects
    curid = None
    notes = {}
    note = None
    for _id, id, k, v in db.execute("""
select _id, id, key, value from facts f, data_for_fact d where
f._id=d._fact_id"""):
        if id != curid:
            if note:
                # pylint: disable=unsubscriptable-object
                notes[note["_id"]] = note
            note = {"_id": _id}
            curid = id
        assert note
        note[k] = v
    if note:
        notes[note["_id"]] = note
    # gather cards
    front = []
    frontback = []
    vocabulary = []
    cloze = {}
    for row in db.execute("""
select _fact_id, fact_view_id, tags, next_rep, last_rep, easiness,
acq_reps+ret_reps, lapses, card_type_id from cards"""):
        # categorize note
        note = notes[row[0]]
        if row[1].endswith(".1"):
            if row[1].startswith("1.") or row[1].startswith("1::"):
                front.append(note)
            elif row[1].startswith("2.") or row[1].startswith("2::"):
                frontback.append(note)
            elif row[1].startswith("3.") or row[1].startswith("3::"):
                vocabulary.append(note)
            elif row[1].startswith("5.1"):
                cloze[row[0]] = note
        # check for None to fix issue where import can error out
        rawTags = row[2]
        if rawTags is None:
            rawTags = ""
        # merge tags into note; \x1f temporarily protects ", " separators
        # while spaces inside tags are converted to underscores
        tags = rawTags.replace(", ", "\x1f").replace(" ", "_")
        tags = tags.replace("\x1f", " ")
        if "tags" not in note:
            note["tags"] = []
        note["tags"] += self.col.tags.split(tags)
        note["tags"] = self.col.tags.canonify(note["tags"])
        # if it's a new card we can go with the defaults
        if row[3] == -1:
            continue
        # add the card
        c = ForeignCard()
        c.factor = int(row[5] * 1000)
        c.reps = row[6]
        c.lapses = row[7]
        # ivl is inferred in mnemosyne
        next, prev = row[3:5]
        c.ivl = max(1, (next - prev) // 86400)
        # work out how long we've got left
        rem = int((next - time.time()) / 86400)
        c.due = self.col.sched.today + rem
        # get ord from the trailing digits of the fact view id
        m = re.search(r".(\d+)$", row[1])
        assert m
        ord = int(m.group(1)) - 1
        if "cards" not in note:
            note["cards"] = {}
        note["cards"][ord] = c
    self._addFronts(front)
    total = self.total
    self._addFrontBacks(frontback)
    total += self.total
    self._addVocabulary(vocabulary)
    self.total += total
    self._addCloze(cloze)
    self.total += total
    self.log.append(
        ngettext("%d note imported.", "%d notes imported.", self.total)
        % self.total)
def reopen(self) -> None:
    "Reconnect to DB (after changing threads, etc)."
    # already connected: nothing to do
    if self.db:
        return
    self.db = DB(self.path)
    self.media.connect()
    self._openLog()