def _check_deck_tree(self):
    """Repair deck-tree inconsistencies in place.

    Walks every deck in name order and fixes, saving each repaired deck:
    duplicate names (made unique with a millisecond timestamp suffix),
    blank "::"-separated sections (renamed to a recovery name), and
    nested decks whose immediate parent is missing (parents created).
    """
    all_decks = self.col.decks.all()
    all_decks.sort(key=operator.itemgetter("name"))
    seen = set()
    for d in all_decks:
        # Duplicate name? Append a millisecond timestamp to disambiguate.
        if d["name"] in seen:
            print("fix duplicate deck name", d["name"].encode("utf8"))
            d["name"] += "%d" % intTime(1000)
            self.save(d)
        # No "::"-separated section may be empty.
        if not all(d["name"].split("::")):
            print("fix deck with missing sections", d["name"].encode("utf8"))
            d["name"] = "recovered%d" % intTime(1000)
            self.save(d)
        # Nested deck: its immediate parent must already exist.
        if "::" in d["name"]:
            parent = "::".join(d["name"].split("::")[:-1])
            if parent not in seen:
                print("fix deck with missing parent", d["name"].encode("utf8"))
                self._ensure_parents(d["name"])
                seen.add(parent)
        seen.add(d["name"])
def sqlite3_for_download(path):
    """Create a fresh, fully initialised collection sqlite3 file at *path*.

    The file must not already exist. Tables are created from the bundled
    SQL assets, the ``col`` row is seeded with the default collection
    conf, default deck, and default deck configuration JSON, indices are
    added, and pragmas are tuned before the database is closed.

    Raises:
        Exception: if *path* already exists.
        ValueError: if the file name contains a path-separator-like
            character ("/", ":", "\\").
    """
    path = os.path.abspath(path)
    if os.path.exists(path):
        raise Exception("This file must not exist!")
    base = os.path.basename(path)
    # Validate explicitly rather than with ``assert``: asserts are stripped
    # under ``python -O``, which would silently skip this safety check.
    for c in ("/", ":", "\\"):
        if c in base:
            raise ValueError("invalid character %r in file name %r" % (c, base))
    db = DB(path)
    # db.isolation_level = None
    db.execute("pragma page_size = 4096")
    db.execute("pragma legacy_file_format = 0")
    db.execute("vacuum")
    # Schema creation from packaged SQL assets.
    db.executescript(
        pkgutil.get_data("djankiserv.assets.sql.sqlite3", "create_tables.sql").decode("utf-8"))
    db.executescript(
        pkgutil.get_data("djankiserv.assets.sql.sqlite3", "init_col_table.sql").decode("utf-8") % ({
            "v": AnkiDataModelBase.VERSION,
            "s": intTime(1000)
        }))
    # Seed the col row with the default JSON blobs shipped as assets.
    c = json.loads(
        pkgutil.get_data("djankiserv.assets.jsonfiles", "default_collection_conf.json").decode("utf-8"))
    g = json.loads(
        pkgutil.get_data("djankiserv.assets.jsonfiles", "default_deck.json").decode("utf-8"))
    g["mod"] = intTime()
    gc = json.loads(
        pkgutil.get_data("djankiserv.assets.jsonfiles", "default_deck_conf.json").decode("utf-8"))
    db.execute("update col set conf = ?, decks = ?, dconf = ?",
               json.dumps(c), json.dumps({"1": g}), json.dumps({"1": gc}))
    db.executescript(
        pkgutil.get_data("djankiserv.assets.sql.sqlite3", "add_indices.sql").decode("utf-8"))
    db.execute("analyze")
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    db.execute("pragma journal_mode = delete"
               )  # this forces flush to disk when closing the file: MUST KEEP
    # db.execute("pragma journal_mode = wal")  # MUSTN'T use this
    # db.isolation_level = ""
    db.close()
def get_or_add(self, name, create=True, dtype=None):
    "Add a deck with NAME. Reuse deck if already exists. Return id as int."
    if dtype is None:
        # Fall back to the packaged default deck template.
        dtype = json.loads(
            pkgutil.get_data("djankiserv.assets.jsonfiles", "default_deck.json").decode("utf-8"))
    name = name.replace('"', "")
    name = unicodedata.normalize("NFC", name)
    # Case-insensitive lookup among the existing decks.
    wanted = name.lower()
    for did, existing in list(self.decks.items()):
        if unicodedata.normalize("NFC", existing["name"].lower()) == wanted:
            return int(did)
    if not create:
        return None
    deck = copy.deepcopy(dtype)
    if "::" in name:
        # Nested deck: make sure every ancestor exists first.
        name = self._ensure_parents(name)
    deck["name"] = name
    # Use a millisecond timestamp as the id, retrying until unused.
    while True:
        did = intTime(1000)
        if str(did) not in self.decks:
            break
    deck["id"] = did
    self.decks[str(did)] = deck
    self.save(deck)
    self.maybe_add_to_active()
    return int(did)
def finish(self):
    """Finalise a sync: stamp the collection and persist it.

    Returns the millisecond timestamp used as the new last-sync time.
    """
    now = intTime(1000)
    self.col.ls = now
    self.col.usn = self.max_usn + 1
    # Force a save even when no other changes were made this session.
    self.col.db.mod = True
    self.col.save(mod=now)
    return now
def _lrnForDeck(self, did):
    """Count learning cards currently due for deck *did*.

    Sums the intra-day learning steps due within the collapse window
    (queue 1) plus the day-learning cards due today (queue 3), each
    capped at the report limit.
    """
    collapse_cutoff = intTime() + self.col.conf["collapseTime"]
    intraday = self.col.db.scalar(
        f"""select sum(remaining/1000) from (select remaining from {self.col.username}.cards where did = %s and queue = 1 and due < %s limit %s) as foo""",
        did,
        collapse_cutoff,
        self.reportLimit,
    )
    daylearn = self.col.db.scalar(
        f"""select count(0) from {self.col.username}.cards where did = %s and queue = 3 and due <= %s limit %s""",
        did,
        self.today,
        self.reportLimit,
    )
    # sum() yields NULL (None) on an empty set, hence the "or 0".
    return int(intraday or 0) + dayearn if False else int(intraday or 0) + daylearn
def save(self, g=None):
    "Can be called with either a deck or a deck configuration."
    # NOTE: truthiness test on purpose — an empty dict is left untouched.
    if g:
        # Stamp the object so the next sync picks it up.
        g["mod"] = intTime()
        g["usn"] = self.col.usn
    self.changed = True