Example #1
def Deck(path, queue=True, lock=True):
    "Open a new or existing deck. Path must be unicode."
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma cache_size = 20000")
    # add db to deck and do any remaining upgrades
    deck = _Deck(db)
    if ver < CURRENT_VERSION:
        _upgradeDeck(deck, ver)
    elif create:
        deck.addModel(BasicModel(deck))
        deck.addModel(ClozeModel(deck))
        # default to basic
        deck.conf['currentModelId'] = 1
        deck.save()
    if lock:
        deck.lock()
    if not queue:
        return deck
    # rebuild queue
    deck.reset()
    return deck
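A minimal call-site sketch for the factory above. The import location and the close() call are assumptions (this is early libanki code); everything else only touches what the example itself sets up:

from anki import Deck  # assumed import location

deck = Deck(u"/home/user/decks/spanish.anki")  # creates the file and both default models on first use
print(deck.conf['currentModelId'])             # written as 1 (basic) on the create path above
deck.save()
deck.close()                                   # assumed to exist on _Deck, as on _Collection in Example #15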
Example #2
    def operation_upload(self, col, data, session):
        # Verify integrity of the received database file before replacing our
        # existing db.
        temp_db_path = session.get_collection_path() + ".tmp"
        with open(temp_db_path, "wb") as f:
            f.write(data)

        try:
            test_db = DB(temp_db_path)
            if test_db.scalar("pragma integrity_check") != "ok":
                raise HTTPBadRequest("Integrity check failed for uploaded " "collection database file.")
            test_db.close()
        except sqlite.Error as e:
            raise HTTPBadRequest("Uploaded collection database file is " "corrupt.")

        # Overwrite existing db.
        col.close()
        try:
            os.rename(temp_db_path, session.get_collection_path())
        finally:
            col.reopen()

        # If everything went fine, run hook_upload if one is defined.
        if self.hook_upload is not None:
            self.hook_upload(col, session)

        return True
Example #3
    def download(self):
        runHook("sync", "download")
        self.col.close()
        cont = self.req("download")
        tpath = self.col.path + ".tmp"
        open(tpath, "wb").write(cont)
        # check the received file is ok
        d = DB(tpath)
        assert d.scalar("pragma integrity_check") == "ok"
        d.close()
        # overwrite existing collection
        os.unlink(self.col.path)
        os.rename(tpath, self.col.path)
        self.col = None
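Examples #2, #3, #8 and #12 all follow the same verify-then-swap pattern: write the received bytes to a .tmp file, run SQLite's integrity check on it through the DB wrapper, and only then replace the live collection. A standalone sketch of that pattern, using only calls that appear in the examples (the helper name and exception are illustrative; assumes import os and the same DB class, e.g. from anki.db import DB):

def replace_collection(col_path, data):
    # write to a temp file so a bad download never clobbers the collection
    tpath = col_path + ".tmp"
    with open(tpath, "wb") as f:
        f.write(data)
    check_db = DB(tpath)
    try:
        if check_db.scalar("pragma integrity_check") != "ok":
            raise Exception("downloaded collection failed integrity check")
    finally:
        check_db.close()
    # same swap as above: remove the old file, move the verified one into place
    if os.path.exists(col_path):
        os.unlink(col_path)
    os.rename(tpath, col_path)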
Example #4
    def _loadMeta(self):
        path = os.path.join(self.base, "prefs.db")
        new = not os.path.exists(path)
        def recover():
            # if we can't load profile, start with a new one
            os.rename(path, path+".broken")
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
        try:
            self.db = DB(path, text=str)
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created
            try:
                self.meta = cPickle.loads(
                    self.db.scalar(
                        "select data from profiles where name = '_global'"))
                return
            except:
                recover()
                return self._loadMeta()
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute("insert or replace into profiles values ('_global', ?)",
                        cPickle.dumps(metaConf))
        self._setDefaultLang()
        return True
Example #5
    def connect(self):
        if self.col.server:
            return
        path = self.dir() + ".db"
        create = not os.path.exists(path)
        self.db = DB(path)
        if create:
            self._initDB()
Example #6
def Collection(path, lock=True, server=False, sync=True):
    "Open a new or existing collection. Path must be unicode."
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    if sync:
        db.execute("pragma cache_size = 10000")
        db.execute("pragma journal_mode = wal")
    else:
        db.execute("pragma synchronous = off")
    # add db to col and do any remaining upgrades
    col = _Collection(db, server)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicModel(col)
        col.save()
    if lock:
        col.lock()
    return col
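A minimal call-site sketch for this factory. The import path is an assumption; the methods called on the returned object are defined on _Collection in Example #15:

from anki.storage import Collection  # assumed import location

col = Collection(u"/home/user/Anki/User 1/collection.anki2")
try:
    print(col.cardCount())  # simple scalar query on the cards table
finally:
    col.close()             # saves and disconnects (see Example #15)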
Example #7
    def connect(self):
        if self.col.server:
            return
        path = self.dir() + ".db2"
        create = not os.path.exists(path)
        os.chdir(self._dir)
        self.db = DB(path)
        if create:
            self._initDB()
        self.maybeUpgrade()
Example #8
    def download(self):
        runHook("sync", "download")
        localNotEmpty = self.col.db.scalar("select 1 from cards")
        self.col.close()
        cont = self.req("download")
        tpath = self.col.path + ".tmp"
        if cont == "upgradeRequired":
            runHook("sync", "upgradeRequired")
            return
        open(tpath, "wb").write(cont)
        # check the received file is ok
        d = DB(tpath)
        assert d.scalar("pragma integrity_check") == "ok"
        remoteEmpty = not d.scalar("select 1 from cards")
        d.close()
        # accidental clobber?
        if localNotEmpty and remoteEmpty:
            os.unlink(tpath)
            return "downloadClobber"
        # overwrite existing collection
        os.unlink(self.col.path)
        os.rename(tpath, self.col.path)
        self.col = None
Example #9
    def _loadMeta(self):
        opath = os.path.join(self.base, "prefs.db")
        path = os.path.join(self.base, "prefs21.db")
        if os.path.exists(opath) and not os.path.exists(path):
            shutil.copy(opath, path)

        new = not os.path.exists(path)
        def recover():
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            for suffix in ("", "-journal"):
                fpath = path + suffix
                if os.path.exists(fpath):
                    os.unlink(fpath)
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs21.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
        try:
            self.db = DB(path)
            assert self.db.scalar("pragma integrity_check") == "ok"
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
            data = self.db.scalar(
                "select cast(data as blob) from profiles where name = '_global'")
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created data
            try:
                self.meta = self._unpickle(data)
                return
            except:
                print("resetting corrupt _global")
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute("insert or replace into profiles values ('_global', ?)",
                        self._pickle(metaConf))
        self._setDefaultLang()
        return True
Example #10
    def _loadMeta(self):
        path = os.path.join(self.base, "prefs.db")
        new = not os.path.exists(path)
        self.db = DB(path, text=str)
        self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        if new:
            # create a default global profile
            self.meta = metaConf.copy()
            self.db.execute("insert into profiles values ('_global', ?)",
                            cPickle.dumps(metaConf))
            self._setDefaultLang()
            return True
        else:
            # load previously created
            self.meta = cPickle.loads(
                self.db.scalar(
                    "select data from profiles where name = '_global'"))
Example #11
    def _loadMeta(self):
        path = os.path.join(self.base, "prefs.db")
        new = not os.path.exists(path)
        self.db = DB(path, text=str)
        self.db.execute(
            """
create table if not exists profiles
(name text primary key, data text not null);"""
        )
        if not new:
            # load previously created
            try:
                self.meta = cPickle.loads(self.db.scalar("select data from profiles where name = '_global'"))
                return
            except:
                # if we can't load profile, start with a new one
                os.rename(path, path + ".broken")
                return self._loadMeta()
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute("insert or replace into profiles values ('_global', ?)", cPickle.dumps(metaConf))
        self._setDefaultLang()
        return True
Example #12
    def download(self) -> Optional[str]:
        runHook("sync", "download")
        localNotEmpty = self.col.db.scalar("select 1 from cards")
        self.col.close()
        cont = self.req("download")
        tpath = self.col.path + ".tmp"
        if cont == "upgradeRequired":
            runHook("sync", "upgradeRequired")
            return None
        open(tpath, "wb").write(cont)
        # check the received file is ok
        d = DB(tpath)
        assert d.scalar("pragma integrity_check") == "ok"
        remoteEmpty = not d.scalar("select 1 from cards")
        d.close()
        # accidental clobber?
        if localNotEmpty and remoteEmpty:
            os.unlink(tpath)
            return "downloadClobber"
        # overwrite existing collection
        os.unlink(self.col.path)
        os.rename(tpath, self.col.path)
        self.col = None
        return None
Example #13
def Collection(path, lock=True, server=False, log=False):
    """Open a new or existing collection. Path must be unicode.

    server -- always False in Anki unless an add-on sets it.
    log -- whether to write a log file with the same name as the collection, but ending in .log.
    """
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _Collection(db, server, log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        col.lock()
    return col
Example #14
class ProfileManager:
    def __init__(self, base=None):
        self.name = None
        self.db = None
        # instantiate base folder
        self._setBaseFolder(base)

    def setupMeta(self):
        # load metadata
        self.firstRun = self._loadMeta()

    # profile load on startup
    def openProfile(self, profile):
        if profile:
            if profile not in self.profiles():
                QMessageBox.critical(None, "Error",
                                     "Requested profile does not exist.")
                sys.exit(1)
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureBaseExists(self):
        try:
            self._ensureExists(self.base)
        except:
            # can't translate, as lang not initialized
            QMessageBox.critical(
                None, "Error", """\
Anki could not create the folder %s. Please ensure that location is not \
read-only and you have permission to write to it. If you cannot fix this \
issue, please see the documentation for information on running Anki from \
a flash drive.""" % self.base)
            raise

    # Folder migration
    ######################################################################

    def _oldFolderLocation(self):
        if isMac:
            return os.path.expanduser("~/Documents/Anki")
        elif isWin:
            from aqt.winpaths import get_personal
            return os.path.join(get_personal(), "Anki")
        else:
            p = os.path.expanduser("~/Anki")
            if os.path.isdir(p):
                return p
            return os.path.expanduser("~/Documents/Anki")

    def maybeMigrateFolder(self):
        oldBase = self._oldFolderLocation()

        if oldBase and not os.path.exists(
                self.base) and os.path.isdir(oldBase):
            shutil.move(oldBase, self.base)

    # Profile load/save
    ######################################################################

    def profiles(self):
        return sorted(x for x in self.db.list("select name from profiles")
                      if x != "_global")

    def _unpickle(self, data):
        class Unpickler(pickle.Unpickler):
            def find_class(self, module, name):
                fn = super().find_class(module, name)
                if module == "sip" and name == "_unpickle_type":

                    def wrapper(mod, obj, args):
                        if mod.startswith("PyQt4") and obj == "QByteArray":
                            # can't trust str objects from python 2
                            return QByteArray()
                        return fn(mod, obj, args)

                    return wrapper
                else:
                    return fn

        up = Unpickler(io.BytesIO(data), errors="ignore")
        return up.load()

    def _pickle(self, obj):
        return pickle.dumps(obj, protocol=0)

    def load(self, name):
        assert name != "_global"
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = ?", name)
        self.name = name
        try:
            self.profile = self._unpickle(data)
        except:
            print("resetting corrupt profile")
            self.profile = profileConf.copy()
            self.save()
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.profile), self.name)
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute("insert or ignore into profiles values (?, ?)", name,
                        self._pickle(prof))
        self.db.commit()

    def remove(self, name):
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def trashCollection(self):
        p = self.collectionPath()
        if os.path.exists(p):
            send2trash(p)

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (oldFolder.lower()
                                             == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = ''.join([oldFolder, '-temp'])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(
                        _("Please remove the folder %s and try again.") %
                        midFolder)
                    self.name = oldName
                    return
            else:
                showWarning(_("Folder already exists."))
                self.name = oldName
                return

        # update name
        self.db.execute("update profiles set name = ? where name = ?", name,
                        oldName)
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except WindowsError as e:
            self.db.rollback()
            if "Access is denied" in e:
                showWarning(
                    _("""\
Anki could not rename your profile because it could not rename the profile \
folder on disk. Please ensure you have permission to write to Documents/Anki \
and no other programs are accessing your profile folders, then try again."""))
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons21"))

    def backupFolder(self):
        return self._ensureExists(os.path.join(self.profileFolder(),
                                               "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _setBaseFolder(self, cmdlineBase):
        if cmdlineBase:
            self.base = os.path.abspath(cmdlineBase)
        elif os.environ.get("ANKI_BASE"):
            self.base = os.path.abspath(os.environ["ANKI_BASE"])
        else:
            self.base = self._defaultBase()
            self.maybeMigrateFolder()
        self.ensureBaseExists()

    def _defaultBase(self):
        if isWin:
            from aqt.winpaths import get_appdata
            return os.path.join(get_appdata(), "Anki2")
        elif isMac:
            return os.path.expanduser("~/Library/Application Support/Anki2")
        else:
            dataDir = os.environ.get("XDG_DATA_HOME",
                                     os.path.expanduser("~/.local/share"))
            if not os.path.exists(dataDir):
                os.makedirs(dataDir)
            return os.path.join(dataDir, "Anki2")

    def _loadMeta(self):
        opath = os.path.join(self.base, "prefs.db")
        path = os.path.join(self.base, "prefs21.db")
        if os.path.exists(opath) and not os.path.exists(path):
            shutil.copy(opath, path)

        new = not os.path.exists(path)

        def recover():
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            for suffix in ("", "-journal"):
                fpath = path + suffix
                if os.path.exists(fpath):
                    os.unlink(fpath)
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs21.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")

        try:
            self.db = DB(path)
            assert self.db.scalar("pragma integrity_check") == "ok"
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
            data = self.db.scalar(
                "select cast(data as blob) from profiles where name = '_global'"
            )
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created data
            try:
                self.meta = self._unpickle(data)
                return
            except:
                print("resetting corrupt _global")
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute(
            "insert or replace into profiles values ('_global', ?)",
            self._pickle(metaConf))
        self._setDefaultLang()
        return True

    def ensureProfile(self):
        "Create a new profile if none exists."
        if self.firstRun:
            self.create(_("User 1"))
            p = os.path.join(self.base, "README.txt")
            open(p, "w").write(
                _("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite + "#startupopts"))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        # the dialog expects _ to be defined, but we're running before
        # setupLang() has been called. so we create a dummy op for now
        import builtins
        builtins.__dict__['_'] = lambda x: x

        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass

        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.accepted.connect(self._onLangSelected)
        d.rejected.connect(lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en"
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(anki.lang.langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in anki.lang.langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        obj = anki.lang.langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(None, "Anki", en % name,
                                 QMessageBox.Yes | QMessageBox.No,
                                 QMessageBox.No)
        if r != QMessageBox.Yes:
            return self._setDefaultLang()
        self.setLang(code)

    def setLang(self, code):
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()
        anki.lang.setLang(code, local=False)

    # OpenGL
    ######################################################################

    def _glPath(self):
        return os.path.join(self.base, "gldriver")

    def glMode(self):
        if isMac:
            return "auto"

        path = self._glPath()
        if not os.path.exists(path):
            return "software"

        mode = open(path, "r").read().strip()

        if mode == "angle" and isWin:
            return mode
        elif mode == "software":
            return mode
        return "auto"

    def setGlMode(self, mode):
        open(self._glPath(), "w").write(mode)

    def nextGlMode(self):
        mode = self.glMode()
        if mode == "software":
            self.setGlMode("auto")
        elif mode == "auto":
            if isWin:
                self.setGlMode("angle")
            else:
                self.setGlMode("software")
        elif mode == "angle":
            self.setGlMode("software")
Example #15
class _Collection:
    db: Optional[DB]
    sched: Union[V1Scheduler, V2Scheduler]
    crt: int
    mod: int
    scm: int
    dty: bool  # no longer used
    _usn: int
    ls: int
    conf: Dict[str, Any]
    _undo: List[Any]
    backend: RustBackend

    def __init__(
        self,
        db: DB,
        backend: RustBackend,
        server: Optional["anki.storage.ServerData"] = None,
        log: bool = False,
    ) -> None:
        self.backend = backend
        self._debugLog = log
        self.db = db
        self.path = db._path
        self._openLog()
        self.log(self.path, anki.version)
        self.server = server
        self._lastSave = time.time()
        self.clearUndo()
        self.media = MediaManager(self, server is not None)
        self.models = ModelManager(self)
        self.decks = DeckManager(self)
        self.tags = TagManager(self)
        self.load()
        if not self.crt:
            d = datetime.datetime.today()
            d -= datetime.timedelta(hours=4)
            d = datetime.datetime(d.year, d.month, d.day)
            d += datetime.timedelta(hours=4)
            self.crt = int(time.mktime(d.timetuple()))
        self._loadScheduler()
        if not self.conf.get("newBury", False):
            self.conf["newBury"] = True
            self.setMod()

    def name(self) -> Any:
        n = os.path.splitext(os.path.basename(self.path))[0]
        return n

    # Scheduler
    ##########################################################################

    supportedSchedulerVersions = (1, 2)

    def schedVer(self) -> Any:
        ver = self.conf.get("schedVer", 1)
        if ver in self.supportedSchedulerVersions:
            return ver
        else:
            raise Exception("Unsupported scheduler version")

    def _loadScheduler(self) -> None:
        ver = self.schedVer()
        if ver == 1:
            self.sched = V1Scheduler(self)
        elif ver == 2:
            self.sched = V2Scheduler(self)
            if not self.server:
                self.conf["localOffset"] = self.sched._current_timezone_offset()
            elif self.server.minutes_west is not None:
                self.conf["localOffset"] = self.server.minutes_west

    def changeSchedulerVer(self, ver: int) -> None:
        if ver == self.schedVer():
            return
        if ver not in self.supportedSchedulerVersions:
            raise Exception("Unsupported scheduler version")

        self.modSchema(check=True)
        self.clearUndo()

        v2Sched = V2Scheduler(self)

        if ver == 1:
            v2Sched.moveToV1()
        else:
            v2Sched.moveToV2()

        self.conf["schedVer"] = ver
        self.setMod()

        self._loadScheduler()

    def localOffset(self) -> Optional[int]:
        "Minutes west of UTC. Only applies to V2 scheduler."
        if isinstance(self.sched, V1Scheduler):
            return None
        else:
            return self.sched._current_timezone_offset()

    # DB-related
    ##########################################################################

    def load(self) -> None:
        (
            self.crt,
            self.mod,
            self.scm,
            self.dty,  # no longer used
            self._usn,
            self.ls,
            conf,
            models,
            decks,
            dconf,
            tags,
        ) = self.db.first(
            """
select crt, mod, scm, dty, usn, ls,
conf, models, decks, dconf, tags from col"""
        )
        self.conf = json.loads(conf)  # type: ignore
        self.models.load(models)
        self.decks.load(decks, dconf)
        self.tags.load(tags)

    def setMod(self) -> None:
        """Mark DB modified.

DB operations and the deck/tag/model managers do this automatically, so this
is only necessary if you modify properties of this object or the conf dict."""
        self.db.mod = True

    def flush(self, mod: Optional[int] = None) -> None:
        "Flush state to DB, updating mod time."
        self.mod = intTime(1000) if mod is None else mod
        self.db.execute(
            """update col set
crt=?, mod=?, scm=?, dty=?, usn=?, ls=?, conf=?""",
            self.crt,
            self.mod,
            self.scm,
            self.dty,
            self._usn,
            self.ls,
            json.dumps(self.conf),
        )

    def save(self, name: Optional[str] = None, mod: Optional[int] = None) -> None:
        "Flush, commit DB, and take out another write lock."
        # let the managers conditionally flush
        self.models.flush()
        self.decks.flush()
        self.tags.flush()
        # and flush deck + bump mod if db has been changed
        if self.db.mod:
            self.flush(mod=mod)
            self.db.commit()
            self.lock()
            self.db.mod = False
        self._markOp(name)
        self._lastSave = time.time()

    def autosave(self) -> Optional[bool]:
        "Save if 5 minutes has passed since last save. True if saved."
        if time.time() - self._lastSave > 300:
            self.save()
            return True
        return None

    def lock(self) -> None:
        # make sure we don't accidentally bump mod time
        mod = self.db.mod
        self.db.execute("update col set mod=mod")
        self.db.mod = mod

    def close(self, save: bool = True) -> None:
        "Disconnect from DB."
        if self.db:
            if save:
                self.save()
            else:
                self.db.rollback()
            if not self.server:
                self.db.setAutocommit(True)
                self.db.execute("pragma journal_mode = delete")
                self.db.setAutocommit(False)
            self.db.close()
            self.db = None
            self.media.close()
            self._closeLog()

    def reopen(self) -> None:
        "Reconnect to DB (after changing threads, etc)."
        if not self.db:
            self.db = DB(self.path)
            self.media.connect()
            self._openLog()

    def rollback(self) -> None:
        self.db.rollback()
        self.load()
        self.lock()

    def modSchema(self, check: bool) -> None:
        "Mark schema modified. Call this first so user can abort if necessary."
        if not self.schemaChanged():
            if check and not hooks.schema_will_change(proceed=True):
                raise AnkiError("abortSchemaMod")
        self.scm = intTime(1000)
        self.setMod()

    def schemaChanged(self) -> Any:
        "True if schema changed since last sync."
        return self.scm > self.ls

    def usn(self) -> Any:
        return self._usn if self.server else -1

    def beforeUpload(self) -> None:
        "Called before a full upload."
        tbls = "notes", "cards", "revlog"
        for t in tbls:
            self.db.execute("update %s set usn=0 where usn=-1" % t)
        # we can save space by removing the log of deletions
        self.db.execute("delete from graves")
        self._usn += 1
        self.models.beforeUpload()
        self.tags.beforeUpload()
        self.decks.beforeUpload()
        self.modSchema(check=False)
        self.ls = self.scm
        # ensure db is compacted before upload
        self.db.setAutocommit(True)
        self.db.execute("vacuum")
        self.db.execute("analyze")
        self.close()

    # Object creation helpers
    ##########################################################################

    def getCard(self, id: int) -> Card:
        return Card(self, id)

    def getNote(self, id: int) -> Note:
        return Note(self, id=id)

    # Utils
    ##########################################################################

    def nextID(self, type: str, inc: bool = True) -> Any:
        type = "next" + type.capitalize()
        id = self.conf.get(type, 1)
        if inc:
            self.conf[type] = id + 1
        return id

    def reset(self) -> None:
        "Rebuild the queue and reload data after DB modified."
        self.sched.reset()

    # Deletion logging
    ##########################################################################

    def _logRem(self, ids: List[int], type: int) -> None:
        self.db.executemany(
            "insert into graves values (%d, ?, %d)" % (self.usn(), type),
            ([x] for x in ids),
        )

    # Notes
    ##########################################################################

    def noteCount(self) -> Any:
        return self.db.scalar("select count() from notes")

    def newNote(self, forDeck: bool = True) -> Note:
        "Return a new note with the current model."
        return Note(self, self.models.current(forDeck))

    def addNote(self, note: Note) -> int:
        """Add a note to the collection. Return number of new cards."""
        # check we have card models available, then save
        cms = self.findTemplates(note)
        if not cms:
            return 0
        note.flush()
        # deck conf governs which of these are used
        due = self.nextID("pos")
        # add cards
        ncards = 0
        for template in cms:
            self._newCard(note, template, due)
            ncards += 1
        return ncards

    def remNotes(self, ids: Iterable[int]) -> None:
        """Deletes notes with the given IDs."""
        self.remCards(self.db.list("select id from cards where nid in " + ids2str(ids)))

    def _remNotes(self, ids: List[int]) -> None:
        """Bulk delete notes by ID. Don't call this directly."""
        if not ids:
            return
        strids = ids2str(ids)
        # we need to log these independently of cards, as one side may have
        # more card templates
        hooks.notes_will_be_deleted(self, ids)
        self._logRem(ids, REM_NOTE)
        self.db.execute("delete from notes where id in %s" % strids)

    # Card creation
    ##########################################################################

    def findTemplates(self, note: Note) -> List:
        "Return (active), non-empty templates."
        model = note.model()
        avail = self.models.availOrds(model, joinFields(note.fields))
        return self._tmplsFromOrds(model, avail)

    def _tmplsFromOrds(self, model: NoteType, avail: List[int]) -> List:
        ok = []
        if model["type"] == MODEL_STD:
            for t in model["tmpls"]:
                if t["ord"] in avail:
                    ok.append(t)
        else:
            # cloze - generate temporary templates from first
            for ord in avail:
                t = copy.copy(model["tmpls"][0])
                t["ord"] = ord
                ok.append(t)
        return ok

    def genCards(self, nids: List[int]) -> List[int]:
        "Generate cards for non-empty templates, return ids to remove."
        # build map of (nid,ord) so we don't create dupes
        snids = ids2str(nids)
        have: Dict[int, Dict[int, int]] = {}
        dids: Dict[int, Optional[int]] = {}
        dues: Dict[int, int] = {}
        for id, nid, ord, did, due, odue, odid, type in self.db.execute(
            "select id, nid, ord, did, due, odue, odid, type from cards where nid in "
            + snids
        ):
            # existing cards
            if nid not in have:
                have[nid] = {}
            have[nid][ord] = id
            # if in a filtered deck, add new cards to original deck
            if odid != 0:
                did = odid
            # and their dids
            if nid in dids:
                if dids[nid] and dids[nid] != did:
                    # cards are in two or more different decks; revert to
                    # model default
                    dids[nid] = None
            else:
                # first card or multiple cards in same deck
                dids[nid] = did
            # save due
            if odid != 0:
                due = odue
            if nid not in dues and type == 0:
                # Add due to new card only if it's the due of a new sibling
                dues[nid] = due
        # build cards for each note
        data = []
        ts = maxID(self.db)
        now = intTime()
        rem = []
        usn = self.usn()
        for nid, mid, flds in self.db.execute(
            "select id, mid, flds from notes where id in " + snids
        ):
            model = self.models.get(mid)
            assert model
            avail = self.models.availOrds(model, flds)
            did = dids.get(nid) or model["did"]
            due = dues.get(nid)
            # add any missing cards
            for t in self._tmplsFromOrds(model, avail):
                doHave = nid in have and t["ord"] in have[nid]
                if not doHave:
                    # check deck is not a cram deck
                    did = t["did"] or did
                    if self.decks.isDyn(did):
                        did = 1
                    # if the deck doesn't exist, use default instead
                    did = self.decks.get(did)["id"]
                    # use sibling due# if there is one, else use a new id
                    if due is None:
                        due = self.nextID("pos")
                    data.append((ts, nid, did, t["ord"], now, usn, due))
                    ts += 1
            # note any cards that need removing
            if nid in have:
                for ord, id in list(have[nid].items()):
                    if ord not in avail:
                        rem.append(id)
        # bulk update
        self.db.executemany(
            """
insert into cards values (?,?,?,?,?,?,0,0,?,0,0,0,0,0,0,0,0,"")""",
            data,
        )
        return rem

    # type is no longer used
    def previewCards(
        self, note: Note, type: int = 0, did: Optional[int] = None
    ) -> List:
        existing_cards = {}
        for card in note.cards():
            existing_cards[card.ord] = card

        all_cards = []
        for idx, template in enumerate(note.model()["tmpls"]):
            if idx in existing_cards:
                all_cards.append(existing_cards[idx])
            else:
                # card not currently in database, generate an ephemeral one
                all_cards.append(self._newCard(note, template, 1, flush=False, did=did))

        return all_cards

    def _newCard(
        self,
        note: Note,
        template: Template,
        due: int,
        flush: bool = True,
        did: Optional[int] = None,
    ) -> Card:
        "Create a new card."
        card = Card(self)
        card.nid = note.id
        card.ord = template["ord"]  # type: ignore
        card.did = self.db.scalar(
            "select did from cards where nid = ? and ord = ?", card.nid, card.ord
        )
        # Use template did (deck override) if valid, otherwise did in argument, otherwise model did
        if not card.did:
            if template["did"] and str(template["did"]) in self.decks.decks:
                card.did = int(template["did"])
            elif did:
                card.did = did
            else:
                card.did = note.model()["did"]
        # if invalid did, use default instead
        deck = self.decks.get(card.did)
        assert deck
        if deck["dyn"]:
            # must not be a filtered deck
            card.did = 1
        else:
            card.did = deck["id"]
        card.due = self._dueForDid(card.did, due)
        if flush:
            card.flush()
        return card

    def _dueForDid(self, did: int, due: int) -> int:
        conf = self.decks.confForDid(did)
        # in order due?
        if conf["new"]["order"] == NEW_CARDS_DUE:
            return due
        else:
            # random mode; seed with note ts so all cards of this note get the
            # same random number
            r = random.Random()
            r.seed(due)
            return r.randrange(1, max(due, 1000))

    # Cards
    ##########################################################################

    def isEmpty(self) -> bool:
        return not self.db.scalar("select 1 from cards limit 1")

    def cardCount(self) -> Any:
        return self.db.scalar("select count() from cards")

    def remCards(self, ids: List[int], notes: bool = True) -> None:
        "Bulk delete cards by ID."
        if not ids:
            return
        sids = ids2str(ids)
        nids = self.db.list("select nid from cards where id in " + sids)
        # remove cards
        self._logRem(ids, REM_CARD)
        self.db.execute("delete from cards where id in " + sids)
        # then notes
        if not notes:
            return
        nids = self.db.list(
            """
select id from notes where id in %s and id not in (select nid from cards)"""
            % ids2str(nids)
        )
        self._remNotes(nids)

    def emptyCids(self) -> List[int]:
        """Returns IDs of empty cards."""
        rem: List[int] = []
        for m in self.models.all():
            rem += self.genCards(self.models.nids(m))
        return rem

    def emptyCardReport(self, cids) -> str:
        rep = ""
        for ords, cnt, flds in self.db.all(
            """
select group_concat(ord+1), count(), flds from cards c, notes n
where c.nid = n.id and c.id in %s group by nid"""
            % ids2str(cids)
        ):
            rep += _("Empty card numbers: %(c)s\nFields: %(f)s\n\n") % dict(
                c=ords, f=flds.replace("\x1f", " / ")
            )
        return rep

    # Field checksums and sorting fields
    ##########################################################################

    def _fieldData(self, snids: str) -> Any:
        return self.db.execute("select id, mid, flds from notes where id in " + snids)

    def updateFieldCache(self, nids: List[int]) -> None:
        "Update field checksums and sort cache, after find&replace, etc."
        snids = ids2str(nids)
        r = []
        for (nid, mid, flds) in self._fieldData(snids):
            fields = splitFields(flds)
            model = self.models.get(mid)
            if not model:
                # note points to invalid model
                continue
            r.append(
                (
                    stripHTMLMedia(fields[self.models.sortIdx(model)]),
                    fieldChecksum(fields[0]),
                    nid,
                )
            )
        # apply, relying on calling code to bump usn+mod
        self.db.executemany("update notes set sfld=?, csum=? where id=?", r)

    # Finding cards
    ##########################################################################

    def findCards(self, query: str, order: Union[bool, str] = False) -> Any:
        return anki.find.Finder(self).findCards(query, order)

    def findNotes(self, query: str) -> Any:
        return anki.find.Finder(self).findNotes(query)

    def findReplace(
        self,
        nids: List[int],
        src: str,
        dst: str,
        regex: Optional[bool] = None,
        field: Optional[str] = None,
        fold: bool = True,
    ) -> int:
        return anki.find.findReplace(self, nids, src, dst, regex, field, fold)

    def findDupes(self, fieldName: str, search: str = "") -> List[Tuple[Any, list]]:
        return anki.find.findDupes(self, fieldName, search)

    # Stats
    ##########################################################################

    def cardStats(self, card: Card) -> str:
        from anki.stats import CardStats

        return CardStats(self, card).report()

    def stats(self) -> "anki.stats.CollectionStats":
        from anki.stats import CollectionStats

        return CollectionStats(self)

    # Timeboxing
    ##########################################################################

    def startTimebox(self) -> None:
        self._startTime = time.time()
        self._startReps = self.sched.reps

    def timeboxReached(self) -> Union[bool, Tuple[Any, int]]:
        "Return (elapsedTime, reps) if timebox reached, or False."
        if not self.conf["timeLim"]:
            # timeboxing disabled
            return False
        elapsed = time.time() - self._startTime
        if elapsed > self.conf["timeLim"]:
            return (self.conf["timeLim"], self.sched.reps - self._startReps)
        return False

    # Undo
    ##########################################################################

    def clearUndo(self) -> None:
        # [type, undoName, data]
        # type 1 = review; type 2 = checkpoint
        self._undo = None

    def undoName(self) -> Any:
        "Undo menu item name, or None if undo unavailable."
        if not self._undo:
            return None
        return self._undo[1]

    def undo(self) -> Any:
        if self._undo[0] == 1:
            return self._undoReview()
        else:
            self._undoOp()

    def markReview(self, card: Card) -> None:
        old: List[Any] = []
        if self._undo:
            if self._undo[0] == 1:
                old = self._undo[2]
            self.clearUndo()
        wasLeech = card.note().hasTag("leech") or False
        self._undo = [1, _("Review"), old + [copy.copy(card)], wasLeech]

    def _undoReview(self) -> Any:
        data = self._undo[2]
        wasLeech = self._undo[3]
        c = data.pop()  # pytype: disable=attribute-error
        if not data:
            self.clearUndo()
        # remove leech tag if it didn't have it before
        if not wasLeech and c.note().hasTag("leech"):
            c.note().delTag("leech")
            c.note().flush()
        # write old data
        c.flush()
        # and delete revlog entry
        last = self.db.scalar(
            "select id from revlog where cid = ? " "order by id desc limit 1", c.id
        )
        self.db.execute("delete from revlog where id = ?", last)
        # restore any siblings
        self.db.execute(
            "update cards set queue=type,mod=?,usn=? where queue=-2 and nid=?",
            intTime(),
            self.usn(),
            c.nid,
        )
        # and finally, update daily counts
        n = 1 if c.queue == 3 else c.queue
        type = ("new", "lrn", "rev")[n]
        self.sched._updateStats(c, type, -1)
        self.sched.reps -= 1
        return c.id

    def _markOp(self, name: Optional[str]) -> None:
        "Call via .save()"
        if name:
            self._undo = [2, name]
        else:
            # saving disables old checkpoint, but not review undo
            if self._undo and self._undo[0] == 2:
                self.clearUndo()

    def _undoOp(self) -> None:
        self.rollback()
        self.clearUndo()

    # DB maintenance
    ##########################################################################

    def basicCheck(self) -> bool:
        "Basic integrity check for syncing. True if ok."
        # cards without notes
        if self.db.scalar(
            """
select 1 from cards where nid not in (select id from notes) limit 1"""
        ):
            return False
        # notes without cards or models
        if self.db.scalar(
            """
select 1 from notes where id not in (select distinct nid from cards)
or mid not in %s limit 1"""
            % ids2str(self.models.ids())
        ):
            return False
        # invalid ords
        for m in self.models.all():
            # ignore clozes
            if m["type"] != MODEL_STD:
                continue
            if self.db.scalar(
                """
select 1 from cards where ord not in %s and nid in (
select id from notes where mid = ?) limit 1"""
                % ids2str([t["ord"] for t in m["tmpls"]]),
                m["id"],
            ):
                return False
        return True

    def fixIntegrity(self) -> Tuple[str, bool]:
        """Fix possible problems and rebuild caches.

        Returns tuple of (error: str, ok: bool). 'ok' will be true if no
        problems were found.
        """
        problems = []
        curs = self.db.cursor()
        self.save()
        oldSize = os.stat(self.path)[stat.ST_SIZE]
        if self.db.scalar("pragma integrity_check") != "ok":
            return (_("Collection is corrupt. Please see the manual."), False)
        # note types with a missing model
        ids = self.db.list(
            """
select id from notes where mid not in """
            + ids2str(self.models.ids())
        )
        if ids:
            problems.append(
                ngettext(
                    "Deleted %d note with missing note type.",
                    "Deleted %d notes with missing note type.",
                    len(ids),
                )
                % len(ids)
            )
            self.remNotes(ids)
        # for each model
        for m in self.models.all():
            for t in m["tmpls"]:
                if t["did"] == "None":
                    t["did"] = None
                    problems.append(_("Fixed AnkiDroid deck override bug."))
                    self.models.save(m, updateReqs=False)
            if m["type"] == MODEL_STD:
                # model with missing req specification
                if "req" not in m:
                    self.models._updateRequired(m)
                    problems.append(_("Fixed note type: %s") % m["name"])
                # cards with invalid ordinal
                ids = self.db.list(
                    """
select id from cards where ord not in %s and nid in (
select id from notes where mid = ?)"""
                    % ids2str([t["ord"] for t in m["tmpls"]]),
                    m["id"],
                )
                if ids:
                    problems.append(
                        ngettext(
                            "Deleted %d card with missing template.",
                            "Deleted %d cards with missing template.",
                            len(ids),
                        )
                        % len(ids)
                    )
                    self.remCards(ids)
            # notes with invalid field count
            ids = []
            for id, flds in self.db.execute(
                "select id, flds from notes where mid = ?", m["id"]
            ):
                if (flds.count("\x1f") + 1) != len(m["flds"]):
                    ids.append(id)
            if ids:
                problems.append(
                    ngettext(
                        "Deleted %d note with wrong field count.",
                        "Deleted %d notes with wrong field count.",
                        len(ids),
                    )
                    % len(ids)
                )
                self.remNotes(ids)
        # delete any notes with missing cards
        ids = self.db.list(
            """
select id from notes where id not in (select distinct nid from cards)"""
        )
        if ids:
            cnt = len(ids)
            problems.append(
                ngettext(
                    "Deleted %d note with no cards.",
                    "Deleted %d notes with no cards.",
                    cnt,
                )
                % cnt
            )
            self._remNotes(ids)
        # cards with missing notes
        ids = self.db.list(
            """
select id from cards where nid not in (select id from notes)"""
        )
        if ids:
            cnt = len(ids)
            problems.append(
                ngettext(
                    "Deleted %d card with missing note.",
                    "Deleted %d cards with missing note.",
                    cnt,
                )
                % cnt
            )
            self.remCards(ids)
        # cards with odue set when it shouldn't be
        ids = self.db.list(
            """
select id from cards where odue > 0 and (type=1 or queue=2) and not odid"""
        )
        if ids:
            cnt = len(ids)
            problems.append(
                ngettext(
                    "Fixed %d card with invalid properties.",
                    "Fixed %d cards with invalid properties.",
                    cnt,
                )
                % cnt
            )
            self.db.execute("update cards set odue=0 where id in " + ids2str(ids))
        # cards with odid set when not in a dyn deck
        dids = [id for id in self.decks.allIds() if not self.decks.isDyn(id)]
        ids = self.db.list(
            """
select id from cards where odid > 0 and did in %s"""
            % ids2str(dids)
        )
        if ids:
            cnt = len(ids)
            problems.append(
                ngettext(
                    "Fixed %d card with invalid properties.",
                    "Fixed %d cards with invalid properties.",
                    cnt,
                )
                % cnt
            )
            self.db.execute(
                "update cards set odid=0, odue=0 where id in " + ids2str(ids)
            )
        # tags
        self.tags.registerNotes()
        # field cache
        for m in self.models.all():
            self.updateFieldCache(self.models.nids(m))
        # new cards can't have a due position > 32 bits, so wrap items over
        # 2 million back to 1 million
        curs.execute(
            """
update cards set due=1000000+due%1000000,mod=?,usn=? where due>=1000000
and type=0""",
            [intTime(), self.usn()],
        )
        if curs.rowcount:
            problems.append(
                "Found %d new cards with a due number >= 1,000,000 - consider repositioning them in the Browse screen."
                % curs.rowcount
            )
        # new card position
        self.conf["nextPos"] = (
            self.db.scalar("select max(due)+1 from cards where type = 0") or 0
        )
        # reviews should have a reasonable due #
        ids = self.db.list("select id from cards where queue = 2 and due > 100000")
        if ids:
            problems.append("Reviews had incorrect due date.")
            self.db.execute(
                "update cards set due = ?, ivl = 1, mod = ?, usn = ? where id in %s"
                % ids2str(ids),
                self.sched.today,
                intTime(),
                self.usn(),
            )
        # v2 sched had a bug that could create decimal intervals
        curs.execute(
            "update cards set ivl=round(ivl),due=round(due) where ivl!=round(ivl) or due!=round(due)"
        )
        if curs.rowcount:
            problems.append("Fixed %d cards with v2 scheduler bug." % curs.rowcount)

        curs.execute(
            "update revlog set ivl=round(ivl),lastIvl=round(lastIvl) where ivl!=round(ivl) or lastIvl!=round(lastIvl)"
        )
        if curs.rowcount:
            problems.append(
                "Fixed %d review history entries with v2 scheduler bug." % curs.rowcount
            )
        # models
        if self.models.ensureNotEmpty():
            problems.append("Added missing note type.")
        # and finally, optimize
        self.optimize()
        newSize = os.stat(self.path)[stat.ST_SIZE]
        txt = _("Database rebuilt and optimized.")
        ok = not problems
        problems.append(txt)
        # if any problems were found, force a full sync
        if not ok:
            self.modSchema(check=False)
        self.save()
        return ("\n".join(problems), ok)

    def optimize(self) -> None:
        self.db.setAutocommit(True)
        self.db.execute("vacuum")
        self.db.execute("analyze")
        self.db.setAutocommit(False)
        self.lock()

    # Logging
    ##########################################################################

    def log(self, *args, **kwargs) -> None:
        if not self._debugLog:
            return

        def customRepr(x):
            if isinstance(x, str):
                return x
            return pprint.pformat(x)

        path, num, fn, y = traceback.extract_stack(limit=2 + kwargs.get("stack", 0))[0]
        buf = "[%s] %s:%s(): %s" % (
            intTime(),
            os.path.basename(path),
            fn,
            ", ".join([customRepr(x) for x in args]),
        )
        self._logHnd.write(buf + "\n")
        if devMode:
            print(buf)

    def _openLog(self) -> None:
        if not self._debugLog:
            return
        lpath = re.sub(r"\.anki2$", ".log", self.path)
        if os.path.exists(lpath) and os.path.getsize(lpath) > 10 * 1024 * 1024:
            lpath2 = lpath + ".old"
            if os.path.exists(lpath2):
                os.unlink(lpath2)
            os.rename(lpath, lpath2)
        self._logHnd = open(lpath, "a", encoding="utf8")

    def _closeLog(self) -> None:
        if not self._debugLog:
            return
        self._logHnd.close()
        self._logHnd = None

    # Card Flags
    ##########################################################################

    def setUserFlag(self, flag: int, cids: List[int]) -> None:
        assert 0 <= flag <= 7
        self.db.execute(
            "update cards set flags = (flags & ~?) | ?, usn=?, mod=? where id in %s"
            % ids2str(cids),
            0b111,
            flag,
            self.usn(),
            intTime(),
        )
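A minimal standalone sketch of the flag arithmetic used by setUserFlag: the low three bits of flags hold the user flag (0-7), so storing a new flag clears only those bits and leaves any higher bits alone. The helper name below is illustrative, not part of Anki's API.

def set_user_flag(flags: int, flag: int) -> int:
    assert 0 <= flag <= 7
    # clear the low three bits, then store the new flag there
    return (flags & ~0b111) | flag

assert set_user_flag(0b0000, 5) == 0b0101
assert set_user_flag(0b1011, 2) == 0b1010  # higher bit preserved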
Example no. 16
0
File: utils.py Project: ZX1209/anki
def maxID(db: DB) -> int:
    "Return the first safe ID to use."
    now = intTime(1000)
    for tbl in "cards", "notes":
        now = max(now, db.scalar("select max(id) from %s" % tbl) or 0)
    return now + 1
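For illustration, the same idea written against the sqlite3 module directly (a sketch, not Anki's DB wrapper): IDs are millisecond timestamps, and the next safe ID is one past the larger of "now" and the highest existing card or note ID.

import sqlite3
import time

def max_id(conn: sqlite3.Connection) -> int:
    now = int(time.time() * 1000)
    for tbl in ("cards", "notes"):
        highest = conn.execute("select max(id) from %s" % tbl).fetchone()[0] or 0
        now = max(now, highest)
    return now + 1

conn = sqlite3.connect(":memory:")
conn.executescript("create table cards (id int); create table notes (id int);")
conn.execute("insert into cards values (99999999999999)")
assert max_id(conn) == 100000000000000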
class MediaManager(object):

    soundRegexps = [r"(?i)(\[sound:(?P<fname>[^]]+)\])"]
    imgRegexps = [
        # src element quoted case
        "(?i)(<img[^>]* src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
        # unquoted case
        "(?i)(<img[^>]* src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
    ]
    regexps = soundRegexps + imgRegexps

    def __init__(self, col, server):
        self.col = col
        if server:
            self._dir = None
            return
        # media directory
        self._dir = re.sub(r"(?i)\.(anki2)$", ".media", self.col.path)
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        try:
            self._oldcwd = os.getcwd()
        except OSError:
            # cwd doesn't exist
            self._oldcwd = None
        try:
            os.chdir(self._dir)
        except OSError:
            raise Exception("invalidTempFolder")
        # change database
        self.connect()

    def connect(self):
        if self.col.server:
            return
        path = self.dir() + ".db2"
        create = not os.path.exists(path)
        os.chdir(self._dir)
        self.db = DB(path)
        if create:
            self._initDB()
        self.maybeUpgrade()

    def _initDB(self):
        self.db.executescript("""
create table media (
 fname text not null primary key,
 csum text,           -- null indicates deleted file
 mtime int not null,  -- zero if deleted
 dirty int not null
);

create index idx_media_dirty on media (dirty);

create table meta (dirMod int, lastUsn int); insert into meta values (0, 0);
""")

    def maybeUpgrade(self):
        oldpath = self.dir() + ".db"
        if os.path.exists(oldpath):
            self.db.execute('attach "../collection.media.db" as old')
            try:
                self.db.execute("""
    insert into media
     select m.fname, csum, mod, ifnull((select 1 from log l2 where l2.fname=m.fname), 0) as dirty
     from old.media m
     left outer join old.log l using (fname)
     union
     select fname, null, 0, 1 from old.log where type=1;""")
                self.db.execute("delete from meta")
                self.db.execute("""
    insert into meta select dirMod, usn from old.meta
    """)
                self.db.commit()
            except Exception as e:
                # if we couldn't import the old db for some reason, just start
                # anew
                self.col.log("failed to import old media db:" +
                             traceback.format_exc())
            self.db.execute("detach old")
            npath = "../collection.media.db.old"
            if os.path.exists(npath):
                os.unlink(npath)
            os.rename("../collection.media.db", npath)

    def close(self):
        if self.col.server:
            return
        self.db.close()
        self.db = None
        # change cwd back to old location
        if self._oldcwd:
            try:
                os.chdir(self._oldcwd)
            except:
                # may have been deleted
                pass

    def dir(self):
        return self._dir

    def _isFAT32(self):
        if not isWin:
            return
        import win32api, win32file
        try:
            name = win32file.GetVolumeNameForVolumeMountPoint(self._dir[:3])
        except:
            # mapped & unmapped network drive; pray that it's not vfat
            return
        if win32api.GetVolumeInformation(name)[4].lower().startswith("fat"):
            return True

    # Adding media
    ##########################################################################
    # opath must be in unicode

    def addFile(self, opath):
        return self.writeData(opath, open(opath, "rb").read())

    def writeData(self, opath, data):
        # if fname is a full path, use only the basename
        fname = os.path.basename(opath)
        # make sure we write it in NFC form (on mac will autoconvert to NFD),
        # and return an NFC-encoded reference
        fname = unicodedata.normalize("NFC", fname)
        # remove any dangerous characters
        base = self.stripIllegal(fname)
        (root, ext) = os.path.splitext(base)

        def repl(match):
            n = int(match.group(1))
            return " (%d)" % (n + 1)

        # find the first available name
        csum = checksum(data)
        while True:
            fname = root + ext
            path = os.path.join(self.dir(), fname)
            # if it doesn't exist, copy it directly
            if not os.path.exists(path):
                open(path, "wb").write(data)
                return fname
            # if it's identical, reuse
            if checksum(open(path, "rb").read()) == csum:
                return fname
            # otherwise, increment the index in the filename
            reg = r" \((\d+)\)$"
            if not re.search(reg, root):
                root = root + " (1)"
            else:
                root = re.sub(reg, repl, root)

    # String manipulation
    ##########################################################################

    def filesInStr(self, mid, string, includeRemote=False):
        from anki.latex import mungeQA
        l = []
        model = self.col.models.get(mid)
        strings = []
        if model['type'] == MODEL_CLOZE and "{{c" in string:
            # if the field has clozes in it, we'll need to expand the
            # possibilities so we can render latex
            strings = self._expandClozes(string)
        else:
            strings = [string]
        for string in strings:
            # handle latex
            string = mungeQA(string, None, None, model, None, self.col)
            # extract filenames
            for reg in self.regexps:
                for match in re.finditer(reg, string):
                    fname = match.group("fname")
                    isLocal = not re.match("(https?|ftp)://", fname.lower())
                    if isLocal or includeRemote:
                        l.append(fname)
        return l

    def _expandClozes(self, string):
        ords = set(re.findall(r"{{c(\d+)::.+?}}", string))
        strings = []
        from anki.template.template import clozeReg

        def qrepl(m):
            if m.group(3):
                return "[%s]" % m.group(3)
            else:
                return "[...]"

        def arepl(m):
            return m.group(1)

        for ord in ords:
            s = re.sub(clozeReg % ord, qrepl, string)
            s = re.sub(clozeReg % ".+?", "\\1", s)
            strings.append(s)
        strings.append(re.sub(clozeReg % ".+?", arepl, string))
        return strings

    def transformNames(self, txt, func):
        for reg in self.regexps:
            txt = re.sub(reg, func, txt)
        return txt

    def strip(self, txt):
        for reg in self.regexps:
            txt = re.sub(reg, "", txt)
        return txt

    def escapeImages(self, string, unescape=False):
        if unescape:
            fn = urllib.parse.unquote
        else:
            fn = urllib.parse.quote

        def repl(match):
            tag = match.group(0)
            fname = match.group("fname")
            if re.match("(https?|ftp)://", fname):
                return tag
            return tag.replace(fname, str(fn(fname.encode("utf-8")), "utf8"))

        for reg in self.imgRegexps:
            string = re.sub(reg, repl, string)
        return string

    # Rebuilding DB
    ##########################################################################

    def check(self, local=None):
        "Return (missingFiles, unusedFiles)."
        mdir = self.dir()
        # gather all media references in NFC form
        allRefs = set()
        for nid, mid, flds in self.col.db.execute(
                "select id, mid, flds from notes"):
            noteRefs = self.filesInStr(mid, flds)
            # check the refs are in NFC
            for f in noteRefs:
                # if they're not, we'll need to fix them first
                if f != unicodedata.normalize("NFC", f):
                    self._normalizeNoteRefs(nid)
                    noteRefs = self.filesInStr(mid, flds)
                    break
            allRefs.update(noteRefs)
        # loop through media folder
        unused = []
        invalid = []
        if local is None:
            files = os.listdir(mdir)
        else:
            files = local
        renamedFiles = False
        for file in files:
            if not local:
                if not os.path.isfile(file):
                    # ignore directories
                    continue
            if file.startswith("_"):
                # leading _ says to ignore file
                continue
            if not isinstance(file, str):
                invalid.append(
                    str(file, sys.getfilesystemencoding(), "replace"))
                continue
            nfcFile = unicodedata.normalize("NFC", file)
            # we enforce NFC fs encoding on non-macs; on macs we'll have gotten
            # NFD so we use the above variable for comparing references
            if not isMac and not local:
                if file != nfcFile:
                    # delete if we already have the NFC form, otherwise rename
                    if os.path.exists(nfcFile):
                        os.unlink(file)
                        renamedFiles = True
                    else:
                        os.rename(file, nfcFile)
                        renamedFiles = True
                    file = nfcFile
            # compare
            if nfcFile not in allRefs:
                unused.append(file)
            else:
                allRefs.discard(nfcFile)
        # if we renamed any files to nfc format, we must rerun the check
        # to make sure the renamed files are not marked as unused
        if renamedFiles:
            return self.check(local=local)
        nohave = [x for x in allRefs if not x.startswith("_")]
        return (nohave, unused, invalid)

    def _normalizeNoteRefs(self, nid):
        note = self.col.getNote(nid)
        for c, fld in enumerate(note.fields):
            nfc = unicodedata.normalize("NFC", fld)
            if nfc != fld:
                note.fields[c] = nfc
        note.flush()

    # Copying on import
    ##########################################################################

    def have(self, fname):
        return os.path.exists(os.path.join(self.dir(), fname))

    # Illegal characters
    ##########################################################################

    _illegalCharReg = re.compile(r'[][><:"/?*^\\|\0\r\n]')

    def stripIllegal(self, str):
        return re.sub(self._illegalCharReg, "", str)

    def hasIllegal(self, str):
        # a file that couldn't be decoded to unicode is considered invalid
        if not isinstance(str, str):
            return True
        return not not re.search(self._illegalCharReg, str)

    # Tracking changes
    ##########################################################################

    def findChanges(self):
        "Scan the media folder if it's changed, and note any changes."
        if self._changed():
            self._logChanges()

    def haveDirty(self):
        return self.db.scalar("select 1 from media where dirty=1 limit 1")

    def _mtime(self, path):
        return int(os.stat(path).st_mtime)

    def _checksum(self, path):
        return checksum(open(path, "rb").read())

    def _changed(self):
        "Return dir mtime if it has changed since the last findChanges()"
        # doesn't track edits, but user can add or remove a file to update
        mod = self.db.scalar("select dirMod from meta")
        mtime = self._mtime(self.dir())
        if not self._isFAT32() and mod and mod == mtime:
            return False
        return mtime

    def _logChanges(self):
        (added, removed) = self._changes()
        media = []
        for f in added:
            mt = self._mtime(f)
            media.append((f, self._checksum(f), mt, 1))
        for f in removed:
            media.append((f, None, 0, 1))
        # update media db
        self.db.executemany("insert or replace into media values (?,?,?,?)",
                            media)
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        self.db.commit()

    def _changes(self):
        self.cache = {}
        for (name, csum, mod) in self.db.execute(
                "select fname, csum, mtime from media where csum is not null"):
            self.cache[name] = [csum, mod, False]
        added = []
        removed = []
        # loop through on-disk files
        for f in os.listdir(self.dir()):
            # ignore folders and thumbs.db
            if os.path.isdir(f):
                continue
            if f.lower() == "thumbs.db":
                continue
            # and files with invalid chars
            if self.hasIllegal(f):
                continue
            # empty files are invalid; clean them up and continue
            sz = os.path.getsize(f)
            if not sz:
                os.unlink(f)
                continue
            if sz > 100 * 1024 * 1024:
                self.col.log("ignoring file over 100MB", f)
                continue
            # check encoding
            if not isMac:
                normf = unicodedata.normalize("NFC", f)
                if f != normf:
                    # wrong filename encoding which will cause sync errors
                    if os.path.exists(normf):
                        os.unlink(f)
                    else:
                        os.rename(f, normf)
            # newly added?
            if f not in self.cache:
                added.append(f)
            else:
                # modified since last time?
                if self._mtime(f) != self.cache[f][1]:
                    # and has different checksum?
                    if self._checksum(f) != self.cache[f][0]:
                        added.append(f)
                # mark as used
                self.cache[f][2] = True
        # look for any entries in the cache that no longer exist on disk
        for (k, v) in list(self.cache.items()):
            if not v[2]:
                removed.append(k)
        return added, removed

    # Syncing-related
    ##########################################################################

    def lastUsn(self):
        return self.db.scalar("select lastUsn from meta")

    def setLastUsn(self, usn):
        self.db.execute("update meta set lastUsn = ?", usn)
        self.db.commit()

    def syncInfo(self, fname):
        ret = self.db.first("select csum, dirty from media where fname=?",
                            fname)
        return ret or (None, 0)

    def markClean(self, fnames):
        for fname in fnames:
            self.db.execute("update media set dirty=0 where fname=?", fname)

    def syncDelete(self, fname):
        if os.path.exists(fname):
            os.unlink(fname)
        self.db.execute("delete from media where fname=?", fname)

    def mediaCount(self):
        return self.db.scalar(
            "select count() from media where csum is not null")

    def dirtyCount(self):
        return self.db.scalar("select count() from media where dirty=1")

    def forceResync(self):
        self.db.execute("delete from media")
        self.db.execute("update meta set lastUsn=0,dirMod=0")
        self.db.commit()
        self.db.setAutocommit(True)
        self.db.execute("vacuum")
        self.db.execute("analyze")
        self.db.setAutocommit(False)

    # Media syncing: zips
    ##########################################################################

    def mediaChangesZip(self):
        from io import BytesIO
        f = BytesIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)

        fnames = []
        # meta is list of (fname, zipname), where zipname of None
        # is a deleted file
        meta = []
        sz = 0

        for c, (fname, csum) in enumerate(
                self.db.execute("select fname, csum from media where dirty=1"
                                " limit %d" % SYNC_ZIP_COUNT)):

            fnames.append(fname)
            normname = unicodedata.normalize("NFC", fname)

            if csum:
                self.col.log("+media zip", fname)
                z.write(fname, str(c))
                meta.append((normname, str(c)))
                sz += os.path.getsize(fname)
            else:
                self.col.log("-media zip", fname)
                meta.append((normname, ""))

            if sz >= SYNC_ZIP_SIZE:
                break

        z.writestr("_meta", json.dumps(meta))
        z.close()
        return f.getvalue(), fnames

    def addFilesFromZip(self, zipData):
        "Extract zip data; true if finished."
        from io import StringIO
        f = StringIO(zipData)
        z = zipfile.ZipFile(f, "r")
        media = []
        # get meta info first
        meta = json.loads(z.read("_meta"))
        # then loop through all files
        cnt = 0
        for i in z.infolist():
            if i.filename == "_meta":
                # ignore previously-retrieved meta
                continue
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                if not isinstance(name, str):
                    name = str(name, "utf8")
                # normalize name for platform
                if isMac:
                    name = unicodedata.normalize("NFD", name)
                else:
                    name = unicodedata.normalize("NFC", name)
                # save file
                open(name, "wb").write(data)
                # update db
                media.append((name, csum, self._mtime(name), 0))
                cnt += 1
        if media:
            self.db.executemany(
                "insert or replace into media values (?,?,?,?)", media)
        return cnt
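As an illustrative round trip of the zip layout that mediaChangesZip produces (assumed names only): each payload is stored under its numeric index, and the "_meta" entry maps those indexes back to the real filenames, with an empty zipname marking a deletion.

import io
import json
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as z:
    z.writestr("0", b"fake image bytes")                   # payload stored under its index
    z.writestr("_meta", json.dumps([["picture.jpg", "0"],  # index 0 -> real name
                                    ["deleted.mp3", ""]])) # "" marks a deletion

with zipfile.ZipFile(io.BytesIO(buf.getvalue())) as z:
    assert json.loads(z.read("_meta").decode("utf8")) == [
        ["picture.jpg", "0"], ["deleted.mp3", ""]]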
Example no. 18
0
class ProfileManager:
    def __init__(self, base=None):
        self.name = None
        self.db = None
        # instantiate base folder
        self._setBaseFolder(base)

    def setupMeta(self) -> LoadMetaResult:
        # load metadata
        res = self._loadMeta()
        self.firstRun = res.firstTime
        return res

    # profile load on startup
    def openProfile(self, profile):
        if profile:
            if profile not in self.profiles():
                QMessageBox.critical(None, "Error", "Requested profile does not exist.")
                sys.exit(1)
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureBaseExists(self):
        self._ensureExists(self.base)

    # Folder migration
    ######################################################################

    def _oldFolderLocation(self):
        if isMac:
            return os.path.expanduser("~/Documents/Anki")
        elif isWin:
            from aqt.winpaths import get_personal

            return os.path.join(get_personal(), "Anki")
        else:
            p = os.path.expanduser("~/Anki")
            if os.path.isdir(p):
                return p
            return os.path.expanduser("~/Documents/Anki")

    def maybeMigrateFolder(self):
        newBase = self.base
        oldBase = self._oldFolderLocation()

        if oldBase and not os.path.exists(self.base) and os.path.isdir(oldBase):
            try:
                # if anything goes wrong with UI, reset to the old behavior of always migrating
                self._tryToMigrateFolder(oldBase)
            except AnkiRestart:
                raise
            except:
                self.base = newBase
                shutil.move(oldBase, self.base)

    def _tryToMigrateFolder(self, oldBase):
        from PyQt5 import QtWidgets, QtGui

        app = QtWidgets.QApplication([])
        icon = QtGui.QIcon()
        icon.addPixmap(
            QtGui.QPixmap(":/icons/anki.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off,
        )
        window_title = "Data Folder Migration"
        migration_directories = f"\n\n    {oldBase}\n\nto\n\n    {self.base}"

        confirmation = QMessageBox()
        confirmation.setIcon(QMessageBox.Warning)
        confirmation.setWindowIcon(icon)
        confirmation.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel)
        confirmation.setWindowTitle(window_title)
        confirmation.setText(
            "Anki needs to move its data folder from Documents/Anki to a new location. Proceed?"
        )
        retval = confirmation.exec()

        if retval == QMessageBox.Ok:
            progress = QMessageBox()
            progress.setIcon(QMessageBox.Information)
            progress.setStandardButtons(QMessageBox.NoButton)
            progress.setWindowIcon(icon)
            progress.setWindowTitle(window_title)
            progress.setText("Please wait...")
            progress.show()
            app.processEvents()

            shutil.move(oldBase, self.base)
            progress.hide()

            completion = QMessageBox()
            completion.setIcon(QMessageBox.Information)
            completion.setStandardButtons(QMessageBox.Ok)
            completion.setWindowIcon(icon)
            completion.setWindowTitle(window_title)
            completion.setText("Migration complete. Please start Anki again.")
            completion.show()
            completion.exec()
        else:
            diag = QMessageBox()
            diag.setIcon(QMessageBox.Warning)
            diag.setWindowIcon(icon)
            diag.setStandardButtons(QMessageBox.Ok)
            diag.setWindowTitle(window_title)
            diag.setText(
                "Migration aborted. If you would like to keep the old folder location, please "
                "see the Startup Options section of the manual. Anki will now quit."
            )
            diag.exec()

        raise AnkiRestart(exitcode=0)

    # Profile load/save
    ######################################################################

    def profiles(self):
        def names():
            return self.db.list("select name from profiles where name != '_global'")

        n = names()
        if not n:
            self._ensureProfile()
            n = names()

        return n

    def _unpickle(self, data):
        class Unpickler(pickle.Unpickler):
            def find_class(self, module, name):
                if module == "PyQt5.sip":
                    try:
                        import PyQt5.sip  # pylint: disable=unused-import
                    except:
                        # use old sip location
                        module = "sip"
                fn = super().find_class(module, name)
                if module == "sip" and name == "_unpickle_type":

                    def wrapper(mod, obj, args):
                        if mod.startswith("PyQt4") and obj == "QByteArray":
                            # can't trust str objects from python 2
                            return QByteArray()
                        return fn(mod, obj, args)

                    return wrapper
                else:
                    return fn

        up = Unpickler(io.BytesIO(data), errors="ignore")
        return up.load()

    def _pickle(self, obj):
        return pickle.dumps(obj, protocol=0)
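A minimal sketch, using sqlite3 directly rather than the ProfileManager API, of how profile dicts are persisted: pickled with protocol 0 and stored as a blob in the profiles table, then unpickled on load. The table layout matches the one created above; the sample profile keys are illustrative.

import pickle
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table profiles (name text primary key, data text not null)")
prof = {"syncKey": None, "syncMedia": True}
conn.execute("insert or replace into profiles values (?, ?)",
             ("User 1", pickle.dumps(prof, protocol=0)))
raw = conn.execute("select cast(data as blob) from profiles where name = ?",
                   ("User 1",)).fetchone()[0]
assert pickle.loads(raw) == prof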

    def load(self, name):
        assert name != "_global"
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = ?", name
        )
        self.name = name
        try:
            self.profile = self._unpickle(data)
        except:
            QMessageBox.warning(
                None,
                _("Profile Corrupt"),
                _(
                    """\
Anki could not read your profile data. Window sizes and your sync login \
details have been forgotten."""
                ),
            )

            print("resetting corrupt profile")
            self.profile = profileConf.copy()
            self.save()
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.profile), self.name)
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute(
            "insert or ignore into profiles values (?, ?)", name, self._pickle(prof)
        )
        self.db.commit()

    def remove(self, name):
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def trashCollection(self):
        p = self.collectionPath()
        if os.path.exists(p):
            send2trash(p)

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (oldFolder.lower() == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = "".join([oldFolder, "-temp"])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(
                        _("Please remove the folder %s and try again.") % midFolder
                    )
                    self.name = oldName
                    return
            else:
                showWarning(_("Folder already exists."))
                self.name = oldName
                return

        # update name
        self.db.execute("update profiles set name = ? where name = ?", name, oldName)
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except Exception as e:
            self.db.rollback()
            if "WinError 5" in str(e):
                showWarning(
                    _(
                        """\
Anki could not rename your profile because it could not rename the profile \
folder on disk. Please ensure you have permission to write to Documents/Anki \
and no other programs are accessing your profile folders, then try again."""
                    )
                )
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons21"))

    def backupFolder(self):
        return self._ensureExists(os.path.join(self.profileFolder(), "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Downgrade
    ######################################################################

    def downgrade(self, profiles: List[str]) -> List[str]:
        "Downgrade all profiles. Return a list of profiles that couldn't be opened."
        problem_profiles = []
        for name in profiles:
            path = os.path.join(self.base, name, "collection.anki2")
            if not os.path.exists(path):
                continue
            with DB(path) as db:
                if db.scalar("select ver from col") == 11:
                    # nothing to do
                    continue
            try:
                c = Collection(path)
                c.close(save=False, downgrade=True)
            except Exception as e:
                print(e)
                problem_profiles.append(name)
        return problem_profiles
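The version probe above boils down to reading the schema version from the col table; a standalone sketch with sqlite3 (the helper name is assumed, and schema 11 is the legacy format that older clients can read):

import sqlite3

def needs_downgrade(path: str) -> bool:
    # anything newer than schema 11 must be downgraded before older clients open it
    with sqlite3.connect(path) as db:
        (ver,) = db.execute("select ver from col").fetchone()
    return ver != 11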

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _setBaseFolder(self, cmdlineBase):
        if cmdlineBase:
            self.base = os.path.abspath(cmdlineBase)
        elif os.environ.get("ANKI_BASE"):
            self.base = os.path.abspath(os.environ["ANKI_BASE"])
        else:
            self.base = self._defaultBase()
            self.maybeMigrateFolder()
        self.ensureBaseExists()

    def _defaultBase(self):
        if isWin:
            from aqt.winpaths import get_appdata

            return os.path.join(get_appdata(), "Anki2")
        elif isMac:
            return os.path.expanduser("~/Library/Application Support/Anki2")
        else:
            dataDir = os.environ.get(
                "XDG_DATA_HOME", os.path.expanduser("~/.local/share")
            )
            if not os.path.exists(dataDir):
                os.makedirs(dataDir)
            return os.path.join(dataDir, "Anki2")

    def _loadMeta(self, retrying=False) -> LoadMetaResult:
        result = LoadMetaResult()
        result.firstTime = False
        result.loadError = retrying

        opath = os.path.join(self.base, "prefs.db")
        path = os.path.join(self.base, "prefs21.db")
        if not retrying and os.path.exists(opath) and not os.path.exists(path):
            shutil.copy(opath, path)

        result.firstTime = not os.path.exists(path)

        def recover():
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            for suffix in ("", "-journal"):
                fpath = path + suffix
                if os.path.exists(fpath):
                    os.unlink(fpath)

        # open DB file and read data
        try:
            self.db = DB(path)
            assert self.db.scalar("pragma integrity_check") == "ok"
            self.db.execute(
                """
create table if not exists profiles
(name text primary key, data text not null);"""
            )
            data = self.db.scalar(
                "select cast(data as blob) from profiles where name = '_global'"
            )
        except:
            if result.loadError:
                # already failed, prevent infinite loop
                raise
            # delete files and try again
            recover()
            return self._loadMeta(retrying=True)

        # try to read data
        if not result.firstTime:
            try:
                self.meta = self._unpickle(data)
                return result
            except:
                print("resetting corrupt _global")
                result.loadError = True
                result.firstTime = True

        # if new or read failed, create a default global profile
        self.meta = metaConf.copy()
        self.db.execute(
            "insert or replace into profiles values ('_global', ?)",
            self._pickle(metaConf),
        )
        return result

    def _ensureProfile(self):
        "Create a new profile if none exists."
        self.create(_("User 1"))
        p = os.path.join(self.base, "README.txt")
        with open(p, "w", encoding="utf8") as file:
            file.write(
                without_unicode_isolation(
                    tr(TR.PROFILES_FOLDER_README, link=appHelpSite + "#startupopts")
                )
            )

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def setDefaultLang(self):
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass

        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        qconnect(d.accepted, self._onLangSelected)
        qconnect(d.rejected, lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en_US"
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(anki.lang.langs):
            if code == "en_US":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in anki.lang.langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        obj = anki.lang.langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(
            None, "Anki", en % name, QMessageBox.Yes | QMessageBox.No, QMessageBox.No
        )
        if r != QMessageBox.Yes:
            return self.setDefaultLang()
        self.setLang(code)

    def setLang(self, code):
        self.meta["defaultLang"] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()
        anki.lang.set_lang(code, locale_dir())

    # OpenGL
    ######################################################################

    def _glPath(self):
        return os.path.join(self.base, "gldriver")

    def glMode(self):
        if isMac:
            return "auto"

        path = self._glPath()
        if not os.path.exists(path):
            return "software"

        with open(path, "r") as file:
            mode = file.read().strip()

        if mode == "angle" and isWin:
            return mode
        elif mode == "software":
            return mode
        return "auto"

    def setGlMode(self, mode):
        with open(self._glPath(), "w") as file:
            file.write(mode)

    def nextGlMode(self):
        mode = self.glMode()
        if mode == "software":
            self.setGlMode("auto")
        elif mode == "auto":
            if isWin:
                self.setGlMode("angle")
            else:
                self.setGlMode("software")
        elif mode == "angle":
            self.setGlMode("software")
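The driver cycle above, written as a small lookup table for illustration: on Windows the mode advances software -> auto -> angle -> software, while elsewhere it simply alternates between software and auto.

def next_gl_mode(mode: str, is_win: bool) -> str:
    if is_win:
        return {"software": "auto", "auto": "angle", "angle": "software"}[mode]
    return {"software": "auto", "auto": "software", "angle": "software"}[mode]

assert next_gl_mode("auto", is_win=True) == "angle"
assert next_gl_mode("angle", is_win=False) == "software"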

    # Shared options
    ######################################################################

    def uiScale(self) -> float:
        scale = self.meta.get("uiScale", 1.0)
        return max(scale, 1)

    def setUiScale(self, scale: float) -> None:
        self.meta["uiScale"] = scale

    def last_addon_update_check(self) -> int:
        return self.meta.get("last_addon_update_check", 0)

    def set_last_addon_update_check(self, secs):
        self.meta["last_addon_update_check"] = secs

    def night_mode(self) -> bool:
        return self.meta.get("night_mode", False)

    def set_night_mode(self, on: bool) -> None:
        self.meta["night_mode"] = on

    def dark_mode_widgets(self) -> bool:
        return self.meta.get("dark_mode_widgets", False)

    # Profile-specific
    ######################################################################

    def interrupt_audio(self) -> bool:
        return self.profile.get("interrupt_audio", True)

    def set_interrupt_audio(self, val: bool) -> None:
        self.profile["interrupt_audio"] = val
        aqt.sound.av_player.interrupt_current_audio = val

    def sync_key(self) -> Optional[str]:
        return self.profile.get("syncKey")

    def set_sync_key(self, val: Optional[str]) -> None:
        self.profile["syncKey"] = val

    def media_syncing_enabled(self) -> bool:
        return self.profile["syncMedia"]

    def sync_shard(self) -> Optional[int]:
        return self.profile.get("hostNum")

    ######################################################################

    def apply_profile_options(self) -> None:
        aqt.sound.av_player.interrupt_current_audio = self.interrupt_audio()
Example no. 19
0
class MediaManager:

    soundRegexps = [r"(?i)(\[sound:(?P<fname>[^]]+)\])"]
    imgRegexps = [
        # src element quoted case
        r"(?i)(<img[^>]* src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
        # unquoted case
        r"(?i)(<img[^>]* src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
    ]
    regexps = soundRegexps + imgRegexps
    db: Optional[DB]

    def __init__(self, col, server: bool) -> None:
        self.col = col
        if server:
            self._dir = None
            return
        # media directory
        self._dir = re.sub(r"(?i)\.(anki2)$", ".media", self.col.path)
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        try:
            self._oldcwd = os.getcwd()
        except OSError:
            # cwd doesn't exist
            self._oldcwd = None
        try:
            os.chdir(self._dir)
        except OSError:
            raise Exception("invalidTempFolder")
        # change database
        self.connect()

    def connect(self) -> None:
        if self.col.server:
            return
        path = self.dir() + ".db2"
        create = not os.path.exists(path)
        os.chdir(self._dir)
        self.db = DB(path)
        if create:
            self._initDB()
        self.maybeUpgrade()

    def _initDB(self) -> None:
        self.db.executescript("""
create table media (
 fname text not null primary key,
 csum text,           -- null indicates deleted file
 mtime int not null,  -- zero if deleted
 dirty int not null
);

create index idx_media_dirty on media (dirty);

create table meta (dirMod int, lastUsn int); insert into meta values (0, 0);
""")

    def maybeUpgrade(self) -> None:
        oldpath = self.dir() + ".db"
        if os.path.exists(oldpath):
            self.db.execute('attach "../collection.media.db" as old')
            try:
                self.db.execute("""
    insert into media
     select m.fname, csum, mod, ifnull((select 1 from log l2 where l2.fname=m.fname), 0) as dirty
     from old.media m
     left outer join old.log l using (fname)
     union
     select fname, null, 0, 1 from old.log where type=1;""")
                self.db.execute("delete from meta")
                self.db.execute("""
    insert into meta select dirMod, usn from old.meta
    """)
                self.db.commit()
            except Exception as e:
                # if we couldn't import the old db for some reason, just start
                # anew
                self.col.log("failed to import old media db:" +
                             traceback.format_exc())
            self.db.execute("detach old")
            npath = "../collection.media.db.old"
            if os.path.exists(npath):
                os.unlink(npath)
            os.rename("../collection.media.db", npath)

    def close(self) -> None:
        if self.col.server:
            return
        self.db.close()
        self.db = None
        # change cwd back to old location
        if self._oldcwd:
            try:
                os.chdir(self._oldcwd)
            except:
                # may have been deleted
                pass

    def _deleteDB(self) -> None:
        path = self.db._path
        self.close()
        os.unlink(path)
        self.connect()

    def dir(self) -> Any:
        return self._dir

    def _isFAT32(self) -> bool:
        if not isWin:
            return False
        # pylint: disable=import-error
        import win32api, win32file  # pytype: disable=import-error

        try:
            name = win32file.GetVolumeNameForVolumeMountPoint(self._dir[:3])
        except:
            # mapped & unmapped network drive; pray that it's not vfat
            return False
        if win32api.GetVolumeInformation(name)[4].lower().startswith("fat"):
            return True
        return False

    # Adding media
    ##########################################################################
    # opath must be in unicode

    def addFile(self, opath: str) -> Any:
        with open(opath, "rb") as f:
            return self.writeData(opath, f.read())

    def writeData(self,
                  opath: str,
                  data: bytes,
                  typeHint: Optional[str] = None) -> Any:
        # if fname is a full path, use only the basename
        fname = os.path.basename(opath)

        # if it's missing an extension and a type hint was provided, use that
        if not os.path.splitext(fname)[1] and typeHint:
            # mimetypes is returning '.jpe' even after calling .init(), so we'll do
            # it manually instead
            typeMap = {
                "image/jpeg": ".jpg",
                "image/png": ".png",
            }
            if typeHint in typeMap:
                fname += typeMap[typeHint]

        # make sure we write it in NFC form (pre-APFS Macs will autoconvert to NFD),
        # and return an NFC-encoded reference
        fname = unicodedata.normalize("NFC", fname)
        # ensure it's a valid filename
        base = self.cleanFilename(fname)
        (root, ext) = os.path.splitext(base)

        def repl(match):
            n = int(match.group(1))
            return " (%d)" % (n + 1)

        # find the first available name
        csum = checksum(data)
        while True:
            fname = root + ext
            path = os.path.join(self.dir(), fname)
            # if it doesn't exist, copy it directly
            if not os.path.exists(path):
                with open(path, "wb") as f:
                    f.write(data)
                return fname
            # if it's identical, reuse
            with open(path, "rb") as f:
                if checksum(f.read()) == csum:
                    return fname
            # otherwise, increment the index in the filename
            reg = r" \((\d+)\)$"
            if not re.search(reg, root):
                root = root + " (1)"
            else:
                root = re.sub(reg, repl, root)
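A standalone sketch of the rename loop above: when the target name is already taken by a different file, a " (n)" suffix is appended and incremented until a free name is found. The helper name is illustrative only.

import re

def next_candidate(root: str) -> str:
    reg = r" \((\d+)\)$"
    match = re.search(reg, root)
    if not match:
        return root + " (1)"
    return re.sub(reg, " (%d)" % (int(match.group(1)) + 1), root)

assert next_candidate("photo") == "photo (1)"
assert next_candidate("photo (1)") == "photo (2)"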

    # String manipulation
    ##########################################################################

    def filesInStr(self,
                   mid: Union[int, str],
                   string: str,
                   includeRemote: bool = False) -> List[str]:
        l = []
        model = self.col.models.get(mid)
        strings: List[str] = []
        if model["type"] == MODEL_CLOZE and "{{c" in string:
            # if the field has clozes in it, we'll need to expand the
            # possibilities so we can render latex
            strings = self._expandClozes(string)
        else:
            strings = [string]
        for string in strings:
            # handle latex
            string = mungeQA(string, None, None, model, None, self.col)
            # extract filenames
            for reg in self.regexps:
                for match in re.finditer(reg, string):
                    fname = match.group("fname")
                    isLocal = not re.match("(https?|ftp)://", fname.lower())
                    if isLocal or includeRemote:
                        l.append(fname)
        return l

    def _expandClozes(self, string: str) -> List[str]:
        ords = set(re.findall(r"{{c(\d+)::.+?}}", string))
        strings = []
        from anki.template.template import (
            clozeReg,
            CLOZE_REGEX_MATCH_GROUP_HINT,
            CLOZE_REGEX_MATCH_GROUP_CONTENT,
        )

        def qrepl(m):
            if m.group(CLOZE_REGEX_MATCH_GROUP_HINT):
                return "[%s]" % m.group(CLOZE_REGEX_MATCH_GROUP_HINT)
            else:
                return "[...]"

        def arepl(m):
            return m.group(CLOZE_REGEX_MATCH_GROUP_CONTENT)

        for ord in ords:
            s = re.sub(clozeReg % ord, qrepl, string)
            s = re.sub(clozeReg % ".+?", arepl, s)
            strings.append(s)
        strings.append(re.sub(clozeReg % ".+?", arepl, string))
        return strings
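A simplified, self-contained illustration of the cloze expansion above. The real pattern lives in anki.template.template.clozeReg; this stand-in only handles the basic {{cN::text::hint}} form, eliding the selected cloze (showing its hint if any) and revealing the rest.

import re

cloze = r"\{\{c%s::(.*?)(?:::(.*?))?\}\}"   # simplified stand-in for clozeReg

def expand(string):
    ords = sorted(set(re.findall(r"\{\{c(\d+)::", string)))
    out = []
    for ord in ords:
        # elide the selected cloze, reveal the others
        s = re.sub(cloze % ord, lambda m: "[%s]" % (m.group(2) or "..."), string)
        s = re.sub(cloze % r"\d+", lambda m: m.group(1), s)
        out.append(s)
    # plus one string with every cloze revealed
    out.append(re.sub(cloze % r"\d+", lambda m: m.group(1), string))
    return out

assert expand("{{c1::Canberra}} is in {{c2::Australia::country}}") == [
    "[...] is in Australia",
    "Canberra is in [country]",
    "Canberra is in Australia",
]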

    def transformNames(self, txt: str, func: Callable) -> Any:
        for reg in self.regexps:
            txt = re.sub(reg, func, txt)
        return txt

    def strip(self, txt: str) -> str:
        for reg in self.regexps:
            txt = re.sub(reg, "", txt)
        return txt

    def escapeImages(self, string: str, unescape: bool = False) -> str:
        fn: Callable
        if unescape:
            fn = urllib.parse.unquote
        else:
            fn = urllib.parse.quote

        def repl(match):
            tag = match.group(0)
            fname = match.group("fname")
            if re.match("(https?|ftp)://", fname):
                return tag
            return tag.replace(fname, fn(fname))

        for reg in self.imgRegexps:
            string = re.sub(reg, repl, string)
        return string

    # Rebuilding DB
    ##########################################################################

    def check(
        self,
        local: Optional[List[str]] = None
    ) -> Tuple[List[str], List[str], List[str]]:
        "Return (missingFiles, unusedFiles)."
        mdir = self.dir()
        # gather all media references in NFC form
        allRefs = set()
        for nid, mid, flds in self.col.db.execute(
                "select id, mid, flds from notes"):
            noteRefs = self.filesInStr(mid, flds)
            # check the refs are in NFC
            for f in noteRefs:
                # if they're not, we'll need to fix them first
                if f != unicodedata.normalize("NFC", f):
                    self._normalizeNoteRefs(nid)
                    noteRefs = self.filesInStr(mid, flds)
                    break
            allRefs.update(noteRefs)
        # loop through media folder
        unused = []
        if local is None:
            files = os.listdir(mdir)
        else:
            files = local
        renamedFiles = False
        dirFound = False
        warnings = []
        for file in files:
            if not local:
                if not os.path.isfile(file):
                    # ignore directories
                    dirFound = True
                    continue
            if file.startswith("_"):
                # leading _ says to ignore file
                continue

            if self.hasIllegal(file):
                name = file.encode(sys.getfilesystemencoding(),
                                   errors="replace")
                name = str(name, sys.getfilesystemencoding())
                warnings.append(
                    _("Invalid file name, please rename: %s") % name)
                continue

            nfcFile = unicodedata.normalize("NFC", file)
            # we enforce NFC fs encoding on non-macs
            if not isMac and not local:
                if file != nfcFile:
                    # delete if we already have the NFC form, otherwise rename
                    if os.path.exists(nfcFile):
                        os.unlink(file)
                        renamedFiles = True
                    else:
                        os.rename(file, nfcFile)
                        renamedFiles = True
                    file = nfcFile
            # compare
            if nfcFile not in allRefs:
                unused.append(file)
            else:
                allRefs.discard(nfcFile)
        # if we renamed any files to nfc format, we must rerun the check
        # to make sure the renamed files are not marked as unused
        if renamedFiles:
            return self.check(local=local)
        nohave = [x for x in allRefs if not x.startswith("_")]
        # make sure the media DB is valid
        try:
            self.findChanges()
        except DBError:
            self._deleteDB()

        if dirFound:
            warnings.append(
                _("Anki does not support files in subfolders of the collection.media folder."
                  ))
        return (nohave, unused, warnings)

    def _normalizeNoteRefs(self, nid) -> None:
        note = self.col.getNote(nid)
        for c, fld in enumerate(note.fields):
            nfc = unicodedata.normalize("NFC", fld)
            if nfc != fld:
                note.fields[c] = nfc
        note.flush()

    # Copying on import
    ##########################################################################

    def have(self, fname: str) -> bool:
        return os.path.exists(os.path.join(self.dir(), fname))

    # Illegal characters and paths
    ##########################################################################

    _illegalCharReg = re.compile(r'[][><:"/?*^\\|\0\r\n]')

    def stripIllegal(self, str: str) -> str:
        return re.sub(self._illegalCharReg, "", str)

    def hasIllegal(self, s: str) -> bool:
        if re.search(self._illegalCharReg, s):
            return True
        try:
            s.encode(sys.getfilesystemencoding())
        except UnicodeEncodeError:
            return True
        return False

    def cleanFilename(self, fname: str) -> str:
        fname = self.stripIllegal(fname)
        fname = self._cleanWin32Filename(fname)
        fname = self._cleanLongFilename(fname)
        if not fname:
            fname = "renamed"

        return fname

    def _cleanWin32Filename(self, fname: str) -> str:
        if not isWin:
            return fname

        # deal with things like con/prn/etc
        p = pathlib.WindowsPath(fname)
        if p.is_reserved():
            fname = "renamed" + fname
            assert not pathlib.WindowsPath(fname).is_reserved()

        return fname

    def _cleanLongFilename(self, fname: str) -> Any:
        # a fairly safe limit that should work on typical windows
        # paths and on eCryptfs partitions, even with a duplicate
        # suffix appended
        namemax = 136

        if isWin:
            pathmax = 240
        else:
            pathmax = 1024

        # cap namemax based on absolute path
        dirlen = len(os.path.dirname(os.path.abspath(fname)))
        remaining = pathmax - dirlen
        namemax = min(remaining, namemax)
        assert namemax > 0

        if len(fname) > namemax:
            head, ext = os.path.splitext(fname)
            headmax = namemax - len(ext)
            head = head[0:headmax]
            fname = head + ext
            assert len(fname) <= namemax

        return fname
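A self-contained sketch of the truncation above: the extension is preserved and the stem is trimmed so the whole name fits within namemax characters (path-length capping omitted for brevity).

import os

def cap_filename(fname: str, namemax: int = 136) -> str:
    if len(fname) <= namemax:
        return fname
    head, ext = os.path.splitext(fname)
    return head[: namemax - len(ext)] + ext

assert cap_filename("short.png") == "short.png"
assert len(cap_filename("x" * 200 + ".jpg")) == 136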

    # Tracking changes
    ##########################################################################

    def findChanges(self) -> None:
        "Scan the media folder if it's changed, and note any changes."
        if self._changed():
            self._logChanges()

    def haveDirty(self) -> Any:
        return self.db.scalar("select 1 from media where dirty=1 limit 1")

    def _mtime(self, path: str) -> int:
        return int(os.stat(path).st_mtime)

    def _checksum(self, path: str) -> str:
        with open(path, "rb") as f:
            return checksum(f.read())

    def _changed(self) -> int:
        "Return dir mtime if it has changed since the last findChanges()"
        # doesn't track edits, but user can add or remove a file to update
        mod = self.db.scalar("select dirMod from meta")
        mtime = self._mtime(self.dir())
        if not self._isFAT32() and mod and mod == mtime:
            return False
        return mtime

    def _logChanges(self) -> None:
        (added, removed) = self._changes()
        media = []
        for f, mtime in added:
            media.append((f, self._checksum(f), mtime, 1))
        for f in removed:
            media.append((f, None, 0, 1))
        # update media db
        self.db.executemany("insert or replace into media values (?,?,?,?)",
                            media)
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        self.db.commit()

    def _changes(self) -> Tuple[List[Tuple[str, int]], List[str]]:
        self.cache: Dict[str, Any] = {}
        for (name, csum, mod) in self.db.execute(
                "select fname, csum, mtime from media where csum is not null"):
            # previous entries may not have been in NFC form
            normname = unicodedata.normalize("NFC", name)
            self.cache[normname] = [csum, mod, False]
        added = []
        removed = []
        # loop through on-disk files
        with os.scandir(self.dir()) as it:
            for f in it:
                # ignore folders and thumbs.db
                if f.is_dir():
                    continue
                if f.name.lower() == "thumbs.db":
                    continue
                # and files with invalid chars
                if self.hasIllegal(f.name):
                    continue
                # empty files are invalid; clean them up and continue
                sz = f.stat().st_size
                if not sz:
                    os.unlink(f.name)
                    continue
                if sz > 100 * 1024 * 1024:
                    self.col.log("ignoring file over 100MB", f.name)
                    continue
                # check encoding
                normname = unicodedata.normalize("NFC", f.name)
                if not isMac:
                    if f.name != normname:
                        # wrong filename encoding which will cause sync errors
                        if os.path.exists(normname):
                            os.unlink(f.name)
                        else:
                            os.rename(f.name, normname)
                else:
                    # on Macs we can access the file using any normalization
                    pass

                # newly added?
                mtime = int(f.stat().st_mtime)
                if normname not in self.cache:
                    added.append((normname, mtime))
                else:
                    # modified since last time?
                    if mtime != self.cache[normname][1]:
                        # and has different checksum?
                        if self._checksum(normname) != self.cache[normname][0]:
                            added.append((normname, mtime))
                    # mark as used
                    self.cache[normname][2] = True
        # look for any entries in the cache that no longer exist on disk
        for (k, v) in list(self.cache.items()):
            if not v[2]:
                removed.append(k)
        return added, removed
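A reduced, dict-based model of the change scan above (not the DB-backed version): a file counts as added when it is new, or when both its mtime and checksum differ from the cached entry; cache entries with no file on disk are reported as removed.

def diff_media(cache, on_disk):
    # cache and on_disk both map fname -> (csum, mtime)
    added = [f for f, (csum, mtime) in on_disk.items()
             if f not in cache
             or (mtime != cache[f][1] and csum != cache[f][0])]
    removed = [f for f in cache if f not in on_disk]
    return added, removed

cache = {"a.jpg": ("c1", 100), "b.jpg": ("c2", 100)}
disk = {"a.jpg": ("c1", 100), "c.jpg": ("c3", 200)}
assert diff_media(cache, disk) == (["c.jpg"], ["b.jpg"])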

    # Syncing-related
    ##########################################################################

    def lastUsn(self) -> Any:
        return self.db.scalar("select lastUsn from meta")

    def setLastUsn(self, usn) -> None:
        self.db.execute("update meta set lastUsn = ?", usn)
        self.db.commit()

    def syncInfo(self, fname) -> Any:
        ret = self.db.first("select csum, dirty from media where fname=?",
                            fname)
        return ret or (None, 0)

    def markClean(self, fnames) -> None:
        for fname in fnames:
            self.db.execute("update media set dirty=0 where fname=?", fname)

    def syncDelete(self, fname) -> None:
        if os.path.exists(fname):
            os.unlink(fname)
        self.db.execute("delete from media where fname=?", fname)

    def mediaCount(self) -> Any:
        return self.db.scalar(
            "select count() from media where csum is not null")

    def dirtyCount(self) -> Any:
        return self.db.scalar("select count() from media where dirty=1")

    def forceResync(self) -> None:
        self.db.execute("delete from media")
        self.db.execute("update meta set lastUsn=0,dirMod=0")
        self.db.commit()
        self.db.setAutocommit(True)
        self.db.execute("vacuum")
        self.db.execute("analyze")
        self.db.setAutocommit(False)

    # Media syncing: zips
    ##########################################################################

    def mediaChangesZip(self) -> Tuple[bytes, list]:
        f = io.BytesIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)

        fnames = []
        # meta is a list of (fname, zipname) pairs; an empty zipname
        # marks a deleted file
        meta = []
        sz = 0

        for c, (fname, csum) in enumerate(
                self.db.execute("select fname, csum from media where dirty=1"
                                " limit %d" % SYNC_ZIP_COUNT)):

            fnames.append(fname)
            normname = unicodedata.normalize("NFC", fname)

            if csum:
                self.col.log("+media zip", fname)
                z.write(fname, str(c))
                meta.append((normname, str(c)))
                sz += os.path.getsize(fname)
            else:
                self.col.log("-media zip", fname)
                meta.append((normname, ""))

            if sz >= SYNC_ZIP_SIZE:
                break

        z.writestr("_meta", json.dumps(meta))
        z.close()
        return f.getvalue(), fnames

    def addFilesFromZip(self, zipData) -> int:
        "Extract zip data; true if finished."
        f = io.BytesIO(zipData)
        z = zipfile.ZipFile(f, "r")
        media = []
        # get meta info first
        meta = json.loads(z.read("_meta").decode("utf8"))
        # then loop through all files
        cnt = 0
        for i in z.infolist():
            if i.filename == "_meta":
                # ignore previously-retrieved meta
                continue
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                # normalize name
                name = unicodedata.normalize("NFC", name)
                # save file
                with open(name, "wb") as f:  # type: ignore
                    f.write(data)
                # update db
                media.append((name, csum, self._mtime(name), 0))
                cnt += 1
        if media:
            self.db.executemany(
                "insert or replace into media values (?,?,?,?)", media)
        return cnt
def create_new_db(path):
    db = DB(path)
    db.setAutocommit(True)
    _createDB(db)
    db.close()
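
To sanity-check the helper above, the DB wrapper used throughout these examples can reopen the file and run an integrity check. A minimal sketch; the path is hypothetical:

# Hedged sketch: create an empty database and verify it opens cleanly.
create_new_db("/tmp/empty.anki2")              # hypothetical path
db = DB("/tmp/empty.anki2")
assert db.scalar("pragma integrity_check") == "ok"
db.close()
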
Ejemplo n.º 21
0
 def check(self, path):
     "True if deck looks ok."
     with DB(self._utf8(path)) as db:
         return self._check(db)
Ejemplo n.º 22
0
 def _openDB(self, path):
     self.tmppath = tmpfile(suffix=".anki2")
     shutil.copy(path, self._utf8(self.tmppath))
     self.db = DB(self.tmppath)
Ejemplo n.º 23
0
def Collection(path, lock=True, server=False, sync=True, log=False):
    "Open a new or existing collection. Path must be unicode."
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    if sync:
        db.execute("pragma cache_size = 10000")
        db.execute("pragma journal_mode = wal")
    else:
        db.execute("pragma synchronous = off")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _Collection(db, server, log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        col.lock()
    return col
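
A minimal usage sketch for the factory above. The path is hypothetical, must end in ".anki2", and col.close() is assumed to save and release the lock taken by col.lock():

# Hedged sketch: open (or create) a collection, then close it again.
col = Collection("/tmp/test.anki2", lock=True, sync=True)
try:
    assert col.db is not None   # the underlying DB wrapper
finally:
    col.close()
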
Ejemplo n.º 24
0
def Collection(path: str,
               lock: bool = True,
               server: Optional[ServerData] = None,
               log: bool = False) -> _Collection:
    "Open a new or existing collection. Path must be unicode."
    assert path.endswith(".anki2")
    (media_dir, media_db) = media_paths_from_col_path(path)
    log_path = ""
    if not server:
        log_path = path.replace(".anki2", "2.log")
    backend = RustBackend(path, media_dir, media_db, log_path)
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _Collection(db, backend=backend, server=server, log=log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        try:
            col.lock()
        except:
            col.db.close()
            raise
    return col
Ejemplo n.º 25
0
class ProfileManager:
    def __init__(self, base=None):
        ## Settings which should be forgotten each Anki restart
        self.session = {}
        self.name = None
        self.db = None
        self.profile: Optional[Dict] = None
        # instantiate base folder
        self.base: str
        self._setBaseFolder(base)

    def setupMeta(self) -> LoadMetaResult:
        # load metadata
        res = self._loadMeta()
        self.firstRun = res.firstTime
        return res

    # profile load on startup
    def openProfile(self, profile) -> None:
        if profile:
            if profile not in self.profiles():
                QMessageBox.critical(
                    None, tr(TR.QT_MISC_ERROR), tr(TR.PROFILES_PROFILE_DOES_NOT_EXIST)
                )
                sys.exit(1)
            try:
                self.load(profile)
            except TypeError as exc:
                raise Exception("Provided profile does not exist.") from exc

    # Base creation
    ######################################################################

    def ensureBaseExists(self) -> None:
        self._ensureExists(self.base)

    # Folder migration
    ######################################################################

    def _oldFolderLocation(self) -> str:
        if isMac:
            return os.path.expanduser("~/Documents/Anki")
        elif isWin:
            from aqt.winpaths import get_personal

            return os.path.join(get_personal(), "Anki")
        else:
            p = os.path.expanduser("~/Anki")
            if os.path.isdir(p):
                return p
            return os.path.expanduser("~/Documents/Anki")

    def maybeMigrateFolder(self):
        newBase = self.base
        oldBase = self._oldFolderLocation()

        if oldBase and not os.path.exists(self.base) and os.path.isdir(oldBase):
            try:
                # if anything goes wrong with UI, reset to the old behavior of always migrating
                self._tryToMigrateFolder(oldBase)
            except AnkiRestart:
                raise
            except:
                print("migration failed")
                self.base = newBase
                shutil.move(oldBase, self.base)

    def _tryToMigrateFolder(self, oldBase):
        from PyQt5 import QtGui, QtWidgets

        app = QtWidgets.QApplication([])
        icon = QtGui.QIcon()
        icon.addPixmap(
            QtGui.QPixmap(":/icons/anki.png"),
            QtGui.QIcon.Normal,
            QtGui.QIcon.Off,
        )
        window_title = "Data Folder Migration"
        migration_directories = f"\n\n    {oldBase}\n\nto\n\n    {self.base}"

        confirmation = QMessageBox()
        confirmation.setIcon(QMessageBox.Warning)
        confirmation.setWindowIcon(icon)
        confirmation.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel)  # type: ignore
        confirmation.setWindowTitle(window_title)
        confirmation.setText(
            "Anki needs to move its data folder from Documents/Anki to a new location. Proceed?"
        )
        retval = confirmation.exec()

        if retval == QMessageBox.Ok:
            progress = QMessageBox()
            progress.setIcon(QMessageBox.Information)
            progress.setStandardButtons(QMessageBox.NoButton)
            progress.setWindowIcon(icon)
            progress.setWindowTitle(window_title)
            progress.setText("Please wait...")
            progress.show()
            app.processEvents()  # type: ignore

            shutil.move(oldBase, self.base)
            progress.hide()

            completion = QMessageBox()
            completion.setIcon(QMessageBox.Information)
            completion.setStandardButtons(QMessageBox.Ok)
            completion.setWindowIcon(icon)
            completion.setWindowTitle(window_title)
            completion.setText("Migration complete. Please start Anki again.")
            completion.show()
            completion.exec()
        else:
            diag = QMessageBox()
            diag.setIcon(QMessageBox.Warning)
            diag.setWindowIcon(icon)
            diag.setStandardButtons(QMessageBox.Ok)
            diag.setWindowTitle(window_title)
            diag.setText(
                "Migration aborted. If you would like to keep the old folder location, please "
                "see the Startup Options section of the manual. Anki will now quit."
            )
            diag.exec()

        raise AnkiRestart(exitcode=0)

    # Profile load/save
    ######################################################################

    def profiles(self) -> List:
        def names() -> List:
            return self.db.list("select name from profiles where name != '_global'")

        n = names()
        if not n:
            self._ensureProfile()
            n = names()

        return n

    def _unpickle(self, data) -> Any:
        class Unpickler(pickle.Unpickler):
            def find_class(self, module: str, name: str) -> Any:
                if module == "PyQt5.sip":
                    try:
                        import PyQt5.sip  # pylint: disable=unused-import
                    except:
                        # use old sip location
                        module = "sip"
                fn = super().find_class(module, name)
                if module == "sip" and name == "_unpickle_type":

                    def wrapper(mod, obj, args):
                        if mod.startswith("PyQt4") and obj == "QByteArray":
                            # can't trust str objects from python 2
                            return QByteArray()
                        return fn(mod, obj, args)

                    return wrapper
                else:
                    return fn

        up = Unpickler(io.BytesIO(data), errors="ignore")
        return up.load()

    def _pickle(self, obj) -> bytes:
        # pyqt needs to be updated to fix
        # 'PY_SSIZE_T_CLEAN will be required for '#' formats' warning
        # check if this is still required for pyqt6
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            return pickle.dumps(obj, protocol=4)

    def load(self, name) -> bool:
        assert name != "_global"
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = ?", name
        )
        self.name = name
        try:
            self.profile = self._unpickle(data)
        except:
            QMessageBox.warning(
                None,
                tr(TR.PROFILES_PROFILE_CORRUPT),
                tr(TR.PROFILES_ANKI_COULD_NOT_READ_YOUR_PROFILE),
            )
            traceback.print_stack()
            print("resetting corrupt profile")
            self.profile = profileConf.copy()
            self.save()
        return True

    def save(self) -> None:
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.profile), self.name)
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()

    def create(self, name) -> None:
        prof = profileConf.copy()
        self.db.execute(
            "insert or ignore into profiles values (?, ?)", name, self._pickle(prof)
        )
        self.db.commit()

    def remove(self, name) -> None:
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def trashCollection(self) -> None:
        p = self.collectionPath()
        if os.path.exists(p):
            send2trash(p)

    def rename(self, name) -> None:
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (oldFolder.lower() == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = "".join([oldFolder, "-temp"])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(
                        tr(TR.PROFILES_PLEASE_REMOVE_THE_FOLDER_AND, val=midFolder)
                    )
                    self.name = oldName
                    return
            else:
                showWarning(tr(TR.PROFILES_FOLDER_ALREADY_EXISTS))
                self.name = oldName
                return

        # update name
        self.db.execute("update profiles set name = ? where name = ?", name, oldName)
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except Exception as e:
            self.db.rollback()
            if "WinError 5" in str(e):
                showWarning(tr(TR.PROFILES_ANKI_COULD_NOT_RENAME_YOUR_PROFILE))
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True) -> str:
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self) -> str:
        return self._ensureExists(os.path.join(self.base, "addons21"))

    def backupFolder(self) -> str:
        return self._ensureExists(os.path.join(self.profileFolder(), "backups"))

    def collectionPath(self) -> str:
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Downgrade
    ######################################################################

    def downgrade(self, profiles: List[str]) -> List[str]:
        "Downgrade all profiles. Return a list of profiles that couldn't be opened."
        problem_profiles = []
        for name in profiles:
            path = os.path.join(self.base, name, "collection.anki2")
            if not os.path.exists(path):
                continue
            with DB(path) as db:
                if db.scalar("select ver from col") == 11:
                    # nothing to do
                    continue
            try:
                c = Collection(path)
                c.close(save=False, downgrade=True)
            except Exception as e:
                print(e)
                problem_profiles.append(name)
        return problem_profiles

    # Helpers
    ######################################################################

    def _ensureExists(self, path: str) -> str:
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _setBaseFolder(self, cmdlineBase: Optional[str]) -> None:
        if cmdlineBase:
            self.base = os.path.abspath(cmdlineBase)
        elif os.environ.get("ANKI_BASE"):
            self.base = os.path.abspath(os.environ["ANKI_BASE"])
        else:
            self.base = self._defaultBase()
            self.maybeMigrateFolder()
        self.ensureBaseExists()

    def _defaultBase(self) -> str:
        if isWin:
            from aqt.winpaths import get_appdata

            return os.path.join(get_appdata(), "Anki2")
        elif isMac:
            return os.path.expanduser("~/Library/Application Support/Anki2")
        else:
            dataDir = os.environ.get(
                "XDG_DATA_HOME", os.path.expanduser("~/.local/share")
            )
            if not os.path.exists(dataDir):
                os.makedirs(dataDir)
            return os.path.join(dataDir, "Anki2")

    def _loadMeta(self, retrying=False) -> LoadMetaResult:
        result = LoadMetaResult()
        result.firstTime = False
        result.loadError = retrying

        opath = os.path.join(self.base, "prefs.db")
        path = os.path.join(self.base, "prefs21.db")
        if not retrying and os.path.exists(opath) and not os.path.exists(path):
            shutil.copy(opath, path)

        result.firstTime = not os.path.exists(path)

        def recover() -> None:
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            for suffix in ("", "-journal"):
                fpath = path + suffix
                if os.path.exists(fpath):
                    os.unlink(fpath)

        # open DB file and read data
        try:
            self.db = DB(path)
            assert self.db.scalar("pragma integrity_check") == "ok"
            self.db.execute(
                """
create table if not exists profiles
(name text primary key, data blob not null);"""
            )
            data = self.db.scalar(
                "select cast(data as blob) from profiles where name = '_global'"
            )
        except:
            traceback.print_stack()
            if result.loadError:
                # already failed, prevent infinite loop
                raise
            # delete files and try again
            recover()
            return self._loadMeta(retrying=True)

        # try to read data
        if not result.firstTime:
            try:
                self.meta = self._unpickle(data)
                return result
            except:
                traceback.print_stack()
                print("resetting corrupt _global")
                result.loadError = True
                result.firstTime = True

        # if new or read failed, create a default global profile
        self.meta = metaConf.copy()
        self.db.execute(
            "insert or replace into profiles values ('_global', ?)",
            self._pickle(metaConf),
        )
        return result

    def _ensureProfile(self) -> None:
        "Create a new profile if none exists."
        self.create(tr(TR.PROFILES_USER_1))
        p = os.path.join(self.base, "README.txt")
        with open(p, "w", encoding="utf8") as file:
            file.write(
                without_unicode_isolation(
                    tr(
                        TR.PROFILES_FOLDER_README,
                        link=appHelpSite + "files?id=startup-options",
                    )
                )
            )

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def setDefaultLang(self, idx: int) -> None:
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass

        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.setWindowFlags(d.windowFlags() & ~Qt.WindowContextHelpButtonHint)  # type: ignore
        qconnect(d.accepted, self._onLangSelected)
        qconnect(d.rejected, lambda: True)
        # update list
        f.lang.addItems([x[0] for x in anki.lang.langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self) -> None:
        f = self.langForm
        obj = anki.lang.langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        r = QMessageBox.question(
            None, "Anki", tr(TR.PROFILES_CONFIRM_LANG_CHOICE, lang=name), QMessageBox.Yes | QMessageBox.No, QMessageBox.No  # type: ignore
        )
        if r != QMessageBox.Yes:
            return self.setDefaultLang(f.lang.currentRow())
        self.setLang(code)

    def setLang(self, code) -> None:
        self.meta["defaultLang"] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()
        anki.lang.set_lang(code, locale_dir())

    # OpenGL
    ######################################################################

    def _gldriver_path(self) -> str:
        return os.path.join(self.base, "gldriver")

    def video_driver(self) -> VideoDriver:
        path = self._gldriver_path()
        try:
            with open(path) as file:
                text = file.read().strip()
                return VideoDriver(text).constrained_to_platform()
        except (ValueError, OSError):
            return VideoDriver.default_for_platform()

    def set_video_driver(self, driver: VideoDriver) -> None:
        with open(self._gldriver_path(), "w") as file:
            file.write(driver.value)

    def set_next_video_driver(self) -> None:
        self.set_video_driver(self.video_driver().next())

    # Shared options
    ######################################################################

    def uiScale(self) -> float:
        scale = self.meta.get("uiScale", 1.0)
        return max(scale, 1)

    def setUiScale(self, scale: float) -> None:
        self.meta["uiScale"] = scale

    def last_addon_update_check(self) -> int:
        return self.meta.get("last_addon_update_check", 0)

    def set_last_addon_update_check(self, secs) -> None:
        self.meta["last_addon_update_check"] = secs

    def night_mode(self) -> bool:
        return self.meta.get("night_mode", False)

    def set_night_mode(self, on: bool) -> None:
        self.meta["night_mode"] = on

    def dark_mode_widgets(self) -> bool:
        return self.meta.get("dark_mode_widgets", False)

    # Profile-specific
    ######################################################################

    def interrupt_audio(self) -> bool:
        return self.profile.get("interrupt_audio", True)

    def set_interrupt_audio(self, val: bool) -> None:
        self.profile["interrupt_audio"] = val
        aqt.sound.av_player.interrupt_current_audio = val

    def set_sync_key(self, val: Optional[str]) -> None:
        self.profile["syncKey"] = val

    def set_sync_username(self, val: Optional[str]) -> None:
        self.profile["syncUser"] = val

    def set_host_number(self, val: Optional[int]) -> None:
        self.profile["hostNum"] = val or 0

    def media_syncing_enabled(self) -> bool:
        return self.profile["syncMedia"]

    def auto_syncing_enabled(self) -> bool:
        return self.profile["autoSync"]

    def sync_auth(self) -> Optional[SyncAuth]:
        hkey = self.profile.get("syncKey")
        if not hkey:
            return None
        return SyncAuth(hkey=hkey, host_number=self.profile.get("hostNum", 0))

    def clear_sync_auth(self) -> None:
        self.profile["syncKey"] = None
        self.profile["syncUser"] = None
        self.profile["hostNum"] = 0

    def auto_sync_media_minutes(self) -> int:
        return self.profile.get("autoSyncMediaMinutes", 15)

    def set_auto_sync_media_minutes(self, val: int) -> None:
        self.profile["autoSyncMediaMinutes"] = val

    def recording_driver(self) -> RecordingDriver:
        if driver := self.profile.get("recordingDriver"):
            try:
                return RecordingDriver(driver)
            except ValueError:
                # revert to default
                pass
        return RecordingDriver.QtAudioInput
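
On startup the class above is typically driven in this order; a minimal sketch using a hypothetical base folder so the real Anki data directory is left untouched:

# Hedged sketch of the ProfileManager startup flow shown above.
pm = ProfileManager(base="/tmp/anki_base")   # hypothetical base folder
pm.setupMeta()                   # opens or creates prefs21.db
names = pm.profiles()            # creates a default profile on first run
pm.load(names[0])
print(pm.collectionPath())       # <base>/<profile>/collection.anki2
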
Ejemplo n.º 26
0
class MediaManager:

    soundRegexps = [r"(?i)(\[sound:(?P<fname>[^]]+)\])"]
    imgRegexps = [
        # src element quoted case
        r"(?i)(<img[^>]* src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
        # unquoted case
        r"(?i)(<img[^>]* src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
    ]
    regexps = soundRegexps + imgRegexps

    def __init__(self, col, server):
        self.col = col
        if server:
            self._dir = None
            return
        # media directory
        self._dir = re.sub(r"(?i)\.(anki2)$", ".media", self.col.path)
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        # change database
        self.connect()

    def connect(self):
        if self.col.server:
            return
        path = self.dir()+".db2"
        create = not os.path.exists(path)
        self.db = DB(path)
        if create:
            self._initDB()
        self.maybeUpgrade()

    def _initDB(self):
        self.db.executescript("""
create table media (
 fname text not null primary key,
 csum text,           -- null indicates deleted file
 mtime int not null,  -- zero if deleted
 dirty int not null
);

create index idx_media_dirty on media (dirty);

create table meta (dirMod int, lastUsn int); insert into meta values (0, 0);
""")

    def maybeUpgrade(self):
        oldpath = self.dir()+".db"
        if os.path.exists(oldpath):
            self.db.execute('attach "../collection.media.db" as old')
            try:
                self.db.execute("""
    insert into media
     select m.fname, csum, mod, ifnull((select 1 from log l2 where l2.fname=m.fname), 0) as dirty
     from old.media m
     left outer join old.log l using (fname)
     union
     select fname, null, 0, 1 from old.log where type=1;""")
                self.db.execute("delete from meta")
                self.db.execute("""
    insert into meta select dirMod, usn from old.meta
    """)
                self.db.commit()
            except Exception as e:
                # if we couldn't import the old db for some reason, just start
                # anew
                self.col.log("failed to import old media db:"+traceback.format_exc())
            self.db.execute("detach old")
            npath = os.path.join(self.dir(), "collection.media.db.old")
            if os.path.exists(npath):
                os.unlink(npath)
            os.rename(os.path.join(self.dir(), "collection.media.db"), npath)

    def close(self):
        if self.col.server:
            return
        self.db.close()
        self.db = None

    def dir(self):
        return self._dir

    def _isFAT32(self):
        if not isWin:
            return
        import win32api, win32file
        try:
            name = win32file.GetVolumeNameForVolumeMountPoint(self._dir[:3])
        except:
            # mapped & unmapped network drive; pray that it's not vfat
            return
        if win32api.GetVolumeInformation(name)[4].lower().startswith("fat"):
            return True

    # Adding media
    ##########################################################################
    # opath must be in unicode

    def addFile(self, opath):
        return self.writeData(opath, open(opath, "rb").read())

    def writeData(self, opath, data):
        # if fname is a full path, use only the basename
        fname = os.path.basename(opath)
        # make sure we write it in NFC form (on mac will autoconvert to NFD),
        # and return an NFC-encoded reference
        fname = unicodedata.normalize("NFC", fname)
        # remove any dangerous characters
        base = self.stripIllegal(fname)
        (root, ext) = os.path.splitext(base)
        def repl(match):
            n = int(match.group(1))
            return " (%d)" % (n+1)
        # find the first available name
        csum = checksum(data)
        while True:
            fname = root + ext
            path = os.path.join(self.dir(), fname)
            # if it doesn't exist, copy it directly
            if not os.path.exists(path):
                open(path, "wb").write(data)
                return fname
            # if it's identical, reuse
            if checksum(open(path, "rb").read()) == csum:
                return fname
            # otherwise, increment the index in the filename
            reg = " \((\d+)\)$"
            if not re.search(reg, root):
                root = root + " (1)"
            else:
                root = re.sub(reg, repl, root)

    # String manipulation
    ##########################################################################

    def filesInStr(self, mid, string, includeRemote=False):
        l = []
        model = self.col.models.get(mid)
        strings = []
        if model['type'] == MODEL_CLOZE and "{{c" in string:
            # if the field has clozes in it, we'll need to expand the
            # possibilities so we can render latex
            strings = self._expandClozes(string)
        else:
            strings = [string]
        for string in strings:
            # handle latex
            string = mungeQA(string, None, None, model, None, self.col)
            # extract filenames
            for reg in self.regexps:
                for match in re.finditer(reg, string):
                    fname = match.group("fname")
                    isLocal = not re.match("(https?|ftp)://", fname.lower())
                    if isLocal or includeRemote:
                        l.append(fname)
        return l

    def _expandClozes(self, string):
        ords = set(re.findall(r"{{c(\d+)::.+?}}", string))
        strings = []
        from anki.template.template import clozeReg
        def qrepl(m):
            if m.group(3):
                return "[%s]" % m.group(3)
            else:
                return "[...]"
        def arepl(m):
            return m.group(1)
        for ord in ords:
            s = re.sub(clozeReg%ord, qrepl, string)
            s = re.sub(clozeReg%".+?", "\\1", s)
            strings.append(s)
        strings.append(re.sub(clozeReg%".+?", arepl, string))
        return strings

    def transformNames(self, txt, func):
        for reg in self.regexps:
            txt = re.sub(reg, func, txt)
        return txt

    def strip(self, txt):
        for reg in self.regexps:
            txt = re.sub(reg, "", txt)
        return txt

    def escapeImages(self, string, unescape=False):
        if unescape:
            fn = urllib.parse.unquote
        else:
            fn = urllib.parse.quote
        def repl(match):
            tag = match.group(0)
            fname = match.group("fname")
            if re.match("(https?|ftp)://", fname):
                return tag
            return tag.replace(fname, fn(fname))
        for reg in self.imgRegexps:
            string = re.sub(reg, repl, string)
        return string

    # Rebuilding DB
    ##########################################################################

    def check(self, local=None):
        "Return (missingFiles, unusedFiles)."
        mdir = self.dir()
        # gather all media references in NFC form
        allRefs = set()
        for nid, mid, flds in self.col.db.execute("select id, mid, flds from notes"):
            noteRefs = self.filesInStr(mid, flds)
            # check the refs are in NFC
            for f in noteRefs:
                # if they're not, we'll need to fix them first
                if f != unicodedata.normalize("NFC", f):
                    self._normalizeNoteRefs(nid)
                    noteRefs = self.filesInStr(mid, flds)
                    break
            allRefs.update(noteRefs)
        # loop through media folder
        unused = []
        invalid = []
        if local is None:
            files = os.listdir(mdir)
        else:
            files = local
        renamedFiles = False
        for file in files:
            path = os.path.join(self.dir(), file)
            if not local:
                if not os.path.isfile(path):
                    # ignore directories
                    continue
            if file.startswith("_"):
                # leading _ says to ignore file
                continue
            nfcFile = unicodedata.normalize("NFC", file)
            nfcPath = os.path.join(self.dir(), nfcFile)
            # we enforce NFC fs encoding on non-macs; on macs we'll have gotten
            # NFD so we use the above variable for comparing references
            if not isMac and not local:
                if file != nfcFile:
                    # delete if we already have the NFC form, otherwise rename
                    if os.path.exists(nfcPath):
                        os.unlink(path)
                        renamedFiles = True
                    else:
                        os.rename(path, nfcPath)
                        renamedFiles = True
                    file = nfcFile
            # compare
            if nfcFile not in allRefs:
                unused.append(file)
            else:
                allRefs.discard(nfcFile)
        # if we renamed any files to nfc format, we must rerun the check
        # to make sure the renamed files are not marked as unused
        if renamedFiles:
            return self.check(local=local)
        nohave = [x for x in allRefs if not x.startswith("_")]
        return (nohave, unused, invalid)

    def _normalizeNoteRefs(self, nid):
        note = self.col.getNote(nid)
        for c, fld in enumerate(note.fields):
            nfc = unicodedata.normalize("NFC", fld)
            if nfc != fld:
                note.fields[c] = nfc
        note.flush()

    # Copying on import
    ##########################################################################

    def have(self, fname):
        return os.path.exists(os.path.join(self.dir(), fname))

    # Illegal characters
    ##########################################################################

    _illegalCharReg = re.compile(r'[][><:"/?*^\\|\0\r\n]')

    def stripIllegal(self, str):
        return re.sub(self._illegalCharReg, "", str)

    def hasIllegal(self, str):
        return bool(re.search(self._illegalCharReg, str))

    # Tracking changes
    ##########################################################################

    def findChanges(self):
        "Scan the media folder if it's changed, and note any changes."
        if self._changed():
            self._logChanges()

    def haveDirty(self):
        return self.db.scalar("select 1 from media where dirty=1 limit 1")

    def _mtime(self, path):
        return int(os.stat(path).st_mtime)

    def _checksum(self, path):
        return checksum(open(path, "rb").read())

    def _changed(self):
        "Return dir mtime if it has changed since the last findChanges()"
        # doesn't track edits, but user can add or remove a file to update
        mod = self.db.scalar("select dirMod from meta")
        mtime = self._mtime(self.dir())
        if not self._isFAT32() and mod and mod == mtime:
            return False
        return mtime

    def _logChanges(self):
        (added, removed) = self._changes()
        media = []
        for f in added:
            path = os.path.join(self.dir(), f)
            mt = self._mtime(path)
            media.append((f, self._checksum(path), mt, 1))
        for f in removed:
            media.append((f, None, 0, 1))
        # update media db
        self.db.executemany("insert or replace into media values (?,?,?,?)",
                            media)
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        self.db.commit()

    def _changes(self):
        self.cache = {}
        for (name, csum, mod) in self.db.execute(
            "select fname, csum, mtime from media where csum is not null"):
            self.cache[name] = [csum, mod, False]
        added = []
        removed = []
        # loop through on-disk files
        for f in os.listdir(self.dir()):
            path = os.path.join(self.dir(), f)
            # ignore folders and thumbs.db
            if os.path.isdir(path):
                continue
            if f.lower() == "thumbs.db":
                continue
            # and files with invalid chars
            if self.hasIllegal(f):
                continue
            # empty files are invalid; clean them up and continue
            sz = os.path.getsize(path)
            if not sz:
                os.unlink(path)
                continue
            if sz > 100*1024*1024:
                self.col.log("ignoring file over 100MB", f)
                continue
            # check encoding
            if not isMac:
                normf = unicodedata.normalize("NFC", f)
                normpath = os.path.join(self.dir(), normf)
                if f != normf:
                    # wrong filename encoding which will cause sync errors
                    if os.path.exists(normpath):
                        os.unlink(path)
                    else:
                        os.rename(path, normpath)
            # newly added?
            if f not in self.cache:
                added.append(f)
            else:
                # modified since last time?
                if self._mtime(path) != self.cache[f][1]:
                    # and has different checksum?
                    if self._checksum(path) != self.cache[f][0]:
                        added.append(f)
                # mark as used
                self.cache[f][2] = True
        # look for any entries in the cache that no longer exist on disk
        for (k, v) in list(self.cache.items()):
            if not v[2]:
                removed.append(k)
        return added, removed

    # Syncing-related
    ##########################################################################

    def lastUsn(self):
        return self.db.scalar("select lastUsn from meta")

    def setLastUsn(self, usn):
        self.db.execute("update meta set lastUsn = ?", usn)
        self.db.commit()

    def syncInfo(self, fname):
        ret = self.db.first(
            "select csum, dirty from media where fname=?", fname)
        return ret or (None, 0)

    def markClean(self, fnames):
        for fname in fnames:
            self.db.execute(
                "update media set dirty=0 where fname=?", fname)

    def syncDelete(self, fname):
        path = os.path.join(self.dir(), fname)
        if os.path.exists(path):
            os.unlink(path)
        self.db.execute("delete from media where fname=?", fname)

    def mediaCount(self):
        return self.db.scalar(
            "select count() from media where csum is not null")

    def dirtyCount(self):
        return self.db.scalar(
            "select count() from media where dirty=1")

    def forceResync(self):
        self.db.execute("delete from media")
        self.db.execute("update meta set lastUsn=0,dirMod=0")
        self.db.commit()
        self.db.setAutocommit(True)
        self.db.execute("vacuum")
        self.db.execute("analyze")
        self.db.setAutocommit(False)

    # Media syncing: zips
    ##########################################################################

    def mediaChangesZip(self):
        f = io.BytesIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)

        fnames = []
        # meta is a list of (fname, zipname) pairs; an empty zipname
        # marks a deleted file
        meta = []
        sz = 0

        for c, (fname, csum) in enumerate(self.db.execute(
                        "select fname, csum from media where dirty=1"
                        " limit %d"%SYNC_ZIP_COUNT)):

            path = os.path.join(self.dir(), fname)
            fnames.append(fname)
            normname = unicodedata.normalize("NFC", fname)

            if csum:
                self.col.log("+media zip", fname)
                z.write(path, str(c))
                meta.append((normname, str(c)))
                sz += os.path.getsize(path)
            else:
                self.col.log("-media zip", fname)
                meta.append((normname, ""))

            if sz >= SYNC_ZIP_SIZE:
                break

        z.writestr("_meta", json.dumps(meta))
        z.close()
        return f.getvalue(), fnames

    def addFilesFromZip(self, zipData):
        "Extract zip data; true if finished."
        f = io.BytesIO(zipData)
        z = zipfile.ZipFile(f, "r")
        media = []
        # get meta info first
        meta = json.loads(z.read("_meta").decode("utf8"))
        # then loop through all files
        cnt = 0
        for i in z.infolist():
            if i.filename == "_meta":
                # ignore previously-retrieved meta
                continue
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                # normalize name for platform
                if isMac:
                    name = unicodedata.normalize("NFD", name)
                else:
                    name = unicodedata.normalize("NFC", name)
                # save file
                path = os.path.join(self.dir(), name)
                open(path, "wb").write(data)
                # update db
                media.append((name, csum, self._mtime(path), 0))
                cnt += 1
        if media:
            self.db.executemany(
                "insert or replace into media values (?,?,?,?)", media)
        return cnt
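
A minimal sketch of how the media manager above is exercised, assuming an already-open collection object col (with col.server set to False); the source file path is hypothetical:

# Hedged sketch: add a file, log folder changes, then run the consistency check.
mm = MediaManager(col, server=False)
new_name = mm.addFile("/tmp/diagram.png")    # hypothetical file
mm.findChanges()                             # records additions/removals in the media db
missing, unused, invalid = mm.check()
print(len(missing), "missing,", len(unused), "unused")
mm.close()
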
Ejemplo n.º 27
0
class ProfileManager:
    def __init__(self, base=None):
        self.name = None
        self.db = None
        # instantiate base folder
        self._setBaseFolder(base)

        aqt.sound.setMpvConfigBase(self.base)

    def setupMeta(self) -> LoadMetaResult:
        # load metadata
        res = self._loadMeta()
        self.firstRun = res.firstTime
        return res

    # profile load on startup
    def openProfile(self, profile):
        if profile:
            if profile not in self.profiles():
                QMessageBox.critical(None, "Error",
                                     "Requested profile does not exist.")
                sys.exit(1)
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureBaseExists(self):
        self._ensureExists(self.base)

    # Folder migration
    ######################################################################

    def _oldFolderLocation(self):
        if isMac:
            return os.path.expanduser("~/Documents/Anki")
        elif isWin:
            from aqt.winpaths import get_personal

            return os.path.join(get_personal(), "Anki")
        else:
            p = os.path.expanduser("~/Anki")
            if os.path.isdir(p):
                return p
            return os.path.expanduser("~/Documents/Anki")

    def maybeMigrateFolder(self):
        oldBase = self._oldFolderLocation()

        if oldBase and not os.path.exists(
                self.base) and os.path.isdir(oldBase):
            shutil.move(oldBase, self.base)

    # Profile load/save
    ######################################################################

    def profiles(self):
        def names():
            return self.db.list(
                "select name from profiles where name != '_global'")

        n = names()
        if not n:
            self._ensureProfile()
            n = names()

        return n

    def _unpickle(self, data):
        class Unpickler(pickle.Unpickler):
            def find_class(self, module, name):
                if module == "PyQt5.sip":
                    try:
                        import PyQt5.sip  # type: ignore # pylint: disable=unused-import
                    except:
                        # use old sip location
                        module = "sip"
                fn = super().find_class(module, name)
                if module == "sip" and name == "_unpickle_type":

                    def wrapper(mod, obj, args):
                        if mod.startswith("PyQt4") and obj == "QByteArray":
                            # can't trust str objects from python 2
                            return QByteArray()
                        return fn(mod, obj, args)

                    return wrapper
                else:
                    return fn

        up = Unpickler(io.BytesIO(data), errors="ignore")
        return up.load()

    def _pickle(self, obj):
        return pickle.dumps(obj, protocol=0)

    def load(self, name):
        assert name != "_global"
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = ?", name)
        self.name = name
        try:
            self.profile = self._unpickle(data)
        except:
            QMessageBox.warning(
                None,
                _("Profile Corrupt"),
                _("""\
Anki could not read your profile data. Window sizes and your sync login \
details have been forgotten."""),
            )

            print("resetting corrupt profile")
            self.profile = profileConf.copy()
            self.save()
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.profile), self.name)
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute("insert or ignore into profiles values (?, ?)", name,
                        self._pickle(prof))
        self.db.commit()

    def remove(self, name):
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def trashCollection(self):
        p = self.collectionPath()
        if os.path.exists(p):
            send2trash(p)

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (oldFolder.lower()
                                             == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = "".join([oldFolder, "-temp"])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(
                        _("Please remove the folder %s and try again.") %
                        midFolder)
                    self.name = oldName
                    return
            else:
                showWarning(_("Folder already exists."))
                self.name = oldName
                return

        # update name
        self.db.execute("update profiles set name = ? where name = ?", name,
                        oldName)
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except Exception as e:
            self.db.rollback()
            if "WinError 5" in str(e):
                showWarning(
                    _("""\
Anki could not rename your profile because it could not rename the profile \
folder on disk. Please ensure you have permission to write to Documents/Anki \
and no other programs are accessing your profile folders, then try again."""))
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons21"))

    def backupFolder(self):
        return self._ensureExists(os.path.join(self.profileFolder(),
                                               "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _setBaseFolder(self, cmdlineBase):
        if cmdlineBase:
            self.base = os.path.abspath(cmdlineBase)
        elif os.environ.get("ANKI_BASE"):
            self.base = os.path.abspath(os.environ["ANKI_BASE"])
        else:
            self.base = self._defaultBase()
            self.maybeMigrateFolder()
        self.ensureBaseExists()

    def _defaultBase(self):
        if isWin:
            from aqt.winpaths import get_appdata

            return os.path.join(get_appdata(), "Anki2")
        elif isMac:
            return os.path.expanduser("~/Library/Application Support/Anki2")
        else:
            dataDir = os.environ.get("XDG_DATA_HOME",
                                     os.path.expanduser("~/.local/share"))
            if not os.path.exists(dataDir):
                os.makedirs(dataDir)
            return os.path.join(dataDir, "Anki2")

    def _loadMeta(self, retrying=False) -> LoadMetaResult:
        result = LoadMetaResult()
        result.firstTime = False
        result.loadError = retrying

        opath = os.path.join(self.base, "prefs.db")
        path = os.path.join(self.base, "prefs21.db")
        if not retrying and os.path.exists(opath) and not os.path.exists(path):
            shutil.copy(opath, path)

        result.firstTime = not os.path.exists(path)

        def recover():
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            for suffix in ("", "-journal"):
                fpath = path + suffix
                if os.path.exists(fpath):
                    os.unlink(fpath)

        # open DB file and read data
        try:
            self.db = DB(path)
            assert self.db.scalar("pragma integrity_check") == "ok"
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
            data = self.db.scalar(
                "select cast(data as blob) from profiles where name = '_global'"
            )
        except:
            if result.loadError:
                # already failed, prevent infinite loop
                raise
            # delete files and try again
            recover()
            return self._loadMeta(retrying=True)

        # try to read data
        if not result.firstTime:
            try:
                self.meta = self._unpickle(data)
                return result
            except:
                print("resetting corrupt _global")
                result.loadError = True
                result.firstTime = True

        # if new or read failed, create a default global profile
        self.meta = metaConf.copy()
        self.db.execute(
            "insert or replace into profiles values ('_global', ?)",
            self._pickle(metaConf),
        )
        return result

    def _ensureProfile(self):
        "Create a new profile if none exists."
        self.create(_("User 1"))
        p = os.path.join(self.base, "README.txt")
        open(p, "w", encoding="utf8").write(
            _("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite + "#startupopts"))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def setDefaultLang(self):
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass

        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.accepted.connect(self._onLangSelected)
        d.rejected.connect(lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en_US"
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(anki.lang.langs):
            if code == "en_US":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in anki.lang.langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        obj = anki.lang.langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(None, "Anki", en % name,
                                 QMessageBox.Yes | QMessageBox.No,
                                 QMessageBox.No)
        if r != QMessageBox.Yes:
            return self.setDefaultLang()
        self.setLang(code)

    def setLang(self, code):
        self.meta["defaultLang"] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()
        anki.lang.setLang(code, locale_dir(), local=False)

    # OpenGL
    ######################################################################

    def _glPath(self):
        return os.path.join(self.base, "gldriver")

    def glMode(self):
        if isMac:
            return "auto"

        path = self._glPath()
        if not os.path.exists(path):
            return "software"

        mode = open(path, "r").read().strip()

        if mode == "angle" and isWin:
            return mode
        elif mode == "software":
            return mode
        return "auto"

    def setGlMode(self, mode):
        open(self._glPath(), "w").write(mode)

    def nextGlMode(self):
        mode = self.glMode()
        if mode == "software":
            self.setGlMode("auto")
        elif mode == "auto":
            if isWin:
                self.setGlMode("angle")
            else:
                self.setGlMode("software")
        elif mode == "angle":
            self.setGlMode("software")

    # Scale
    ######################################################################

    def uiScale(self) -> float:
        return self.meta.get("uiScale", 1.0)

    def setUiScale(self, scale: float) -> None:
        self.meta["uiScale"] = scale
Ejemplo n.º 28
0
class MediaManager(object):

    # other code depends on this order, so don't reorder
    regexps = ("(?i)(\[sound:([^]]+)\])",
               "(?i)(<img[^>]+src=[\"']?([^\"'>]+)[\"']?[^>]*>)")

    def __init__(self, col):
        self.col = col
        # media directory
        self._dir = re.sub(r"(?i)\.(anki2)$", ".media", self.col.path)
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        self._oldcwd = os.getcwd()
        os.chdir(self._dir)
        # change database
        self.connect()

    def connect(self):
        if self.col.server:
            return
        path = self.dir() + ".db"
        create = not os.path.exists(path)
        self.db = DB(path)
        if create:
            self._initDB()

    def close(self):
        if self.col.server:
            return
        self.db.close()
        self.db = None
        # change cwd back to old location
        os.chdir(self._oldcwd)

    def dir(self):
        return self._dir

    # Adding media
    ##########################################################################

    def addFile(self, opath):
        """Copy PATH to MEDIADIR, and return new filename.
If the same name exists, compare checksums."""
        mdir = self.dir()
        # remove any dangerous characters
        base = re.sub(r"[][<>:/\\&]", "", os.path.basename(opath))
        dst = os.path.join(mdir, base)
        # if it doesn't exist, copy it directly
        if not os.path.exists(dst):
            shutil.copy2(opath, dst)
            return base
        # if it's identical, reuse
        if self.filesIdentical(opath, dst):
            return base
        # otherwise, find a unique name
        (root, ext) = os.path.splitext(base)

        def repl(match):
            n = int(match.group(1))
            return " (%d)" % (n + 1)

        while True:
            path = os.path.join(mdir, root + ext)
            if not os.path.exists(path):
                break
            reg = " \((\d+)\)$"
            if not re.search(reg, root):
                root = root + " (1)"
            else:
                root = re.sub(reg, repl, root)
        # copy and return
        shutil.copy2(opath, path)
        return os.path.basename(path)

    def filesIdentical(self, path1, path2):
        "True if files are the same."
        return (checksum(open(path1, "rb").read()) == checksum(
            open(path2, "rb").read()))

    # String manipulation
    ##########################################################################

    def filesInStr(self, mid, string, includeRemote=False):
        l = []
        # convert latex first
        model = self.col.models.get(mid)
        string = mungeQA(string, None, None, model, None, self.col)
        # extract filenames
        for reg in self.regexps:
            for (full, fname) in re.findall(reg, string):
                isLocal = not re.match("(https?|ftp)://", fname.lower())
                if isLocal or includeRemote:
                    l.append(fname)
        return l

    def strip(self, txt):
        for reg in self.regexps:
            txt = re.sub(reg, "", txt)
        return txt

    def escapeImages(self, string):
        # Feeding webkit unicode can result in it not finding images, so on
        # linux/osx we percent escape the image paths as utf8. On Windows the
        # problem is more complicated - if we percent-escape as utf8 it fixes
        # some images but breaks others. When filenames are normalized by
        # dropbox they become unreadable if we escape them.
        if isWin:
            return string

        def repl(match):
            tag = match.group(1)
            fname = match.group(2)
            if re.match("(https?|ftp)://", fname):
                return tag
            return tag.replace(fname, urllib.quote(fname.encode("utf-8")))

        return re.sub(self.regexps[1], repl, string)
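
    # Illustrative note (not part of the original source): assuming a
    # MediaManager instance `mm` on Linux/macOS, a non-ASCII image reference is
    # percent-escaped as UTF-8, e.g.
    #   mm.escapeImages(u'<img src="café.png">')
    #   -> u'<img src="caf%C3%A9.png">'
    # On Windows the string is returned unchanged, for the reasons noted above.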

    # Rebuilding DB
    ##########################################################################

    def check(self, local=None):
        "Return (missingFiles, unusedFiles)."
        mdir = self.dir()
        # generate card q/a and look through all references
        normrefs = {}

        def norm(s):
            if isinstance(s, unicode):
                return unicodedata.normalize('NFD', s)
            return s

        for f in self.allMedia():
            normrefs[norm(f)] = True
        # loop through directory and find unused & missing media
        unused = []
        if local is None:
            files = os.listdir(mdir)
        else:
            files = local
        for file in files:
            if not local:
                path = os.path.join(mdir, file)
                if not os.path.isfile(path):
                    # ignore directories
                    continue
            nfile = norm(file)
            if nfile not in normrefs:
                unused.append(file)
            else:
                del normrefs[nfile]
        nohave = normrefs.keys()
        return (nohave, unused)

    def allMedia(self):
        "Return a set of all referenced filenames."
        files = set()
        for mid, flds in self.col.db.execute("select mid, flds from notes"):
            for f in self.filesInStr(mid, flds):
                files.add(f)
        return files

    # Copying on import
    ##########################################################################
    # FIXME: check if the files are actually identical, and rewrite references
    # if necessary

    def copyTo(self, rdir):
        "Copy media to RDIR. Return number of files copied."
        ldir = self.dir()
        if not os.path.exists(ldir):
            return 0
        cnt = 0
        for f in os.listdir(ldir):
            src = os.path.join(ldir, f)
            dst = os.path.join(rdir, f)
            if not os.path.exists(dst):
                shutil.copy2(src, dst)
            cnt += 1
        return cnt

    # Media syncing - changes and removal
    ##########################################################################

    def hasChanged(self):
        return self.db.scalar("select 1 from log limit 1")

    def removed(self):
        return self.db.list("select * from log where type = ?", MEDIA_REM)

    def syncRemove(self, fnames):
        # remove provided deletions
        for f in fnames:
            if os.path.exists(f):
                os.unlink(f)
            self.db.execute("delete from log where fname = ?", f)
            self.db.execute("delete from media where fname = ?", f)
        # and all locally-logged deletions, as server has acked them
        self.db.execute("delete from log where type = ?", MEDIA_REM)
        self.db.commit()

    # Media syncing - unbundling zip files from server
    ##########################################################################

    def syncAdd(self, zipData):
        "Extract zip data; true if finished."
        f = StringIO(zipData)
        z = zipfile.ZipFile(f, "r")
        finished = False
        meta = None
        media = []
        sizecnt = 0
        # get meta info first
        assert z.getinfo("_meta").file_size < 100000
        meta = simplejson.loads(z.read("_meta"))
        nextUsn = int(z.read("_usn"))
        # then loop through all files
        for i in z.infolist():
            # check for zip bombs
            sizecnt += i.file_size
            assert sizecnt < 100 * 1024 * 1024
            if i.filename == "_meta" or i.filename == "_usn":
                # ignore previously-retrieved meta
                continue
            elif i.filename == "_finished":
                # last zip in set
                finished = True
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                # can we store the file on this system?
                if self.illegal(i.filename):
                    continue
                # save file
                open(name, "wb").write(data)
                # update db
                media.append((name, csum, self._mtime(name)))
                # remove entries from local log
                self.db.execute("delete from log where fname = ?", name)
        # update media db and note new starting usn
        if media:
            self.db.executemany("insert or replace into media values (?,?,?)",
                                media)
        self.setUsn(nextUsn)  # commits
        # if we have finished adding, we need to record the new folder mtime
        # so that we don't trigger a needless scan
        if finished:
            self.syncMod()
        return finished

    def illegal(self, f):
        if isWin:
            for c in f:
                if c in "<>:\"/\\|?*^":
                    return True
        elif isMac:
            for c in f:
                if c in ":\\/":
                    return True

    # Media syncing - bundling zip files to send to server
    ##########################################################################
    # Because there's no standard filename encoding for zips, and because not
    # all zip clients support retrieving mtime, we store the files as ascii
    # and place a json file in the zip with the necessary information.

    def zipAdded(self):
        "Add files to a zip until over SYNC_ZIP_SIZE. Return zip data."
        f = StringIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)
        sz = 0
        cnt = 0
        files = {}
        cur = self.db.execute("select fname from log where type = ?",
                              MEDIA_ADD)
        fnames = []
        while 1:
            fname = cur.fetchone()
            if not fname:
                # add a flag so the server knows it can clean up
                z.writestr("_finished", "")
                break
            fname = fname[0]
            fnames.append([fname])
            z.write(fname, str(cnt))
            files[str(cnt)] = fname
            sz += os.path.getsize(fname)
            if sz > SYNC_ZIP_SIZE:
                break
            cnt += 1
        z.writestr("_meta", simplejson.dumps(files))
        z.close()
        return f.getvalue(), fnames
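
    # Illustrative note (not part of the original source): the zip produced
    # above contains members named "0", "1", ... (ascii-safe stand-ins for the
    # real filenames), plus "_meta" (a JSON map from member name to real
    # filename) and, once the log is exhausted, an empty "_finished" marker.
    # A receiver could recover the real names roughly like this:
    #   z = zipfile.ZipFile(StringIO(zipdata))
    #   meta = simplejson.loads(z.read("_meta"))
    #   for info in z.infolist():
    #       if info.filename in ("_meta", "_finished"):
    #           continue
    #       real_name = meta[info.filename]   # e.g. "0" -> "cat.jpg"
    #       data = z.read(info)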

    def forgetAdded(self, fnames):
        if not fnames:
            return
        self.db.executemany("delete from log where fname = ?", fnames)
        self.db.commit()

    # Tracking changes (private)
    ##########################################################################

    def _initDB(self):
        self.db.executescript("""
create table media (fname text primary key, csum text, mod int);
create table meta (dirMod int, usn int); insert into meta values (0, 0);
create table log (fname text primary key, type int);
""")

    def _mtime(self, path):
        return int(os.stat(path).st_mtime)

    def _checksum(self, path):
        return checksum(open(path, "rb").read())

    def usn(self):
        return self.db.scalar("select usn from meta")

    def setUsn(self, usn):
        self.db.execute("update meta set usn = ?", usn)
        self.db.commit()

    def syncMod(self):
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        self.db.commit()

    def _changed(self):
        "Return dir mtime if it has changed since the last findChanges()"
        # doesn't track edits, but user can add or remove a file to update
        mod = self.db.scalar("select dirMod from meta")
        mtime = self._mtime(self.dir())
        if mod and mod == mtime:
            return False
        return mtime

    def findChanges(self):
        "Scan the media folder if it's changed, and note any changes."
        if self._changed():
            self._logChanges()

    def _logChanges(self):
        (added, removed) = self._changes()
        log = []
        media = []
        mediaRem = []
        for f in added:
            mt = self._mtime(f)
            media.append((f, self._checksum(f), mt))
            log.append((f, MEDIA_ADD))
        for f in removed:
            mediaRem.append((f, ))
            log.append((f, MEDIA_REM))
        # update media db
        self.db.executemany("insert or replace into media values (?,?,?)",
                            media)
        if mediaRem:
            self.db.executemany("delete from media where fname = ?", mediaRem)
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        # and logs
        self.db.executemany("insert or replace into log values (?,?)", log)
        self.db.commit()

    def _changes(self):
        self.cache = {}
        for (name, csum, mod) in self.db.execute("select * from media"):
            self.cache[name] = [csum, mod, False]
        added = []
        removed = []
        # loop through on-disk files
        for f in os.listdir(self.dir()):
            # ignore folders
            if os.path.isdir(f):
                continue
            # newly added?
            if f not in self.cache:
                added.append(f)
            else:
                # modified since last time?
                if self._mtime(f) != self.cache[f][1]:
                    # and has different checksum?
                    if self._checksum(f) != self.cache[f][0]:
                        added.append(f)
                # mark as used
                self.cache[f][2] = True
        # look for any entries in the cache that no longer exist on disk
        for (k, v) in self.cache.items():
            if not v[2]:
                removed.append(k)
        return added, removed
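
    # Note (added for clarity): a file counts as "added" if it is new, or if
    # both its mtime and its checksum differ from the cached values; cache
    # entries with no matching file on disk are reported as "removed".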

    def sanityCheck(self):
        assert not self.db.scalar("select count() from log")
        cnt = self.db.scalar("select count() from media")
        return cnt
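
A minimal usage sketch for the class above (hedged: `col` is assumed to be an
already-open collection whose `media` attribute is an instance of this
MediaManager):

col.media.findChanges()                 # rescan the folder if its mtime changed
missing, unused = col.media.check()     # (missingFiles, unusedFiles)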
Ejemplo n.º 29
0
class ChangeLog:
    """Tracks changes made to notes"""
    def __init__(self):
        base_path = os.path.dirname(os.path.abspath(__file__))
        db_path = os.path.join(base_path, "..", "user_files", "changelog.db")
        need_create = not os.path.exists(db_path)
        self.db = DB(db_path)
        self.db.setAutocommit(True)
        if need_create:
            self._create_tables()
            self._create_indices()
        self.db.setAutocommit(False)
        max_id = self.db.scalar("select max(id) from changelog")
        if max_id is not None:
            self.next_id = max_id + 1
        else:
            self.next_id = 0

    def close(self):
        self.db.close()

    def commit_changes(self):
        self.db.commit()
        self.db.mod = False

    def record_change(self, op, init_ts, change):
        self.db.execute(
            """
            insert into changelog (id, op, init_ts, ts, nid, fld, old, new)
            values (?,?,?,?,?,?,?,?)
            """, self.next_id, op, init_ts, change.ts, change.nid, change.fld,
            change.old, change.new)
        self.next_id += 1

    def record_and_commit_changes(self, op, init_ts, changes):
        data = []
        for change in changes:
            data.append((self.next_id, op, init_ts, change.ts, change.nid,
                         change.fld, change.old, change.new))
            self.next_id += 1
        self.db.executemany(
            """
            insert into changelog (id, op, init_ts, ts, nid, fld, old, new)
            values (?,?,?,?,?,?,?,?)
        """, data)
        self.commit_changes()

    def _create_tables(self):
        self.db.executescript("""
            create table if not exists changelog (
              id      integer primary key,
              -- identifies the operation performed
              op      text not null,
              -- timestamp (ms) when bulk changes were initiated
              init_ts integer not null,
              -- timestamp (ms) when field was changed
              ts      integer not null,
              -- note id
              nid     integer not null,
              -- field name
              fld     text not null,
              -- old value of field
              old     text not null,
              -- new value of field
              new     text not null
            );
        """)

    def _create_indices(self):
        self.db.executescript("""
            create index if not exists ix_changelog_ts on changelog (ts);
        """)
Ejemplo n.º 30
0
def Collection(path, lock=True, server=False, log=False):
    "Open a new or existing collection. Path must be unicode."
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    elif db.scalar("select ver from col") > 11:
        db.setAutocommit(False)
        raise Exception("invalidColVersion")
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _ExtCollection(db, server, log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        col.lock()
    return col
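
A minimal usage sketch (hedged: the path is hypothetical, and per the assert
above it must end in ".anki2"; `close()` is assumed to be available on the
returned collection object, as in stock Anki):

col = Collection(os.path.expanduser(u"~/Documents/Anki/User 1/collection.anki2"))
try:
    note_count = col.db.scalar("select count() from notes")
finally:
    col.close()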
Ejemplo n.º 31
0
    def reopen(self) -> None:
        "Reconnect to DB (after changing threads, etc)."
        if not self.db:
            self.db = DB(self.path)
            self.media.connect()
            self._openLog()
Ejemplo n.º 32
0
def _addSchema(db: DB, setColConf: bool = True) -> None:
    db.executescript("""
create table if not exists col (
    id              integer primary key,
    crt             integer not null,
    mod             integer not null,
    scm             integer not null,
    ver             integer not null,
    dty             integer not null,
    usn             integer not null,
    ls              integer not null,
    conf            text not null,
    models          text not null,
    decks           text not null,
    dconf           text not null,
    tags            text not null
);

create table if not exists notes (
    id              integer primary key,   /* 0 */
    guid            text not null,         /* 1 */
    mid             integer not null,      /* 2 */
    mod             integer not null,      /* 3 */
    usn             integer not null,      /* 4 */
    tags            text not null,         /* 5 */
    flds            text not null,         /* 6 */
    sfld            integer not null,      /* 7 */
    csum            integer not null,      /* 8 */
    flags           integer not null,      /* 9 */
    data            text not null          /* 10 */
);

create table if not exists cards (
    id              integer primary key,   /* 0 */
    nid             integer not null,      /* 1 */
    did             integer not null,      /* 2 */
    ord             integer not null,      /* 3 */
    mod             integer not null,      /* 4 */
    usn             integer not null,      /* 5 */
    type            integer not null,      /* 6 */
    queue           integer not null,      /* 7 */
    due             integer not null,      /* 8 */
    ivl             integer not null,      /* 9 */
    factor          integer not null,      /* 10 */
    reps            integer not null,      /* 11 */
    lapses          integer not null,      /* 12 */
    left            integer not null,      /* 13 */
    odue            integer not null,      /* 14 */
    odid            integer not null,      /* 15 */
    flags           integer not null,      /* 16 */
    data            text not null          /* 17 */
);

create table if not exists revlog (
    id              integer primary key,
    cid             integer not null,
    usn             integer not null,
    ease            integer not null,
    ivl             integer not null,
    lastIvl         integer not null,
    factor          integer not null,
    time            integer not null,
    type            integer not null
);

create table if not exists graves (
    usn             integer not null,
    oid             integer not null,
    type            integer not null
);

insert or ignore into col
values(1,0,0,%(s)s,%(v)s,0,0,0,'','{}','','','{}');
""" % ({
        "v": SCHEMA_VERSION,
        "s": intTime(1000)
    }))
    if setColConf:
        _addColVars(db, *_getColVars(db))
Ejemplo n.º 33
0
class ProfileManager(object):

    def __init__(self, base=None, profile=None):
        self.name = None
        # instantiate base folder
        if not base:
            base = self._defaultBase()
        self.ensureBaseExists(base)
        self.checkPid(base)
        self.base = base
        # load database and cmdline-provided profile
        self._load()
        if profile:
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Startup checks
    ######################################################################
    # These routines run before the language code is initialized, so they
    # can't be translated

    def ensureBaseExists(self, base):
        if not os.path.exists(base):
            try:
                os.makedirs(base)
            except:
                QMessageBox.critical(
                    None, "Error", """\
Anki can't write to the harddisk. Please see the \
documentation for information on using a flash drive.""")
                raise

    def checkPid(self, base):
        p = os.path.join(base, "pid")
        # check if an existing instance is running
        if os.path.exists(p):
            pid = int(open(p).read())
            exists = False
            try:
                os.kill(pid, 0)
                exists = True
            except OSError:
                pass
            if exists:
                QMessageBox.warning(
                    None, "Error", """\
Anki is already running. Please close the existing copy or restart your \
computer.""")
                raise Exception("Already running")
        # write out pid to the file
        open(p, "w").write(str(os.getpid()))
        # add handler to cleanup on exit
        def cleanup():
            os.unlink(p)
        atexit.register(cleanup)

    # Profile load/save
    ######################################################################

    def profiles(self):
        return sorted(
            x for x in self.db.list("select name from profiles")
            if x != "_global")

    def load(self, name, passwd=None):
        prof = cPickle.loads(
            self.db.scalar("select data from profiles where name = ?", name))
        if prof['key'] and prof['key'] != self._pwhash(passwd):
            self.name = None
            return False
        if name != "_global":
            self.name = name
            self.profile = prof
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.profile), self.name)
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        prof['lang'] = self.meta['defaultLang']
        self.db.execute("insert into profiles values (?, ?)",
                        name, cPickle.dumps(prof))
        self.db.commit()

    def remove(self, name):
        shutil.rmtree(self.profileFolder())
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def rename(self, name):
        oldFolder = self.profileFolder()
        # update name
        self.db.execute("update profiles set name = ? where name = ?",
                        name, self.name)
        # rename folder
        self.name = name
        newFolder = self.profileFolder()
        os.rmdir(newFolder)
        os.rename(oldFolder, newFolder)
        self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self):
        return self._ensureExists(os.path.join(self.base, self.name))

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons"))

    def backupFolder(self):
        return self._ensureExists(
            os.path.join(self.profileFolder(), "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _defaultBase(self):
        if isWin:
            s = QSettings(QSettings.UserScope, "Microsoft", "Windows")
            s.beginGroup("CurrentVersion/Explorer/Shell Folders")
            d = s.value("Personal")
            return os.path.join(d, "Anki")
        elif isMac:
            return os.path.expanduser("~/Documents/Anki")
        else:
            return os.path.expanduser("~/Anki")

    def _load(self):
        path = os.path.join(self.base, "prefs.db")
        new = not os.path.exists(path)
        self.db = DB(path, text=str)
        self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        if new:
            # create a default global profile
            self.meta = metaConf.copy()
            self.db.execute("insert into profiles values ('_global', ?)",
                            cPickle.dumps(metaConf))
            self._setDefaultLang()
            # and save a default user profile for later (commits)
            self.create("User 1")
        else:
            # load previously created
            self.meta = cPickle.loads(
                self.db.scalar(
                    "select data from profiles where name = '_global'"))

    def _pwhash(self, passwd):
        return checksum(unicode(self.meta['id'])+unicode(passwd))


    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        # the dialog expects _ to be defined, but we're running before
        # setLang() has been called. so we create a dummy op for now
        import __builtin__
        __builtin__.__dict__['_'] = lambda x: x
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass
        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.connect(d, SIGNAL("accepted()"), self._onLangSelected)
        d.connect(d, SIGNAL("rejected()"), lambda: True)
        # default to the system language
        (lang, enc) = locale.getdefaultlocale()
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        code = langs[f.lang.currentRow()][1]
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()
Ejemplo n.º 34
0
def _upgradeSchema(db: DB) -> Any:
    ver = db.scalar("select ver from col")
    if ver == SCHEMA_VERSION:
        return ver
    # add odid to cards, edue->odue
    ######################################################################
    if db.scalar("select ver from col") == 1:
        db.execute("alter table cards rename to cards2")
        _addSchema(db, setColConf=False)
        db.execute("""
insert into cards select
id, nid, did, ord, mod, usn, type, queue, due, ivl, factor, reps, lapses,
left, edue, 0, flags, data from cards2""")
        db.execute("drop table cards2")
        db.execute("update col set ver = 2")
        _updateIndices(db)
    # remove did from notes
    ######################################################################
    if db.scalar("select ver from col") == 2:
        db.execute("alter table notes rename to notes2")
        _addSchema(db, setColConf=False)
        db.execute("""
insert into notes select
id, guid, mid, mod, usn, tags, flds, sfld, csum, flags, data from notes2""")
        db.execute("drop table notes2")
        db.execute("update col set ver = 3")
        _updateIndices(db)
    return ver
Ejemplo n.º 35
0
class ProfileManager:

    def __init__(self, base=None):
        self.name = None
        self.db = None
        # instantiate base folder
        self._setBaseFolder(base)

    def setupMeta(self):
        # load metadata
        self.firstRun = self._loadMeta()

    # profile load on startup
    def openProfile(self, profile):
        if profile:
            if profile not in self.profiles():
                QMessageBox.critical(None, "Error", "Requested profile does not exist.")
                sys.exit(1)
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureBaseExists(self):
        try:
            self._ensureExists(self.base)
        except:
            # can't translate, as lang not initialized
            QMessageBox.critical(
                None, "Error", """\
Anki could not create the folder %s. Please ensure that location is not \
read-only and you have permission to write to it. If you cannot fix this \
issue, please see the documentation for information on running Anki from \
a flash drive.""" % self.base)
            raise

    # Folder migration
    ######################################################################

    def _oldFolderLocation(self):
        if isMac:
            return os.path.expanduser("~/Documents/Anki")
        elif isWin:
            from aqt.winpaths import get_personal
            return os.path.join(get_personal(), "Anki")
        else:
            p = os.path.expanduser("~/Anki")
            if os.path.isdir(p):
                return p
            return os.path.expanduser("~/Documents/Anki")

    def maybeMigrateFolder(self):
        oldBase = self._oldFolderLocation()

        if oldBase and not os.path.exists(self.base) and os.path.isdir(oldBase):
            shutil.move(oldBase, self.base)

    # Profile load/save
    ######################################################################

    def profiles(self):
        return sorted(x for x in
            self.db.list("select name from profiles")
            if x != "_global")

    def _unpickle(self, data):
        class Unpickler(pickle.Unpickler):
            def find_class(self, module, name):
                fn = super().find_class(module, name)
                if module == "sip" and name == "_unpickle_type":
                    def wrapper(mod, obj, args):
                        if mod.startswith("PyQt4") and obj == "QByteArray":
                            # can't trust str objects from python 2
                            return QByteArray()
                        return fn(mod, obj, args)
                    return wrapper
                else:
                    return fn
        up = Unpickler(io.BytesIO(data), errors="ignore")
        return up.load()

    def _pickle(self, obj):
        return pickle.dumps(obj, protocol=0)

    def load(self, name):
        assert name != "_global"
        data = self.db.scalar("select cast(data as blob) from profiles where name = ?", name)
        self.name = name
        try:
            self.profile = self._unpickle(data)
        except:
            print("resetting corrupt profile")
            self.profile = profileConf.copy()
            self.save()
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.profile), self.name)
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute("insert or ignore into profiles values (?, ?)",
                        name, self._pickle(prof))
        self.db.commit()

    def remove(self, name):
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def trashCollection(self):
        p = self.collectionPath()
        if os.path.exists(p):
            send2trash(p)

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (
                    oldFolder.lower() == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = ''.join([oldFolder, '-temp'])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(_("Please remove the folder %s and try again.")
                            % midFolder)
                    self.name = oldName
                    return
            else:
                showWarning(_("Folder already exists."))
                self.name = oldName
                return

        # update name
        self.db.execute("update profiles set name = ? where name = ?",
                        name, oldName)
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except WindowsError as e:
            self.db.rollback()
            if "Access is denied" in e:
                showWarning(_("""\
Anki could not rename your profile because it could not rename the profile \
folder on disk. Please ensure you have permission to write to Documents/Anki \
and no other programs are accessing your profile folders, then try again."""))
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons21"))

    def backupFolder(self):
        return self._ensureExists(
            os.path.join(self.profileFolder(), "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _setBaseFolder(self, cmdlineBase):
        if cmdlineBase:
            self.base = os.path.abspath(cmdlineBase)
        elif os.environ.get("ANKI_BASE"):
            self.base = os.path.abspath(os.environ["ANKI_BASE"])
        else:
            self.base = self._defaultBase()
            self.maybeMigrateFolder()
        self.ensureBaseExists()

    def _defaultBase(self):
        if isWin:
            from aqt.winpaths import get_appdata
            return os.path.join(get_appdata(), "Anki2")
        elif isMac:
            return os.path.expanduser("~/Library/Application Support/Anki2")
        else:
            dataDir = os.environ.get(
                "XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
            if not os.path.exists(dataDir):
                os.makedirs(dataDir)
            return os.path.join(dataDir, "Anki2")

    def _loadMeta(self):
        opath = os.path.join(self.base, "prefs.db")
        path = os.path.join(self.base, "prefs21.db")
        if os.path.exists(opath) and not os.path.exists(path):
            shutil.copy(opath, path)

        new = not os.path.exists(path)
        def recover():
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            for suffix in ("", "-journal"):
                fpath = path + suffix
                if os.path.exists(fpath):
                    os.unlink(fpath)
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs21.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
        try:
            self.db = DB(path)
            assert self.db.scalar("pragma integrity_check") == "ok"
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
            data = self.db.scalar(
                "select cast(data as blob) from profiles where name = '_global'")
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created data
            try:
                self.meta = self._unpickle(data)
                return
            except:
                print("resetting corrupt _global")
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute("insert or replace into profiles values ('_global', ?)",
                        self._pickle(metaConf))
        self._setDefaultLang()
        return True

    def ensureProfile(self):
        "Create a new profile if none exists."
        if self.firstRun:
            self.create(_("User 1"))
            p = os.path.join(self.base, "README.txt")
            open(p, "w").write(_("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite +  "#startupopts"))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        # the dialog expects _ to be defined, but we're running before
        # setupLang() has been called. so we create a dummy op for now
        import builtins
        builtins.__dict__['_'] = lambda x: x
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass
        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.accepted.connect(self._onLangSelected)
        d.rejected.connect(lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en"
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(anki.lang.langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in anki.lang.langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        obj = anki.lang.langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(
            None, "Anki", en%name, QMessageBox.Yes | QMessageBox.No,
            QMessageBox.No)
        if r != QMessageBox.Yes:
            return self._setDefaultLang()
        self.setLang(code)

    def setLang(self, code):
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()
        anki.lang.setLang(code, local=False)

    # OpenGL
    ######################################################################

    def _glPath(self):
        return os.path.join(self.base, "gldriver")

    def glMode(self):
        if isMac:
            return "auto"

        path = self._glPath()
        if not os.path.exists(path):
            return "software"

        mode = open(path, "r").read().strip()

        if mode == "angle" and isWin:
            return mode
        elif mode == "software":
            return mode
        return "auto"

    def setGlMode(self, mode):
        open(self._glPath(), "w").write(mode)

    def nextGlMode(self):
        mode = self.glMode()
        if mode == "software":
            self.setGlMode("auto")
        elif mode == "auto":
            if isWin:
                self.setGlMode("angle")
            else:
                self.setGlMode("software")
        elif mode == "angle":
            self.setGlMode("software")
Ejemplo n.º 36
0
def Collection(path, lock=True, server=False, log=False):
    "Open a new or existing collection. Path must be unicode."
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _Collection(db, server, log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        col.lock()
    return col
Ejemplo n.º 37
0
class ProfileManager:

    def __init__(self, base=None, profile=None):
        self.name = None
        self.db = None
        # instantiate base folder
        self._setBaseFolder(base)
        # load metadata
        self.firstRun = self._loadMeta()
        # did the user request a profile to start up with?
        if profile:
            if profile not in self.profiles():
                QMessageBox.critical(None, "Error", "Requested profile does not exist.")
                sys.exit(1)
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureBaseExists(self):
        try:
            self._ensureExists(self.base)
        except:
            # can't translate, as lang not initialized
            QMessageBox.critical(
                None, "Error", """\
Anki could not create the folder %s. Please ensure that location is not \
read-only and you have permission to write to it. If you cannot fix this \
issue, please see the documentation for information on running Anki from \
a flash drive.""" % self.base)
            raise

    # Folder migration
    ######################################################################

    def _oldFolderLocation(self):
        if isMac:
            return os.path.expanduser("~/Documents/Anki")
        elif isWin:
            loc = QStandardPaths.writableLocation(QStandardPaths.DocumentsLocation)
            return os.path.join(loc, "Anki")
        else:
            p = os.path.expanduser("~/Anki")
            if os.path.exists(p):
                return p
            else:
                loc = QStandardPaths.writableLocation(QStandardPaths.DocumentsLocation)
                if loc[:-1] == QStandardPaths.writableLocation(
                        QStandardPaths.HomeLocation):
                    # occasionally "documentsLocation" will return the home
                    # folder because the Documents folder isn't configured
                    # properly; fall back to an English path
                    return os.path.expanduser("~/Documents/Anki")
                else:
                    return os.path.join(loc, "Anki")

    def maybeMigrateFolder(self):
        oldBase = self._oldFolderLocation()

        if not os.path.exists(self.base) and os.path.exists(oldBase):
            shutil.move(oldBase, self.base)

    # Profile load/save
    ######################################################################

    def profiles(self):
        return sorted(x for x in
            self.db.list("select name from profiles")
            if x != "_global")

    def load(self, name, passwd=None):
        data = self.db.scalar("select cast(data as blob) from profiles where name = ?", name)
        # some profiles created in python2 may not decode properly
        prof = pickle.loads(data, errors="ignore")
        if prof['key'] and prof['key'] != self._pwhash(passwd):
            self.name = None
            return False
        if name != "_global":
            self.name = name
            self.profile = prof
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, pickle.dumps(self.profile), self.name)
        self.db.execute(sql, pickle.dumps(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute("insert into profiles values (?, ?)",
                        name, pickle.dumps(prof))
        self.db.commit()

    def remove(self, name):
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (
                    oldFolder.lower() == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = ''.join([oldFolder, '-temp'])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(_("Please remove the folder %s and try again.")
                            % midFolder)
                    self.name = oldName
                    return
            else:
                showWarning(_("Folder already exists."))
                self.name = oldName
                return

        # update name
        self.db.execute("update profiles set name = ? where name = ?",
                        name, oldName)
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except WindowsError as e:
            self.db.rollback()
            if "Access is denied" in e:
                showWarning(_("""\
Anki could not rename your profile because it could not rename the profile \
folder on disk. Please ensure you have permission to write to Documents/Anki \
and no other programs are accessing your profile folders, then try again."""))
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons21"))

    def backupFolder(self):
        return self._ensureExists(
            os.path.join(self.profileFolder(), "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _setBaseFolder(self, cmdlineBase):
        if cmdlineBase:
            self.base = os.path.abspath(cmdlineBase)
        elif os.environ.get("ANKI_BASE"):
            self.base = os.path.abspath(os.environ["ANKI_BASE"])
        else:
            self.base = self._defaultBase()
            self.maybeMigrateFolder()
        self.ensureBaseExists()

    def _defaultBase(self):
        if isWin:
            loc = QStandardPaths.writableLocation(QStandardPaths.AppDataLocation)
            return os.path.join(loc, "Anki2")
        elif isMac:
            return os.path.expanduser("~/Library/Application Support/Anki2")
        else:
            dataDir = os.environ.get(
                "XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
            if not os.path.exists(dataDir):
                os.makedirs(dataDir)
            return os.path.join(dataDir, "Anki2")

    def _loadMeta(self):
        path = os.path.join(self.base, "prefs21.db")
        new = not os.path.exists(path)
        def recover():
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            broken = path+".broken"
            if os.path.exists(broken):
                os.unlink(broken)
            os.rename(path, broken)
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs21.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
        try:
            self.db = DB(path)
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created
            try:
                self.meta = pickle.loads(
                    self.db.scalar(
                        "select cast(data as blob) from profiles where name = '_global'"))
                return
            except:
                recover()
                return self._loadMeta()
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute("insert or replace into profiles values ('_global', ?)",
                        pickle.dumps(metaConf))
        self._setDefaultLang()
        return True

    def ensureProfile(self):
        "Create a new profile if none exists."
        if self.firstRun:
            self.create(_("User 1"))
            p = os.path.join(self.base, "README.txt")
            open(p, "w").write(_("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite +  "#startupopts"))

    def _pwhash(self, passwd):
        return checksum(str(self.meta['id'])+str(passwd))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        # the dialog expects _ to be defined, but we're running before
        # setupLang() has been called. so we create a dummy op for now
        import builtins
        builtins.__dict__['_'] = lambda x: x
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass
        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.accepted.connect(self._onLangSelected)
        d.rejected.connect(lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en"
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(anki.lang.langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in anki.lang.langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        obj = anki.lang.langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(
            None, "Anki", en%name, QMessageBox.Yes | QMessageBox.No,
            QMessageBox.No)
        if r != QMessageBox.Yes:
            return self._setDefaultLang()
        self.setLang(code)

    def setLang(self, code):
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, pickle.dumps(self.meta), "_global")
        self.db.commit()
        anki.lang.setLang(code, local=False)
Ejemplo n.º 38
0
Archivo: media.py Proyecto: gbraad/anki
class MediaManager(object):

    soundRegexps = ["(?i)(\[sound:(?P<fname>[^]]+)\])"]
    imgRegexps = [
        # src element quoted case
        "(?i)(<img[^>]+src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
        # unquoted case
        "(?i)(<img[^>]+src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
    ]
    regexps = soundRegexps + imgRegexps
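
    # Illustrative note (not part of the original source): every pattern
    # exposes the filename through the named group "fname", which is what
    # filesInStr() and escapeImages() below rely on, e.g.
    #   re.search(MediaManager.imgRegexps[0], '<img src="cat.jpg">').group("fname")
    #   -> 'cat.jpg'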

    def __init__(self, col, server):
        self.col = col
        if server:
            self._dir = None
            return
        # media directory
        self._dir = re.sub("(?i)\.(anki2)$", ".media", self.col.path)
        # convert dir to unicode if it's not already
        if isinstance(self._dir, str):
            self._dir = unicode(self._dir, sys.getfilesystemencoding())
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        try:
            self._oldcwd = os.getcwd()
        except OSError:
            # cwd doesn't exist
            self._oldcwd = None
        os.chdir(self._dir)
        # change database
        self.connect()

    def connect(self):
        if self.col.server:
            return
        path = self.dir()+".db"
        create = not os.path.exists(path)
        os.chdir(self._dir)
        self.db = DB(path)
        if create:
            self._initDB()

    def close(self):
        if self.col.server:
            return
        self.db.close()
        self.db = None
        # change cwd back to old location
        if self._oldcwd:
            try:
                os.chdir(self._oldcwd)
            except:
                # may have been deleted
                pass

    def dir(self):
        return self._dir

    def _isFAT32(self):
        if not isWin:
            return
        import win32api, win32file
        name = win32file.GetVolumeNameForVolumeMountPoint(self._dir[:3])
        if win32api.GetVolumeInformation(name)[4].lower().startswith("fat"):
            return True

    # Adding media
    ##########################################################################

    def addFile(self, opath):
        """Copy PATH to MEDIADIR, and return new filename.
If the same name exists, compare checksums."""
        mdir = self.dir()
        # remove any dangerous characters
        base = re.sub(r"[][<>:/\\&?\"\|]", "", os.path.basename(opath))
        (root, ext) = os.path.splitext(base)
        def repl(match):
            n = int(match.group(1))
            return " (%d)" % (n+1)
        # find the first available name
        while True:
            path = os.path.join(mdir, root + ext)
            # if it doesn't exist, copy it directly
            if not os.path.exists(path):
                shutil.copyfile(opath, path)
                return os.path.basename(path)
            # if it's identical, reuse
            if self.filesIdentical(opath, path):
                return os.path.basename(path)
            # otherwise, increment the index in the filename
            reg = " \((\d+)\)$"
            if not re.search(reg, root):
                root = root + " (1)"
            else:
                root = re.sub(reg, repl, root)

    def filesIdentical(self, path1, path2):
        "True if files are the same."
        return (checksum(open(path1, "rb").read()) ==
                checksum(open(path2, "rb").read()))
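
The loop in addFile above only reuses an existing name when the checksums match; otherwise it keeps bumping a " (n)" suffix until a free name is found. A minimal standalone sketch of that naming scheme (the unique_name helper and the in-memory existing dict are illustrative, not part of Anki's API):

import re

def unique_name(root, ext, existing, csum):
    # existing maps filename -> checksum of the file already stored
    def repl(match):
        return " (%d)" % (int(match.group(1)) + 1)
    while True:
        fname = root + ext
        if fname not in existing or existing[fname] == csum:
            return fname
        # bump or add the " (n)" suffix and try again
        if re.search(r" \((\d+)\)$", root):
            root = re.sub(r" \((\d+)\)$", repl, root)
        else:
            root += " (1)"

# example: "a.jpg" exists with a different checksum, so we get "a (1).jpg"
print(unique_name("a", ".jpg", {"a.jpg": "deadbeef"}, "cafebabe"))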

    # String manipulation
    ##########################################################################

    def filesInStr(self, mid, string, includeRemote=False):
        l = []
        model = self.col.models.get(mid)
        strings = []
        if model['type'] == MODEL_CLOZE and "{{c" in string:
            # if the field has clozes in it, we'll need to expand the
            # possibilities so we can render latex
            strings = self._expandClozes(string)
        else:
            strings = [string]
        for string in strings:
            # handle latex
            string = mungeQA(string, None, None, model, None, self.col)
            # extract filenames
            for reg in self.regexps:
                for match in re.finditer(reg, string):
                    fname = match.group("fname")
                    isLocal = not re.match("(https?|ftp)://", fname.lower())
                    if isLocal or includeRemote:
                        l.append(fname)
        return l

    def _expandClozes(self, string):
        ords = set(re.findall("{{c(\d+)::.+?}}", string))
        strings = []
        from anki.template.template import clozeReg
        def qrepl(m):
            if m.group(3):
                return "[%s]" % m.group(3)
            else:
                return "[...]"
        def arepl(m):
            return m.group(1)
        for ord in ords:
            s = re.sub(clozeReg%ord, qrepl, string)
            s = re.sub(clozeReg%".+?", "\\1", s)
            strings.append(s)
        strings.append(re.sub(clozeReg%".+?", arepl, string))
        return strings

    def transformNames(self, txt, func):
        for reg in self.regexps:
            txt = re.sub(reg, func, txt)
        return txt

    def strip(self, txt):
        for reg in self.regexps:
            txt = re.sub(reg, "", txt)
        return txt

    def escapeImages(self, string):
        def repl(match):
            tag = match.group(0)
            fname = match.group("fname")
            if re.match("(https?|ftp)://", fname):
                return tag
            return tag.replace(
                fname, urllib.quote(fname.encode("utf-8")))
        for reg in self.imgRegexps:
            string = re.sub(reg, repl, string)
        return string
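
The regexps above are plain strings on the class, so the extraction they drive can be exercised on its own. A rough illustration using the same patterns against an invented field value:

import re

soundRegexps = [r"(?i)(\[sound:(?P<fname>[^]]+)\])"]
imgRegexps = [
    r"(?i)(<img[^>]+src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
    r"(?i)(<img[^>]+src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
]

field = 'Hello <img src="cat.jpg"> [sound:meow.mp3] <img src=dog.png>'
found = []
for reg in soundRegexps + imgRegexps:
    for m in re.finditer(reg, field):
        found.append(m.group("fname"))
print(found)  # ['meow.mp3', 'cat.jpg', 'dog.png']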

    # Rebuilding DB
    ##########################################################################

    def check(self, local=None):
        "Return (missingFiles, unusedFiles)."
        mdir = self.dir()
        # generate card q/a and look through all references
        normrefs = {}
        def norm(s):
            if isinstance(s, unicode) and isMac:
                return unicodedata.normalize('NFD', s)
            return s
        for f in self.allMedia():
            normrefs[norm(f)] = True
        # loop through directory and find unused & missing media
        unused = []
        if local is None:
            files = os.listdir(mdir)
        else:
            files = local
        for file in files:
            if not local:
                path = os.path.join(mdir, file)
                if not os.path.isfile(path):
                    # ignore directories
                    continue
                if file.startswith("_"):
                    # leading _ says to ignore file
                    continue
            nfile = norm(file)
            if nfile not in normrefs:
                unused.append(file)
            else:
                del normrefs[nfile]
        nohave = [x for x in normrefs.keys() if not x.startswith("_")]
        return (nohave, unused)

    def allMedia(self):
        "Return a set of all referenced filenames."
        files = set()
        for mid, flds in self.col.db.execute("select mid, flds from notes"):
            for f in self.filesInStr(mid, flds):
                files.add(f)
        return files

    # Copying on import
    ##########################################################################

    def have(self, fname):
        return os.path.exists(os.path.join(self.dir(), fname))

    # Media syncing - changes and removal
    ##########################################################################

    def hasChanged(self):
        return self.db.scalar("select 1 from log limit 1")

    def removed(self):
        return self.db.list("select * from log where type = ?", MEDIA_REM)

    def syncRemove(self, fnames):
        # remove provided deletions
        for f in fnames:
            if os.path.exists(f):
                send2trash.send2trash(f)
            self.db.execute("delete from log where fname = ?", f)
            self.db.execute("delete from media where fname = ?", f)
        # and all locally-logged deletions, as server has acked them
        self.db.execute("delete from log where type = ?", MEDIA_REM)
        self.db.commit()

    # Media syncing - unbundling zip files from server
    ##########################################################################

    def syncAdd(self, zipData):
        "Extract zip data; true if finished."
        f = StringIO(zipData)
        z = zipfile.ZipFile(f, "r")
        finished = False
        meta = None
        media = []
        sizecnt = 0
        # get meta info first
        assert z.getinfo("_meta").file_size < 100000
        meta = json.loads(z.read("_meta"))
        nextUsn = int(z.read("_usn"))
        # then loop through all files
        for i in z.infolist():
            # check for zip bombs
            sizecnt += i.file_size
            assert sizecnt < 100*1024*1024
            if i.filename == "_meta" or i.filename == "_usn":
                # ignore previously-retrieved meta
                continue
            elif i.filename == "_finished":
                # last zip in set
                finished = True
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                # can we store the file on this system?
                if self.illegal(name):
                    continue
                # save file
                open(name, "wb").write(data)
                # update db
                media.append((name, csum, self._mtime(name)))
                # remove entries from local log
                self.db.execute("delete from log where fname = ?", name)
        # update media db and note new starting usn
        if media:
            self.db.executemany(
                "insert or replace into media values (?,?,?)", media)
        self.setUsn(nextUsn) # commits
        # if we have finished adding, we need to record the new folder mtime
        # so that we don't trigger a needless scan
        if finished:
            self.syncMod()
        return finished

    def illegal(self, f):
        if isWin:
            for c in f:
                if c in "<>:\"/\\|?*^":
                    return True
        elif isMac:
            for c in f:
                if c in ":\\/":
                    return True

    # Media syncing - bundling zip files to send to server
    ##########################################################################
    # Because there's no standard filename encoding for zips, and because not
    # all zip clients support retrieving mtime, we store the files as ascii
    # and place a json file in the zip with the necessary information.

    def zipAdded(self):
        "Add files to a zip until over SYNC_ZIP_SIZE/COUNT. Return zip data."
        f = StringIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)
        sz = 0
        cnt = 0
        files = {}
        cur = self.db.execute(
            "select fname from log where type = ?", MEDIA_ADD)
        fnames = []
        while 1:
            fname = cur.fetchone()
            if not fname:
                # add a flag so the server knows it can clean up
                z.writestr("_finished", "")
                break
            fname = fname[0]
            fnames.append([fname])
            z.write(fname, str(cnt))
            files[str(cnt)] = fname
            sz += os.path.getsize(fname)
            if sz > SYNC_ZIP_SIZE or cnt > SYNC_ZIP_COUNT:
                break
            cnt += 1
        z.writestr("_meta", json.dumps(files))
        z.close()
        return f.getvalue(), fnames

    def forgetAdded(self, fnames):
        if not fnames:
            return
        self.db.executemany("delete from log where fname = ?", fnames)
        self.db.commit()
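
The comment block above explains why zipAdded stores entries under ASCII names and places a _meta JSON map in the archive. A rough standalone sketch of the same convention (build_media_zip and read_media_zip are made-up helper names, not Anki functions):

import io, json, zipfile

def build_media_zip(files):
    # files: {real_name: bytes}; store entries under ascii names plus a _meta map
    buf = io.BytesIO()
    meta = {}
    with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as z:
        for i, (name, data) in enumerate(files.items()):
            z.writestr(str(i), data)
            meta[str(i)] = name
        z.writestr("_meta", json.dumps(meta))
        z.writestr("_finished", "")
    return buf.getvalue()

def read_media_zip(zip_bytes):
    out = {}
    finished = False
    with zipfile.ZipFile(io.BytesIO(zip_bytes), "r") as z:
        meta = json.loads(z.read("_meta").decode("utf8"))
        for info in z.infolist():
            if info.filename.startswith("_"):
                # "_meta", "_finished" etc. are bookkeeping entries, not media
                finished = finished or info.filename == "_finished"
                continue
            out[meta[info.filename]] = z.read(info)
    return out, finished

data = build_media_zip({"voice.mp3": b"abc", "image.jpg": b"123"})
print(read_media_zip(data))  # ({'voice.mp3': b'abc', 'image.jpg': b'123'}, True)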

    # Tracking changes (private)
    ##########################################################################

    def _initDB(self):
        self.db.executescript("""
create table media (fname text primary key, csum text, mod int);
create table meta (dirMod int, usn int); insert into meta values (0, 0);
create table log (fname text primary key, type int);
""")

    def _mtime(self, path):
        return int(os.stat(path).st_mtime)

    def _checksum(self, path):
        return checksum(open(path, "rb").read())

    def usn(self):
        return self.db.scalar("select usn from meta")

    def setUsn(self, usn):
        self.db.execute("update meta set usn = ?", usn)
        self.db.commit()

    def syncMod(self):
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        self.db.commit()

    def _changed(self):
        "Return dir mtime if it has changed since the last findChanges()"
        # doesn't track edits, but user can add or remove a file to update
        mod = self.db.scalar("select dirMod from meta")
        mtime = self._mtime(self.dir())
        if not self._isFAT32() and mod and mod == mtime:
            return False
        return mtime

    def findChanges(self):
        "Scan the media folder if it's changed, and note any changes."
        if self._changed():
            self._logChanges()

    def _logChanges(self):
        (added, removed) = self._changes()
        log = []
        media = []
        mediaRem = []
        for f in added:
            mt = self._mtime(f)
            media.append((f, self._checksum(f), mt))
            log.append((f, MEDIA_ADD))
        for f in removed:
            mediaRem.append((f,))
            log.append((f, MEDIA_REM))
        # update media db
        self.db.executemany("insert or replace into media values (?,?,?)",
                            media)
        if mediaRem:
            self.db.executemany("delete from media where fname = ?",
                                mediaRem)
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        # and logs
        self.db.executemany("insert or replace into log values (?,?)", log)
        self.db.commit()

    def _changes(self):
        self.cache = {}
        for (name, csum, mod) in self.db.execute(
            "select * from media"):
            self.cache[name] = [csum, mod, False]
        added = []
        removed = []
        # loop through on-disk files
        for f in os.listdir(self.dir()):
            # ignore folders and thumbs.db
            if os.path.isdir(f):
                continue
            if f.lower() == "thumbs.db":
                continue
            # and files with invalid chars
            bad = False
            for c in "\0", "/", "\\", ":":
                if c in f:
                    bad = True
                    break
            if bad:
                continue
            # empty files are invalid; clean them up and continue
            if not os.path.getsize(f):
                os.unlink(f)
                continue
            # newly added?
            if f not in self.cache:
                added.append(f)
            else:
                # modified since last time?
                if self._mtime(f) != self.cache[f][1]:
                    # and has different checksum?
                    if self._checksum(f) != self.cache[f][0]:
                        added.append(f)
                # mark as used
                self.cache[f][2] = True
        # look for any entries in the cache that no longer exist on disk
        for (k, v) in self.cache.items():
            if not v[2]:
                removed.append(k)
        return added, removed

    def sanityCheck(self):
        assert not self.db.scalar("select count() from log")
        cnt = self.db.scalar("select count() from media")
        return cnt

    def forceResync(self):
        self.db.execute("delete from media")
        self.db.execute("delete from log")
        self.db.execute("update meta set usn = 0, dirMod = 0")
        self.db.commit()

    def removeExisting(self, files):
        "Remove files from list of files to sync, and return missing files."
        need = []
        remove = []
        for f in files:
            if self.db.scalar("select 1 from log where fname=?", f):
                remove.append((f,))
            else:
                need.append(f)
        self.db.executemany("delete from log where fname=?", remove)
        self.db.commit()
        # if we need all the server files, it's faster to pass None than
        # the full list
        if need and len(files) == len(need):
            return None
        return need
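
The _changes() scan earlier in this class boils down to comparing cached (checksum, mtime) pairs against what is currently on disk. A simplified standalone version of that comparison (sha1 stands in for Anki's checksum helper; changed_files is an invented name):

import hashlib, os

def changed_files(folder, cache):
    # cache maps fname -> (csum, mtime) recorded at the last scan
    added, seen = [], set()
    for fname in os.listdir(folder):
        path = os.path.join(folder, fname)
        if os.path.isdir(path) or not os.path.getsize(path):
            continue
        seen.add(fname)
        mtime = int(os.stat(path).st_mtime)
        csum = hashlib.sha1(open(path, "rb").read()).hexdigest()
        old = cache.get(fname)
        if old is None:
            added.append(fname)                  # new file
        elif old[1] != mtime and old[0] != csum:
            added.append(fname)                  # touched and content changed
    removed = [f for f in cache if f not in seen]
    return added, removed

# e.g. changed_files("collection.media", {}) would report every file as added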
Example #39
0
def Collection(path: str,
               lock: bool = True,
               server: Optional[ServerData] = None,
               log: bool = False) -> _Collection:
    "Open a new or existing collection. Path must be unicode."
    backend = Backend(path)
    # fixme: this call is temporarily here to ensure the bridge is working
    # on all platforms, and should be removed in a future beta
    assert backend.plus_one(5) == 6
    assert path.endswith(".anki2")
    path = os.path.abspath(path)
    create = not os.path.exists(path)
    if create:
        base = os.path.basename(path)
        for c in ("/", ":", "\\"):
            assert c not in base
    # connect
    db = DB(path)
    db.setAutocommit(True)
    if create:
        ver = _createDB(db)
    else:
        ver = _upgradeSchema(db)
    db.execute("pragma temp_store = memory")
    db.execute("pragma cache_size = 10000")
    if not isWin:
        db.execute("pragma journal_mode = wal")
    db.setAutocommit(False)
    # add db to col and do any remaining upgrades
    col = _Collection(db, backend=backend, server=server, log=log)
    if ver < SCHEMA_VERSION:
        _upgrade(col, ver)
    elif ver > SCHEMA_VERSION:
        raise Exception("This file requires a newer version of Anki.")
    elif create:
        # add in reverse order so basic is default
        addClozeModel(col)
        addBasicTypingModel(col)
        addForwardOptionalReverse(col)
        addForwardReverse(col)
        addBasicModel(col)
        col.save()
    if lock:
        try:
            col.lock()
        except:
            col.db.close()
            raise
    return col
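
Part of the setup above is per-open SQLite tuning: temp tables in memory, a larger page cache, and WAL journalling everywhere except Windows. A hedged sketch of the equivalent calls with the standard-library sqlite3 module (open_tuned is just an illustrative name):

import sqlite3, sys

def open_tuned(path):
    db = sqlite3.connect(path)
    db.execute("pragma temp_store = memory")     # keep temp tables in RAM
    db.execute("pragma cache_size = 10000")      # larger page cache
    if not sys.platform.startswith("win"):
        db.execute("pragma journal_mode = wal")  # WAL everywhere except Windows
    return db

db = open_tuned(":memory:")
print(db.execute("pragma temp_store").fetchone())  # (2,) means MEMORY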
Example #40
0
class MediaManager(object):

    soundRegexps = ["(?i)(\[sound:(?P<fname>[^]]+)\])"]
    imgRegexps = [
        # src element quoted case
        "(?i)(<img[^>]* src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
        # unquoted case
        "(?i)(<img[^>]* src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
    ]
    regexps = soundRegexps + imgRegexps

    def __init__(self, col, server):
        self.col = col
        if server:
            self._dir = None
            return
        # media directory
        self._dir = re.sub("(?i)\.(anki2)$", ".media", self.col.path)
        # convert dir to unicode if it's not already
        if isinstance(self._dir, str):
            self._dir = unicode(self._dir, sys.getfilesystemencoding())
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        try:
            self._oldcwd = os.getcwd()
        except OSError:
            # cwd doesn't exist
            self._oldcwd = None
        try:
            os.chdir(self._dir)
        except OSError:
            raise Exception("invalidTempFolder")
        # change database
        self.connect()

    def connect(self):
        if self.col.server:
            return
        path = self.dir()+".db2"
        create = not os.path.exists(path)
        os.chdir(self._dir)
        self.db = DB(path)
        if create:
            self._initDB()
        self.maybeUpgrade()

    def _initDB(self):
        self.db.executescript("""
create table media (
 fname text not null primary key,
 csum text,           -- null indicates deleted file
 mtime int not null,  -- zero if deleted
 dirty int not null
);

create index idx_media_dirty on media (dirty);

create table meta (dirMod int, lastUsn int); insert into meta values (0, 0);
""")

    def maybeUpgrade(self):
        oldpath = self.dir()+".db"
        if os.path.exists(oldpath):
            self.db.execute('attach "../collection.media.db" as old')
            try:
                self.db.execute("""
    insert into media
     select m.fname, csum, mod, ifnull((select 1 from log l2 where l2.fname=m.fname), 0) as dirty
     from old.media m
     left outer join old.log l using (fname)
     union
     select fname, null, 0, 1 from old.log where type=1;""")
                self.db.execute("delete from meta")
                self.db.execute("""
    insert into meta select dirMod, usn from old.meta
    """)
                self.db.commit()
            except Exception, e:
                # if we couldn't import the old db for some reason, just start
                # anew
                self.col.log("failed to import old media db:"+traceback.format_exc())
            self.db.execute("detach old")
            npath = "../collection.media.db.old"
            if os.path.exists(npath):
                os.unlink(npath)
            os.rename("../collection.media.db", npath)
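
maybeUpgrade above uses SQLite's ATTACH to pull rows out of the old .db file into the new .db2 schema, and simply starts fresh if the copy fails. A rough sketch of that attach-and-copy pattern with sqlite3 (paths, table and column names are simplified placeholders):

import sqlite3

def migrate_media_db(new_path, old_path):
    db = sqlite3.connect(new_path)
    db.execute("create table if not exists media (fname text primary key, csum text)")
    db.execute("attach database ? as old", (old_path,))
    try:
        # copy whatever the old schema had; on conflict keep the newest row
        db.execute("insert or replace into media select fname, csum from old.media")
        db.commit()
    except sqlite3.Error:
        db.rollback()      # couldn't import the old data; start anew
    db.execute("detach database old")
    db.close()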
Example #41
0
class ProfileManager(object):
    def __init__(self, base=None, profile=None):
        self.name = None
        # instantiate base folder
        if base:
            self.base = os.path.abspath(base)
        else:
            self.base = self._defaultBase()
        self.ensureBaseExists()
        # load metadata
        self.firstRun = self._loadMeta()
        # did the user request a profile to start up with?
        if profile:
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureBaseExists(self):
        try:
            self._ensureExists(self.base)
        except:
            # can't translate, as lang not initialized
            QMessageBox.critical(
                None, "Error", """\
Anki can't write to the harddisk. Please see the \
documentation for information on using a flash drive.""")
            raise

    # Profile load/save
    ######################################################################

    def profiles(self):
        return sorted(
            unicode(x, "utf8")
            for x in self.db.list("select name from profiles")
            if x != "_global")

    def load(self, name, passwd=None):
        prof = cPickle.loads(
            self.db.scalar("select data from profiles where name = ?",
                           name.encode("utf8")))
        if prof['key'] and prof['key'] != self._pwhash(passwd):
            self.name = None
            return False
        if name != "_global":
            self.name = name
            self.profile = prof
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.profile),
                        self.name.encode("utf8"))
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute("insert into profiles values (?, ?)",
                        name.encode("utf8"), cPickle.dumps(prof))
        self.db.commit()

    def remove(self, name):
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?",
                        name.encode("utf8"))
        self.db.commit()

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            showWarning(_("Folder already exists."))
            self.name = oldName
            return
        # update name
        self.db.execute("update profiles set name = ? where name = ?",
                        name.encode("utf8"), oldName.encode("utf-8"))
        # rename folder
        os.rename(oldFolder, newFolder)
        self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons"))

    def backupFolder(self):
        return self._ensureExists(os.path.join(self.profileFolder(),
                                               "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _defaultBase(self):
        if isWin:
            if False:  #qtmajor >= 5:
                loc = QStandardPaths.writeableLocation(
                    QStandardPaths.DocumentsLocation)
            else:
                loc = QDesktopServices.storageLocation(
                    QDesktopServices.DocumentsLocation)
            return os.path.join(loc, "Anki")
        elif isMac:
            return os.path.expanduser("~/Documents/Anki")
        else:
            return os.path.expanduser("~/Anki")

    def _loadMeta(self):
        path = os.path.join(self.base, "prefs.db")
        new = not os.path.exists(path)

        def recover():
            # if we can't load profile, start with a new one
            os.rename(path, path + ".broken")
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")

        try:
            self.db = DB(path, text=str)
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created
            try:
                self.meta = cPickle.loads(
                    self.db.scalar(
                        "select data from profiles where name = '_global'"))
                return
            except:
                recover()
                return self._loadMeta()
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute(
            "insert or replace into profiles values ('_global', ?)",
            cPickle.dumps(metaConf))
        self._setDefaultLang()
        return True

    def ensureProfile(self):
        "Create a new profile if none exists."
        if self.firstRun:
            self.create(_("User 1"))
            p = os.path.join(self.base, "README.txt")
            open(p, "w").write((_("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite + "#startupopts")).encode("utf8"))

    def _pwhash(self, passwd):
        return checksum(unicode(self.meta['id']) + unicode(passwd))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        # the dialog expects _ to be defined, but we're running before
        # setupLang() has been called. so we create a dummy op for now
        import __builtin__
        __builtin__.__dict__['_'] = lambda x: x

        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass

        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.connect(d, SIGNAL("accepted()"), self._onLangSelected)
        d.connect(d, SIGNAL("rejected()"), lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en"
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        obj = langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(None, "Anki", en % name,
                                 QMessageBox.Yes | QMessageBox.No,
                                 QMessageBox.No)
        if r != QMessageBox.Yes:
            return self._setDefaultLang()
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()
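
The profiles table used throughout these snippets is simply name -> pickled dict. A toy sketch of that storage scheme with sqlite3 and pickle (save_profile and load_profile are invented names; Anki's own code goes through its DB wrapper instead):

import pickle, sqlite3

db = sqlite3.connect(":memory:")
db.execute("create table profiles (name text primary key, data blob not null)")

def save_profile(name, conf):
    db.execute("insert or replace into profiles values (?, ?)",
               (name, pickle.dumps(conf)))
    db.commit()

def load_profile(name):
    (data,) = db.execute(
        "select data from profiles where name = ?", (name,)).fetchone()
    return pickle.loads(data)

save_profile("_global", {"defaultLang": "en"})
print(load_profile("_global"))  # {'defaultLang': 'en'}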
Example #42
0
class MediaManager(object):

    soundRegexps = ["(?i)(\[sound:(?P<fname>[^]]+)\])"]
    imgRegexps = [
        # src element quoted case
        "(?i)(<img[^>]* src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
        # unquoted case
        "(?i)(<img[^>]* src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
    ]
    regexps = soundRegexps + imgRegexps

    def __init__(self, col):
        self.col = col
        # media directory
        self._dir = re.sub("(?i)\.(anki2)$", ".media", self.col.path)
        # convert dir to unicode if it's not already
        if isinstance(self._dir, str):
            self._dir = unicode(self._dir, sys.getfilesystemencoding())
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        try:
            self._oldcwd = os.getcwd()
        except OSError:
            # cwd doesn't exist
            self._oldcwd = None
        try:
            os.chdir(self._dir)
        except OSError:
            raise Exception("invalidTempFolder")
        # change database
        self.connect()

    def connect(self):
        path = self.dir() + ".db2"
        create = not os.path.exists(path)
        os.chdir(self._dir)
        self.db = DB(path)
        if create:
            self._initDB()
        self.maybeUpgrade()

    def _initDB(self):
        self.db.executescript("""
create table media (
 fname text not null primary key,
 csum text,           -- null indicates deleted file
 mtime int not null,  -- zero if deleted
 dirty int not null
);

create index idx_media_dirty on media (dirty);

create table meta (dirMod int, lastUsn int); insert into meta values (0, 0);
""")

    def maybeUpgrade(self):
        oldpath = self.dir() + ".db"
        if os.path.exists(oldpath):
            self.db.execute('attach "../collection.media.db" as old')
            try:
                self.db.execute("""
    insert into media
     select m.fname, csum, mod, ifnull((select 1 from log l2 where l2.fname=m.fname), 0) as dirty
     from old.media m
     left outer join old.log l using (fname)
     union
     select fname, null, 0, 1 from old.log where type=1;""")
                self.db.execute("delete from meta")
                self.db.execute("""
    insert into meta select dirMod, usn from old.meta
    """)
                self.db.commit()
            except Exception, e:
                # if we couldn't import the old db for some reason, just start
                # anew
                self.col.log("failed to import old media db:" +
                             traceback.format_exc())
            self.db.execute("detach old")
            npath = "../collection.media.db.old"
            if os.path.exists(npath):
                os.unlink(npath)
            os.rename("../collection.media.db", npath)
Example #43
0
    def run(self):
        db = DB(self.file)
        ver = db.scalar(
            "select value from global_variables where key='version'")
        assert ver.startswith('Mnemosyne SQL 1') or ver == "2"
        # gather facts into temp objects
        curid = None
        notes = {}
        note = None
        for _id, id, k, v in db.execute("""
select _id, id, key, value from facts f, data_for_fact d where
f._id=d._fact_id"""):
            if id != curid:
                if note:
                    notes[note['_id']] = note
                note = {'_id': _id}
                curid = id
            note[k] = v
        if note:
            notes[note['_id']] = note
        # gather cards
        front = []
        frontback = []
        vocabulary = []
        cloze = {}
        for row in db.execute("""
select _fact_id, fact_view_id, tags, next_rep, last_rep, easiness,
acq_reps+ret_reps, lapses, card_type_id from cards"""):
            # categorize note
            note = notes[row[0]]
            if row[1].endswith(".1"):
                if row[1].startswith("1.") or row[1].startswith("1::"):
                    front.append(note)
                elif row[1].startswith("2.") or row[1].startswith("2::"):
                    frontback.append(note)
                elif row[1].startswith("3.") or row[1].startswith("3::"):
                    vocabulary.append(note)
                elif row[1].startswith("5.1"):
                    cloze[row[0]] = note
            # check for None to fix issue where import can error out
            rawTags = row[2]
            if rawTags is None:
                rawTags = ""
            # merge tags into note
            tags = rawTags.replace(", ", "\x1f").replace(" ", "_")
            tags = tags.replace("\x1f", " ")
            if "tags" not in note:
                note['tags'] = []
            note['tags'] += self.col.tags.split(tags)
            note['tags'] = self.col.tags.canonify(note['tags'])
            # if it's a new card we can go with the defaults
            if row[3] == -1:
                continue
            # add the card
            c = ForeignCard()
            c.factor = int(row[5]*1000)
            c.reps = row[6]
            c.lapses = row[7]
            # ivl is inferred in mnemosyne
            next, prev = row[3:5]
            c.ivl = max(1, (next - prev)/86400)
            # work out how long we've got left
            rem = int((next - time.time())/86400)
            c.due = self.col.sched.today+rem
            # get ord
            m = re.search(".(\d+)$", row[1])
            ord = int(m.group(1))-1
            if 'cards' not in note:
                note['cards'] = {}
            note['cards'][ord] = c
        self._addFronts(front)
        total = self.total
        self._addFrontBacks(frontback)
        total += self.total
        self._addVocabulary(vocabulary)
        self.total += total
        self._addCloze(cloze)
        self.total += total
        self.log.append(ngettext("%d note imported.", "%d notes imported.", self.total) % self.total)
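
The scheduling conversion above infers the interval from Mnemosyne's next/last repetition timestamps and expresses the due day relative to Anki's day counter. A small worked example with invented numbers:

import time

today = 500                            # stand-in for col.sched.today
now = time.time()
next_rep = now + 3 * 86400             # next repetition in three days
last_rep = next_rep - 10 * 86400       # last repetition ten days before that

ivl = max(1, int((next_rep - last_rep) / 86400))   # 10-day interval
rem = int((next_rep - now) / 86400)                # 3 days until due
due = today + rem                                  # 500 + 3 = 503
print(ivl, due)                                    # 10 503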
Example #44
0
class ProfileManager:
    def __init__(self, base: str | None = None) -> None:
        ## Settings which should be forgotten each Anki restart
        self.session: dict[str, Any] = {}
        self.name: str | None = None
        self.db: DB | None = None
        self.profile: dict | None = None
        # instantiate base folder
        self.base: str
        self._setBaseFolder(base)

    def setupMeta(self) -> LoadMetaResult:
        # load metadata
        res = self._loadMeta()
        self.firstRun = res.firstTime
        return res

    # profile load on startup
    def openProfile(self, profile: str) -> None:
        if profile:
            if profile not in self.profiles():
                QMessageBox.critical(None, tr.qt_misc_error(),
                                     tr.profiles_profile_does_not_exist())
                sys.exit(1)
            try:
                self.load(profile)
            except TypeError as exc:
                raise Exception("Provided profile does not exist.") from exc

    # Base creation
    ######################################################################

    def ensureBaseExists(self) -> None:
        self._ensureExists(self.base)

    # Profile load/save
    ######################################################################

    def profiles(self) -> list:
        def names() -> list:
            return self.db.list(
                "select name from profiles where name != '_global'")

        n = names()
        if not n:
            self._ensureProfile()
            n = names()

        return n

    def _unpickle(self, data: bytes) -> Any:
        class Unpickler(pickle.Unpickler):
            def find_class(self, class_module: str, name: str) -> Any:
                # handle sip lookup ourselves, mapping to current Qt version
                if class_module == "sip" or class_module.endswith(".sip"):

                    def unpickle_type(module: str, klass: str,
                                      args: Any) -> Any:
                        if qtmajor > 5:
                            module = module.replace("Qt5", "Qt6")
                        else:
                            module = module.replace("Qt6", "Qt5")
                        if klass == "QByteArray":
                            if module.startswith("PyQt4"):
                                # can't trust str objects from python 2
                                return QByteArray()
                            else:
                                # return the bytes directly
                                return args[0]
                        elif name == "_unpickle_enum":
                            if qtmajor == 5:
                                return sip._unpickle_enum(module, klass,
                                                          args)  # type: ignore
                            else:
                                # old style enums can't be unpickled
                                return None
                        else:
                            return sip._unpickle_type(module, klass,
                                                      args)  # type: ignore

                    return unpickle_type
                else:
                    return super().find_class(class_module, name)

        up = Unpickler(io.BytesIO(data), errors="ignore")
        return up.load()

    def _pickle(self, obj: Any) -> bytes:
        for key, val in obj.items():
            if isinstance(val, QByteArray):
                obj[key] = bytes(val)  # type: ignore

        return pickle.dumps(obj, protocol=4)
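
_unpickle above works by overriding pickle.Unpickler.find_class, which is called for every global reference in the stream; that is how the sip classes get remapped to the running Qt version. A minimal standalone sketch of the same hook (the old_pkg.settings remapping is invented):

import io, pickle

class RemappingUnpickler(pickle.Unpickler):
    def find_class(self, module, name):
        # pretend a legacy module was renamed between releases
        if module == "old_pkg.settings":
            module = "collections"   # e.g. map a legacy class onto OrderedDict
            name = "OrderedDict"
        return super().find_class(module, name)

data = pickle.dumps({"uiScale": 1.0})
print(RemappingUnpickler(io.BytesIO(data)).load())  # {'uiScale': 1.0}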

    def load(self, name: str) -> bool:
        if name == "_global":
            raise Exception("_global is not a valid name")
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = ?", name)
        self.name = name
        try:
            self.profile = self._unpickle(data)
        except:
            print(traceback.format_exc())
            QMessageBox.warning(
                None,
                tr.profiles_profile_corrupt(),
                tr.profiles_anki_could_not_read_your_profile(),
            )
            print("resetting corrupt profile")
            self.profile = profileConf.copy()
            self.save()
        return True

    def save(self) -> None:
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.profile), self.name)
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()

    def create(self, name: str) -> None:
        prof = profileConf.copy()
        self.db.execute("insert or ignore into profiles values (?, ?)", name,
                        self._pickle(prof))
        self.db.commit()

    def remove(self, name: str) -> None:
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def trashCollection(self) -> None:
        p = self.collectionPath()
        if os.path.exists(p):
            send2trash(p)

    def rename(self, name: str) -> None:
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (oldFolder.lower()
                                             == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = "".join([oldFolder, "-temp"])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(
                        tr.profiles_please_remove_the_folder_and(
                            val=midFolder))
                    self.name = oldName
                    return
            else:
                showWarning(tr.profiles_folder_already_exists())
                self.name = oldName
                return

        # update name
        self.db.execute("update profiles set name = ? where name = ?", name,
                        oldName)
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except Exception as e:
            self.db.rollback()
            if "WinError 5" in str(e):
                showWarning(tr.profiles_anki_could_not_rename_your_profile())
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create: bool = True) -> str:
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self) -> str:
        return self._ensureExists(os.path.join(self.base, "addons21"))

    def backupFolder(self) -> str:
        return self._ensureExists(os.path.join(self.profileFolder(),
                                               "backups"))

    def collectionPath(self) -> str:
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Downgrade
    ######################################################################

    def downgrade(self, profiles: list[str]) -> list[str]:
        "Downgrade all profiles. Return a list of profiles that couldn't be opened."
        problem_profiles = []
        for name in profiles:
            path = os.path.join(self.base, name, "collection.anki2")
            if not os.path.exists(path):
                continue
            with DB(path) as db:
                if db.scalar("select ver from col") == 11:
                    # nothing to do
                    continue
            try:
                c = Collection(path)
                c.close(save=False, downgrade=True)
            except Exception as e:
                print(e)
                problem_profiles.append(name)
        return problem_profiles

    # Helpers
    ######################################################################

    def _ensureExists(self, path: str) -> str:
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _setBaseFolder(self, cmdlineBase: str | None) -> None:
        if cmdlineBase:
            self.base = os.path.abspath(cmdlineBase)
        elif os.environ.get("ANKI_BASE"):
            self.base = os.path.abspath(os.environ["ANKI_BASE"])
        else:
            self.base = self._defaultBase()
        self.ensureBaseExists()

    def _defaultBase(self) -> str:
        if is_win:
            from aqt.winpaths import get_appdata

            return os.path.join(get_appdata(), "Anki2")
        elif is_mac:
            return os.path.expanduser("~/Library/Application Support/Anki2")
        else:
            dataDir = os.environ.get("XDG_DATA_HOME",
                                     os.path.expanduser("~/.local/share"))
            if not os.path.exists(dataDir):
                os.makedirs(dataDir)
            return os.path.join(dataDir, "Anki2")

    def _loadMeta(self, retrying: bool = False) -> LoadMetaResult:
        result = LoadMetaResult()
        result.firstTime = False
        result.loadError = retrying

        opath = os.path.join(self.base, "prefs.db")
        path = os.path.join(self.base, "prefs21.db")
        if not retrying and os.path.exists(opath) and not os.path.exists(path):
            shutil.copy(opath, path)

        result.firstTime = not os.path.exists(path)

        def recover() -> None:
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            for suffix in ("", "-journal"):
                fpath = path + suffix
                if os.path.exists(fpath):
                    os.unlink(fpath)

        # open DB file and read data
        try:
            self.db = DB(path)
            if self.db.scalar("pragma integrity_check") != "ok":
                raise Exception("corrupt db")
            self.db.execute("""
create table if not exists profiles
(name text primary key, data blob not null);""")
            data = self.db.scalar(
                "select cast(data as blob) from profiles where name = '_global'"
            )
        except:
            traceback.print_stack()
            if result.loadError:
                # already failed, prevent infinite loop
                raise
            # delete files and try again
            recover()
            return self._loadMeta(retrying=True)

        # try to read data
        if not result.firstTime:
            try:
                self.meta = self._unpickle(data)
                return result
            except:
                traceback.print_stack()
                print("resetting corrupt _global")
                result.loadError = True
                result.firstTime = True

        # if new or read failed, create a default global profile
        self.meta = metaConf.copy()
        self.db.execute(
            "insert or replace into profiles values ('_global', ?)",
            self._pickle(metaConf),
        )
        return result
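
_loadMeta above follows a check-then-recover pattern: run integrity_check, and if anything fails, delete the preferences database (and its journal) and retry exactly once. A rough sketch of that pattern with sqlite3 (open_or_recreate is an illustrative name):

import os, sqlite3

def open_or_recreate(path, retrying=False):
    db = None
    try:
        db = sqlite3.connect(path)
        if db.execute("pragma integrity_check").fetchone()[0] != "ok":
            raise sqlite3.DatabaseError("corrupt db")
        db.execute("create table if not exists profiles "
                   "(name text primary key, data blob not null)")
        return db
    except Exception:
        if db is not None:
            db.close()
        if retrying:
            raise                     # second failure: don't loop forever
        for suffix in ("", "-journal"):
            if os.path.exists(path + suffix):
                os.unlink(path + suffix)
        return open_or_recreate(path, retrying=True)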

    def _ensureProfile(self) -> None:
        "Create a new profile if none exists."
        self.create(tr.profiles_user_1())
        p = os.path.join(self.base, "README.txt")
        with open(p, "w", encoding="utf8") as file:
            file.write(
                without_unicode_isolation(
                    tr.profiles_folder_readme(
                        link=f"{appHelpSite}files#startup-options", )) + "\n")

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def setDefaultLang(self, idx: int) -> None:
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self) -> None:
                pass

        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        disable_help_button(d)
        qconnect(d.accepted, self._onLangSelected)
        qconnect(d.rejected, lambda: True)
        # update list
        f.lang.addItems([x[0] for x in anki.lang.langs])
        f.lang.setCurrentRow(idx)
        d.exec()

    def _onLangSelected(self) -> None:
        f = self.langForm
        obj = anki.lang.langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        r = QMessageBox.question(
            None,
            "Anki",
            tr.profiles_confirm_lang_choice(lang=name),
            QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No,
            QMessageBox.StandardButton.No  # type: ignore
        )
        if r != QMessageBox.StandardButton.Yes:
            return self.setDefaultLang(f.lang.currentRow())
        self.setLang(code)

    def setLang(self, code: str) -> None:
        self.meta["defaultLang"] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, self._pickle(self.meta), "_global")
        self.db.commit()
        anki.lang.set_lang(code)

    # OpenGL
    ######################################################################

    def _gldriver_path(self) -> str:
        return os.path.join(self.base, "gldriver")

    def video_driver(self) -> VideoDriver:
        path = self._gldriver_path()
        try:
            with open(path, encoding="utf8") as file:
                text = file.read().strip()
                return VideoDriver(text).constrained_to_platform()
        except (ValueError, OSError):
            return VideoDriver.default_for_platform()

    def set_video_driver(self, driver: VideoDriver) -> None:
        with open(self._gldriver_path(), "w", encoding="utf8") as file:
            file.write(driver.value)

    def set_next_video_driver(self) -> None:
        self.set_video_driver(self.video_driver().next())

    # Shared options
    ######################################################################

    def uiScale(self) -> float:
        scale = self.meta.get("uiScale", 1.0)
        return max(scale, 1)

    def setUiScale(self, scale: float) -> None:
        self.meta["uiScale"] = scale

    def last_addon_update_check(self) -> int:
        return self.meta.get("last_addon_update_check", 0)

    def set_last_addon_update_check(self, secs: int) -> None:
        self.meta["last_addon_update_check"] = secs

    def night_mode(self) -> bool:
        return self.meta.get("night_mode", False)

    def set_night_mode(self, on: bool) -> None:
        self.meta["night_mode"] = on

    def theme(self) -> Theme:
        return Theme(self.meta.get("theme", 0))

    def set_theme(self, theme: Theme) -> None:
        self.meta["theme"] = theme.value

    def dark_mode_widgets(self) -> bool:
        return self.meta.get("dark_mode_widgets", False)

    # Profile-specific
    ######################################################################

    def set_sync_key(self, val: str | None) -> None:
        self.profile["syncKey"] = val

    def set_sync_username(self, val: str | None) -> None:
        self.profile["syncUser"] = val

    def set_host_number(self, val: int | None) -> None:
        self.profile["hostNum"] = val or 0

    def media_syncing_enabled(self) -> bool:
        return self.profile["syncMedia"]

    def auto_syncing_enabled(self) -> bool:
        return self.profile["autoSync"]

    def sync_auth(self) -> SyncAuth | None:
        hkey = self.profile.get("syncKey")
        if not hkey:
            return None
        return SyncAuth(hkey=hkey, host_number=self.profile.get("hostNum", 0))

    def clear_sync_auth(self) -> None:
        self.profile["syncKey"] = None
        self.profile["syncUser"] = None
        self.profile["hostNum"] = 0

    def auto_sync_media_minutes(self) -> int:
        return self.profile.get("autoSyncMediaMinutes", 15)

    def set_auto_sync_media_minutes(self, val: int) -> None:
        self.profile["autoSyncMediaMinutes"] = val

    def show_browser_table_tooltips(self) -> bool:
        return self.profile.get("browserTableTooltips", True)

    def set_show_browser_table_tooltips(self, val: bool) -> None:
        self.profile["browserTableTooltips"] = val
Example #45
0
class ProfileManager(object):

    def __init__(self, base=None, profile=None):
        self.name = None
        # instantiate base folder
        self.base = base or self._defaultBase()
        self.ensureLocalFS()
        self.ensureBaseExists()
        # load metadata
        self.firstRun = self._loadMeta()
        # did the user request a profile to start up with?
        if profile:
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureLocalFS(self):
        if self.base.startswith("\\\\"):
            QMessageBox.critical(
                None, "Error", """\
To use Anki on a network share, the share must be mapped to a local drive \
letter. Please see the 'File Locations' section of the manual for more \
information.""")
            raise Exception("unc")

    def ensureBaseExists(self):
        try:
            self._ensureExists(self.base)
        except:
            # can't translate, as lang not initialized
            QMessageBox.critical(
                None, "Error", """\
Anki can't write to the harddisk. Please see the \
documentation for information on using a flash drive.""")
            raise

    # Profile load/save
    ######################################################################

    def profiles(self):
        return sorted(
            unicode(x, "utf8") for x in
            self.db.list("select name from profiles")
            if x != "_global")

    def load(self, name, passwd=None):
        prof = cPickle.loads(
            self.db.scalar("select data from profiles where name = ?",
                           name.encode("utf8")))
        if prof['key'] and prof['key'] != self._pwhash(passwd):
            self.name = None
            return False
        if name != "_global":
            self.name = name
            self.profile = prof
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.profile),
                        self.name.encode("utf8"))
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute("insert into profiles values (?, ?)",
                        name.encode("utf8"), cPickle.dumps(prof))
        self.db.commit()

    def remove(self, name):
        shutil.rmtree(self.profileFolder())
        self.db.execute("delete from profiles where name = ?",
                        name.encode("utf8"))
        self.db.commit()

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            showWarning(_("Folder already exists."))
            self.name = oldName
            return
        # update name
        self.db.execute("update profiles set name = ? where name = ?",
                        name.encode("utf8"), oldName.encode("utf-8"))
        # rename folder
        os.rename(oldFolder, newFolder)
        self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons"))

    def backupFolder(self):
        return self._ensureExists(
            os.path.join(self.profileFolder(), "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _defaultBase(self):
        if isWin:
            s = QSettings(QSettings.UserScope, "Microsoft", "Windows")
            s.beginGroup("CurrentVersion/Explorer/Shell Folders")
            d = s.value("Personal")
            return os.path.join(d, "Anki")
        elif isMac:
            return os.path.expanduser("~/Documents/Anki")
        else:
            return os.path.expanduser("~/Anki")

    def _loadMeta(self):
        path = os.path.join(self.base, "prefs.db")
        new = not os.path.exists(path)
        self.db = DB(path, text=str)
        self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        if new:
            # create a default global profile
            self.meta = metaConf.copy()
            self.db.execute("insert into profiles values ('_global', ?)",
                            cPickle.dumps(metaConf))
            self._setDefaultLang()
            return True
        else:
            # load previously created
            self.meta = cPickle.loads(
                self.db.scalar(
                    "select data from profiles where name = '_global'"))

    def ensureProfile(self):
        "Create a new profile if none exists."
        if self.firstRun:
            self.create(_("User 1"))
            p = os.path.join(self.base, "README.txt")
            open(p, "w").write((_("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite +  "#startupopts")).encode("utf8"))

    def _pwhash(self, passwd):
        return checksum(unicode(self.meta['id'])+unicode(passwd))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        # the dialog expects _ to be defined, but we're running before
        # setupLang() has been called. so we create a dummy op for now
        import __builtin__
        __builtin__.__dict__['_'] = lambda x: x
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass
        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.connect(d, SIGNAL("accepted()"), self._onLangSelected)
        d.connect(d, SIGNAL("rejected()"), lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en"
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        code = langs[f.lang.currentRow()][1]
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()
Example #46
0
class MediaManager(object):

    soundRegexps = ["(?i)(\[sound:(?P<fname>[^]]+)\])"]
    imgRegexps = [
        # src element quoted case
        "(?i)(<img[^>]+src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
        # unquoted case
        "(?i)(<img[^>]+src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
    ]
    regexps = soundRegexps + imgRegexps
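    # e.g. (illustrative) "[sound:bell.mp3]" yields fname == "bell.mp3",
    # and '<img src="map.png">' yields fname == "map.png"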

    def __init__(self, col, server):
        self.col = col
        if server:
            self._dir = None
            return
        # media directory
        self._dir = re.sub("(?i)\.(anki2)$", ".media", self.col.path)
        # convert dir to unicode if it's not already
        if isinstance(self._dir, str):
            self._dir = unicode(self._dir, sys.getfilesystemencoding())
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        try:
            self._oldcwd = os.getcwd()
        except OSError:
            # cwd doesn't exist
            self._oldcwd = None
        os.chdir(self._dir)
        # change database
        self.connect()

    def connect(self):
        if self.col.server:
            return
        path = self.dir() + ".db"
        create = not os.path.exists(path)
        os.chdir(self._dir)
        self.db = DB(path)
        if create:
            self._initDB()

    def close(self):
        if self.col.server:
            return
        self.db.close()
        self.db = None
        # change cwd back to old location
        if self._oldcwd:
            try:
                os.chdir(self._oldcwd)
            except:
                # may have been deleted
                pass

    def dir(self):
        return self._dir

    def _isFAT32(self):
        if not isWin:
            return
        import win32api, win32file
        try:
            name = win32file.GetVolumeNameForVolumeMountPoint(self._dir[:3])
        except:
            # mapped & unmapped network drive; pray that it's not vfat
            return
        if win32api.GetVolumeInformation(name)[4].lower().startswith("fat"):
            return True

    # Adding media
    ##########################################################################

    def addFile(self, opath):
        return self.writeData(opath, open(opath, "rb").read())

    def writeData(self, opath, data):
        # if fname is a full path, use only the basename
        fname = os.path.basename(opath)
        # remove any dangerous characters
        base = self.stripIllegal(fname)
        (root, ext) = os.path.splitext(base)

        def repl(match):
            n = int(match.group(1))
            return " (%d)" % (n + 1)

        # find the first available name
        csum = checksum(data)
        while True:
            fname = root + ext
            path = os.path.join(self.dir(), fname)
            # if it doesn't exist, copy it directly
            if not os.path.exists(path):
                open(path, "wb").write(data)
                return fname
            # if it's identical, reuse
            if checksum(open(path, "rb").read()) == csum:
                return fname
            # otherwise, increment the index in the filename
            reg = " \((\d+)\)$"
            if not re.search(reg, root):
                root = root + " (1)"
            else:
                root = re.sub(reg, repl, root)

    # String manipulation
    ##########################################################################

    def filesInStr(self, mid, string, includeRemote=False):
        l = []
        model = self.col.models.get(mid)
        strings = []
        if model['type'] == MODEL_CLOZE and "{{c" in string:
            # if the field has clozes in it, we'll need to expand the
            # possibilities so we can render latex
            strings = self._expandClozes(string)
        else:
            strings = [string]
        for string in strings:
            # handle latex
            string = mungeQA(string, None, None, model, None, self.col)
            # extract filenames
            for reg in self.regexps:
                for match in re.finditer(reg, string):
                    fname = match.group("fname")
                    isLocal = not re.match("(https?|ftp)://", fname.lower())
                    if isLocal or includeRemote:
                        l.append(fname)
        return l

    def _expandClozes(self, string):
        ords = set(re.findall("{{c(\d+)::.+?}}", string))
        strings = []
        from anki.template.template import clozeReg

        def qrepl(m):
            if m.group(3):
                return "[%s]" % m.group(3)
            else:
                return "[...]"

        def arepl(m):
            return m.group(1)

        for ord in ords:
            s = re.sub(clozeReg % ord, qrepl, string)
            s = re.sub(clozeReg % ".+?", "\\1", s)
            strings.append(s)
        strings.append(re.sub(clozeReg % ".+?", arepl, string))
        return strings

    def transformNames(self, txt, func):
        for reg in self.regexps:
            txt = re.sub(reg, func, txt)
        return txt

    def strip(self, txt):
        for reg in self.regexps:
            txt = re.sub(reg, "", txt)
        return txt

    def escapeImages(self, string):
        def repl(match):
            tag = match.group(0)
            fname = match.group("fname")
            if re.match("(https?|ftp)://", fname):
                return tag
            return tag.replace(fname, urllib.quote(fname.encode("utf-8")))

        for reg in self.imgRegexps:
            string = re.sub(reg, repl, string)
        return string

    # Rebuilding DB
    ##########################################################################

    def check(self, local=None):
        "Return (missingFiles, unusedFiles)."
        mdir = self.dir()
        # generate card q/a and look through all references
        normrefs = {}

        def norm(s):
            if isinstance(s, unicode) and isMac:
                return unicodedata.normalize('NFD', s)
            return s

        for f in self.allMedia():
            normrefs[norm(f)] = True
        # loop through directory and find unused & missing media
        unused = []
        if local is None:
            files = os.listdir(mdir)
        else:
            files = local
        for file in files:
            if not local:
                path = os.path.join(mdir, file)
                if not os.path.isfile(path):
                    # ignore directories
                    continue
                if file.startswith("_"):
                    # leading _ says to ignore file
                    continue
            nfile = norm(file)
            if nfile not in normrefs:
                unused.append(file)
            else:
                del normrefs[nfile]
        nohave = [x for x in normrefs.keys() if not x.startswith("_")]
        return (nohave, unused)

    def allMedia(self):
        "Return a set of all referenced filenames."
        files = set()
        for mid, flds in self.col.db.execute("select mid, flds from notes"):
            for f in self.filesInStr(mid, flds):
                files.add(f)
        return files

    # Copying on import
    ##########################################################################

    def have(self, fname):
        return os.path.exists(os.path.join(self.dir(), fname))

    # Media syncing - changes and removal
    ##########################################################################

    def hasChanged(self):
        return self.db.scalar("select 1 from log limit 1")

    def removed(self):
        return self.db.list("select * from log where type = ?", MEDIA_REM)

    def syncRemove(self, fnames):
        # remove provided deletions
        for f in fnames:
            if os.path.exists(f):
                send2trash.send2trash(f)
            self.db.execute("delete from log where fname = ?", f)
            self.db.execute("delete from media where fname = ?", f)
        # and all locally-logged deletions, as server has acked them
        self.db.execute("delete from log where type = ?", MEDIA_REM)
        self.db.commit()

    # Media syncing - unbundling zip files from server
    ##########################################################################

    def syncAdd(self, zipData):
        "Extract zip data; true if finished."
        f = StringIO(zipData)
        z = zipfile.ZipFile(f, "r")
        finished = False
        meta = None
        media = []
        # get meta info first
        meta = json.loads(z.read("_meta"))
        nextUsn = int(z.read("_usn"))
        # then loop through all files
        for i in z.infolist():
            if i.filename == "_meta" or i.filename == "_usn":
                # ignore previously-retrieved meta
                continue
            elif i.filename == "_finished":
                # last zip in set
                finished = True
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                # save file
                open(name, "wb").write(data)
                # update db
                media.append((name, csum, self._mtime(name)))
                # remove entries from local log
                self.db.execute("delete from log where fname = ?", name)
        # update media db and note new starting usn
        if media:
            self.db.executemany("insert or replace into media values (?,?,?)",
                                media)
        self.setUsn(nextUsn)  # commits
        # if we have finished adding, we need to record the new folder mtime
        # so that we don't trigger a needless scan
        if finished:
            self.syncMod()
        return finished

    # Illegal characters
    ##########################################################################

    _illegalCharReg = re.compile(r'[][><:"/?*^\\|\0]')

    def stripIllegal(self, str):
        return re.sub(self._illegalCharReg, "", str)

    def hasIllegal(self, str):
        return not not re.search(self._illegalCharReg, str)

    # Media syncing - bundling zip files to send to server
    ##########################################################################
    # Because there's no standard filename encoding for zips, and because not
    # all zip clients support retrieving mtime, we store the files as ascii
    # and place a json file in the zip with the necessary information.
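    #
    # For illustration (hypothetical filenames), one bundle produced by
    # zipAdded() might contain:
    #   "0"         -> raw bytes of "bell.mp3"
    #   "1"         -> raw bytes of "map.png"
    #   "_meta"     -> '{"0": "bell.mp3", "1": "map.png"}'
    #   "_finished" -> ""   (present only when every logged addition fits)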

    def zipAdded(self):
        "Add files to a zip until over SYNC_ZIP_SIZE/COUNT. Return zip data."
        f = StringIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)
        sz = 0
        cnt = 0
        files = {}
        cur = self.db.execute("select fname from log where type = ?",
                              MEDIA_ADD)
        fnames = []
        while 1:
            fname = cur.fetchone()
            if not fname:
                # add a flag so the server knows it can clean up
                z.writestr("_finished", "")
                break
            fname = fname[0]
            fnames.append([fname])
            z.write(fname, str(cnt))
            files[str(cnt)] = fname
            sz += os.path.getsize(fname)
            if sz > SYNC_ZIP_SIZE or cnt > SYNC_ZIP_COUNT:
                break
            cnt += 1
        z.writestr("_meta", json.dumps(files))
        z.close()
        return f.getvalue(), fnames

    def forgetAdded(self, fnames):
        if not fnames:
            return
        self.db.executemany("delete from log where fname = ?", fnames)
        self.db.commit()

    # Tracking changes (private)
    ##########################################################################

    def _initDB(self):
        self.db.executescript("""
create table media (fname text primary key, csum text, mod int);
create table meta (dirMod int, usn int); insert into meta values (0, 0);
create table log (fname text primary key, type int);
""")

    def _mtime(self, path):
        return int(os.stat(path).st_mtime)

    def _checksum(self, path):
        return checksum(open(path, "rb").read())

    def usn(self):
        return self.db.scalar("select usn from meta")

    def setUsn(self, usn):
        self.db.execute("update meta set usn = ?", usn)
        self.db.commit()

    def syncMod(self):
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        self.db.commit()

    def _changed(self):
        "Return dir mtime if it has changed since the last findChanges()"
        # doesn't track edits, but user can add or remove a file to update
        mod = self.db.scalar("select dirMod from meta")
        mtime = self._mtime(self.dir())
        if not self._isFAT32() and mod and mod == mtime:
            return False
        return mtime

    def findChanges(self):
        "Scan the media folder if it's changed, and note any changes."
        if self._changed():
            self._logChanges()

    def _logChanges(self):
        (added, removed) = self._changes()
        log = []
        media = []
        mediaRem = []
        for f in added:
            mt = self._mtime(f)
            media.append((f, self._checksum(f), mt))
            log.append((f, MEDIA_ADD))
        for f in removed:
            mediaRem.append((f, ))
            log.append((f, MEDIA_REM))
        # update media db
        self.db.executemany("insert or replace into media values (?,?,?)",
                            media)
        if mediaRem:
            self.db.executemany("delete from media where fname = ?", mediaRem)
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        # and logs
        self.db.executemany("insert or replace into log values (?,?)", log)
        self.db.commit()

    def _changes(self):
        self.cache = {}
        for (name, csum, mod) in self.db.execute("select * from media"):
            self.cache[name] = [csum, mod, False]
        added = []
        removed = []
        # loop through on-disk files
        for f in os.listdir(self.dir()):
            # ignore folders and thumbs.db
            if os.path.isdir(f):
                continue
            if f.lower() == "thumbs.db":
                continue
            # and files with invalid chars
            if self.hasIllegal(f):
                continue
            # empty files are invalid; clean them up and continue
            if not os.path.getsize(f):
                os.unlink(f)
                continue
            # newly added?
            if f not in self.cache:
                added.append(f)
            else:
                # modified since last time?
                if self._mtime(f) != self.cache[f][1]:
                    # and has different checksum?
                    if self._checksum(f) != self.cache[f][0]:
                        added.append(f)
                # mark as used
                self.cache[f][2] = True
        # look for any entries in the cache that no longer exist on disk
        for (k, v) in self.cache.items():
            if not v[2]:
                removed.append(k)
        return added, removed

    def sanityCheck(self):
        assert not self.db.scalar("select count() from log")
        cnt = self.db.scalar("select count() from media")
        return cnt

    def forceResync(self):
        self.db.execute("delete from media")
        self.db.execute("delete from log")
        self.db.execute("update meta set usn = 0, dirMod = 0")
        self.db.commit()

    def removeExisting(self, files):
        "Remove files from list of files to sync, and return missing files."
        need = []
        remove = []
        for f in files:
            if self.db.scalar("select 1 from log where fname=?", f):
                remove.append((f, ))
            else:
                need.append(f)
        self.db.executemany("delete from log where fname=?", remove)
        self.db.commit()
        # if we need all the server files, it's faster to pass None than
        # the full list
        if need and len(files) == len(need):
            return None
        return need
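
# A self-contained sketch of the zip format that zipAdded() and syncAdd()
# above exchange with the sync server: files are stored under ascii names
# ("0", "1", ...), "_meta" is a JSON map from those names back to the real
# filenames, and "_finished" marks the last bundle. The filenames and
# payloads below are made up for illustration; the real code also tracks
# _usn and enforces the SYNC_ZIP_SIZE/COUNT limits.
import io
import json
import zipfile

def bundle(files):
    "files: dict mapping real filename -> bytes. Returns zip data."
    buf = io.BytesIO()
    z = zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED)
    meta = {}
    for n, (fname, data) in enumerate(sorted(files.items())):
        z.writestr(str(n), data)
        meta[str(n)] = fname
    z.writestr("_meta", json.dumps(meta))
    z.writestr("_finished", "")  # nothing left to send after this bundle
    z.close()
    return buf.getvalue()

def unbundle(zipData):
    "Inverse of bundle(): returns dict mapping real filename -> bytes."
    z = zipfile.ZipFile(io.BytesIO(zipData), "r")
    meta = json.loads(z.read("_meta").decode("utf8"))
    return dict((meta[i.filename], z.read(i)) for i in z.infolist()
                if i.filename in meta)

# round-trip two fake media files
data = bundle({"bell.mp3": b"ID3fakeaudio", "map.png": b"\x89PNGfakeimage"})
print(sorted(unbundle(data).keys()))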
Ejemplo n.º 47
0
class MediaManager(object):

    # other code depends on this order, so don't reorder
    regexps = ("(?i)(\[sound:([^]]+)\])",
               "(?i)(<img[^>]+src=[\"']?([^\"'>]+)[\"']?[^>]*>)")

    def __init__(self, col):
        self.col = col
        # media directory
        self._dir = re.sub("(?i)\.(anki2)$", ".media", self.col.path)
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        self._oldcwd = os.getcwd()
        os.chdir(self._dir)
        # change database
        self.connect()

    def connect(self):
        if self.col.server:
            return
        path = self.dir()+".db"
        create = not os.path.exists(path)
        self.db = DB(path)
        if create:
            self._initDB()

    def close(self):
        if self.col.server:
            return
        self.db.close()
        self.db = None
        # change cwd back to old location
        os.chdir(self._oldcwd)

    def dir(self):
        return self._dir

    # Adding media
    ##########################################################################

    def addFile(self, opath):
        """Copy PATH to MEDIADIR, and return new filename.
If the same name exists, compare checksums."""
        mdir = self.dir()
        # remove any dangerous characters
        base = re.sub(r"[][<>:/\\&]", "", os.path.basename(opath))
        dst = os.path.join(mdir, base)
        # if it doesn't exist, copy it directly
        if not os.path.exists(dst):
            shutil.copy2(opath, dst)
            return base
        # if it's identical, reuse
        if self.filesIdentical(opath, dst):
            return base
        # otherwise, find a unique name
        (root, ext) = os.path.splitext(base)
        def repl(match):
            n = int(match.group(1))
            return " (%d)" % (n+1)
        while True:
            path = os.path.join(mdir, root + ext)
            if not os.path.exists(path):
                break
            reg = " \((\d+)\)$"
            if not re.search(reg, root):
                root = root + " (1)"
            else:
                root = re.sub(reg, repl, root)
        # copy and return
        shutil.copy2(opath, path)
        return os.path.basename(path)

    def filesIdentical(self, path1, path2):
        "True if files are the same."
        return (checksum(open(path1, "rb").read()) ==
                checksum(open(path2, "rb").read()))

    # String manipulation
    ##########################################################################

    def filesInStr(self, mid, string, includeRemote=False):
        l = []
        # convert latex first
        model = self.col.models.get(mid)
        string = mungeQA(string, None, None, model, None, self.col)
        # extract filenames
        for reg in self.regexps:
            for (full, fname) in re.findall(reg, string):
                isLocal = not re.match("(https?|ftp)://", fname.lower())
                if isLocal or includeRemote:
                    l.append(fname)
        return l

    def strip(self, txt):
        for reg in self.regexps:
            txt = re.sub(reg, "", txt)
        return txt

    def escapeImages(self, string):
        # Feeding webkit unicode can result in it not finding images, so on
        # linux/osx we percent escape the image paths as utf8. On Windows the
        # problem is more complicated - if we percent-escape as utf8 it fixes
        # some images but breaks others. When filenames are normalized by
        # dropbox they become unreadable if we escape them.
        if isWin:
            return string
        def repl(match):
            tag = match.group(1)
            fname = match.group(2)
            if re.match("(https?|ftp)://", fname):
                return tag
            return tag.replace(
                fname, urllib.quote(fname.encode("utf-8")))
        return re.sub(self.regexps[1], repl, string)

    # Rebuilding DB
    ##########################################################################

    def check(self, local=None):
        "Return (missingFiles, unusedFiles)."
        mdir = self.dir()
        # generate card q/a and look through all references
        normrefs = {}
        def norm(s):
            if isinstance(s, unicode):
                return unicodedata.normalize('NFD', s)
            return s
        for f in self.allMedia():
            normrefs[norm(f)] = True
        # loop through directory and find unused & missing media
        unused = []
        if local is None:
            files = os.listdir(mdir)
        else:
            files = local
        for file in files:
            if not local:
                path = os.path.join(mdir, file)
                if not os.path.isfile(path):
                    # ignore directories
                    continue
            nfile = norm(file)
            if nfile not in normrefs:
                unused.append(file)
            else:
                del normrefs[nfile]
        nohave = normrefs.keys()
        return (nohave, unused)

    def allMedia(self):
        "Return a set of all referenced filenames."
        files = set()
        for mid, flds in self.col.db.execute("select mid, flds from notes"):
            for f in self.filesInStr(mid, flds):
                files.add(f)
        return files

    # Copying on import
    ##########################################################################
    # FIXME: check if the files are actually identical, and rewrite references
    # if necessary

    def copyTo(self, rdir):
        "Copy media to RDIR. Return number of files copied."
        ldir = self.dir()
        if not os.path.exists(ldir):
            return 0
        cnt = 0
        for f in os.listdir(ldir):
            src = os.path.join(ldir, f)
            dst = os.path.join(rdir, f)
            if not os.path.exists(dst):
                shutil.copy2(src, dst)
            cnt += 1
        return cnt

    # Media syncing - changes and removal
    ##########################################################################

    def hasChanged(self):
        return self.db.scalar("select 1 from log limit 1")

    def removed(self):
        return self.db.list("select * from log where type = ?", MEDIA_REM)

    def syncRemove(self, fnames):
        # remove provided deletions
        for f in fnames:
            if os.path.exists(f):
                os.unlink(f)
            self.db.execute("delete from log where fname = ?", f)
            self.db.execute("delete from media where fname = ?", f)
        # and all locally-logged deletions, as server has acked them
        self.db.execute("delete from log where type = ?", MEDIA_REM)
        self.db.commit()

    # Media syncing - unbundling zip files from server
    ##########################################################################

    def syncAdd(self, zipData):
        "Extract zip data; true if finished."
        f = StringIO(zipData)
        z = zipfile.ZipFile(f, "r")
        finished = False
        meta = None
        media = []
        sizecnt = 0
        # get meta info first
        assert z.getinfo("_meta").file_size < 100000
        meta = simplejson.loads(z.read("_meta"))
        nextUsn = int(z.read("_usn"))
        # then loop through all files
        for i in z.infolist():
            # check for zip bombs
            sizecnt += i.file_size
            assert sizecnt < 100*1024*1024
            if i.filename == "_meta" or i.filename == "_usn":
                # ignore previously-retrieved meta
                continue
            elif i.filename == "_finished":
                # last zip in set
                finished = True
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                # can we store the file on this system?
                if self.illegal(i.filename):
                    continue
                # save file
                open(name, "wb").write(data)
                # update db
                media.append((name, csum, self._mtime(name)))
                # remove entries from local log
                self.db.execute("delete from log where fname = ?", name)
        # update media db and note new starting usn
        if media:
            self.db.executemany(
                "insert or replace into media values (?,?,?)", media)
        self.setUsn(nextUsn) # commits
        # if we have finished adding, we need to record the new folder mtime
        # so that we don't trigger a needless scan
        if finished:
            self.syncMod()
        return finished

    def illegal(self, f):
        if isWin:
            for c in f:
                if c in "<>:\"/\\|?*^":
                    return True
        elif isMac:
            for c in f:
                if c in ":\\/":
                    return True

    # Media syncing - bundling zip files to send to server
    ##########################################################################
    # Because there's no standard filename encoding for zips, and because not
    # all zip clients support retrieving mtime, we store the files as ascii
    # and place a json file in the zip with the necessary information.

    def zipAdded(self):
        "Add files to a zip until over SYNC_ZIP_SIZE. Return zip data."
        f = StringIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)
        sz = 0
        cnt = 0
        files = {}
        cur = self.db.execute(
            "select fname from log where type = ?", MEDIA_ADD)
        fnames = []
        while 1:
            fname = cur.fetchone()
            if not fname:
                # add a flag so the server knows it can clean up
                z.writestr("_finished", "")
                break
            fname = fname[0]
            fnames.append([fname])
            z.write(fname, str(cnt))
            files[str(cnt)] = fname
            sz += os.path.getsize(fname)
            if sz > SYNC_ZIP_SIZE:
                break
            cnt += 1
        z.writestr("_meta", simplejson.dumps(files))
        z.close()
        return f.getvalue(), fnames

    def forgetAdded(self, fnames):
        if not fnames:
            return
        self.db.executemany("delete from log where fname = ?", fnames)
        self.db.commit()

    # Tracking changes (private)
    ##########################################################################

    def _initDB(self):
        self.db.executescript("""
create table media (fname text primary key, csum text, mod int);
create table meta (dirMod int, usn int); insert into meta values (0, 0);
create table log (fname text primary key, type int);
""")

    def _mtime(self, path):
        return int(os.stat(path).st_mtime)

    def _checksum(self, path):
        return checksum(open(path, "rb").read())

    def usn(self):
        return self.db.scalar("select usn from meta")

    def setUsn(self, usn):
        self.db.execute("update meta set usn = ?", usn)
        self.db.commit()

    def syncMod(self):
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        self.db.commit()

    def _changed(self):
        "Return dir mtime if it has changed since the last findChanges()"
        # doesn't track edits, but user can add or remove a file to update
        mod = self.db.scalar("select dirMod from meta")
        mtime = self._mtime(self.dir())
        if mod and mod == mtime:
            return False
        return mtime

    def findChanges(self):
        "Scan the media folder if it's changed, and note any changes."
        if self._changed():
            self._logChanges()

    def _logChanges(self):
        (added, removed) = self._changes()
        log = []
        media = []
        mediaRem = []
        for f in added:
            mt = self._mtime(f)
            media.append((f, self._checksum(f), mt))
            log.append((f, MEDIA_ADD))
        for f in removed:
            mediaRem.append((f,))
            log.append((f, MEDIA_REM))
        # update media db
        self.db.executemany("insert or replace into media values (?,?,?)",
                            media)
        if mediaRem:
            self.db.executemany("delete from media where fname = ?",
                                mediaRem)
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        # and logs
        self.db.executemany("insert or replace into log values (?,?)", log)
        self.db.commit()

    def _changes(self):
        self.cache = {}
        for (name, csum, mod) in self.db.execute(
            "select * from media"):
            self.cache[name] = [csum, mod, False]
        added = []
        removed = []
        # loop through on-disk files
        for f in os.listdir(self.dir()):
            # ignore folders
            if os.path.isdir(f):
                continue
            # newly added?
            if f not in self.cache:
                added.append(f)
            else:
                # modified since last time?
                if self._mtime(f) != self.cache[f][1]:
                    # and has different checksum?
                    if self._checksum(f) != self.cache[f][0]:
                        added.append(f)
                # mark as used
                self.cache[f][2] = True
        # look for any entries in the cache that no longer exist on disk
        for (k, v) in self.cache.items():
            if not v[2]:
                removed.append(k)
        return added, removed

    def sanityCheck(self):
        assert not self.db.scalar("select count() from log")
        cnt = self.db.scalar("select count() from media")
        return cnt
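
# A standalone sketch of the " (n)" renaming scheme used by addFile() and
# writeData() above to find a free filename: "photo.jpg" -> "photo (1).jpg"
# -> "photo (2).jpg" and so on. `exists` stands in for os.path.exists so
# the example can run without touching the filesystem; the checksum
# short-circuit for identical files is left out.
import os
import re

def next_free_name(fname, exists):
    root, ext = os.path.splitext(fname)
    def repl(match):
        return " (%d)" % (int(match.group(1)) + 1)
    while exists(root + ext):
        if re.search(r" \((\d+)\)$", root):
            root = re.sub(r" \((\d+)\)$", repl, root)
        else:
            root += " (1)"
    return root + ext

taken = set(["photo.jpg", "photo (1).jpg"])
print(next_free_name("photo.jpg", taken.__contains__))  # photo (2).jpg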
Ejemplo n.º 48
0
class MediaManager:
    """
    _dir -- the directory of media. Unless server is given to the constructor, in this cas it's None. Directory is changed to it during synchronization, and then changed back to previous directory.
    _oldcwd -- the working directory when media manager is created. The directory is changed to this value when the MediaManager is closed. If server is given in the constructor, then it's None.

"""

    """Captures the argument foo of [sound:foo]"""
    soundRegexps = [r"(?i)(\[sound:(?P<fname>[^]]+)\])"]
    """Captures the argument foo of <img src=foo bar>, ignoring quotes around foo."""
    imgRegexps = [
        # src element quoted case
        r"(?i)(<img[^>]* src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
        # unquoted case
        r"(?i)(<img[^>]* src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
    ]
    regexps = soundRegexps + imgRegexps

    def __init__(self, col, server):
        """
        TODO

        server -- always false in Anki"""
        self.col = col
        if server:
            self._dir = None
            return
        # media directory
        self._dir = re.sub(r"(?i)\.(anki2)$", ".media", self.col.path)
        if not os.path.exists(self._dir):
            os.makedirs(self._dir)
        try:
            self._oldcwd = os.getcwd()
        except OSError:
            # cwd doesn't exist
            self._oldcwd = None
        try:
            os.chdir(self._dir)
        except OSError:
            raise Exception("invalidTempFolder")
        # change database
        self.connect()

    def connect(self):
        """Ensure the existence of a database in current format, connected in self.db."""
        if self.col.server:
            return
        path = self.dir()+".db2"
        create = not os.path.exists(path)
        os.chdir(self._dir)
        self.db = DB(path)
        if create:
            self._initDB()
        self.maybeUpgrade()

    def _initDB(self):
        self.db.executescript("""
create table media (
 fname text not null primary key,
 csum text,           -- null indicates deleted file
 mtime int not null,  -- zero if deleted
 dirty int not null
);

create index idx_media_dirty on media (dirty);

create table meta (dirMod int, lastUsn int); insert into meta values (0, 0);
""")

    def maybeUpgrade(self):
        """Upgrade database in old format to current format."""
        oldpath = self.dir()+".db"
        if os.path.exists(oldpath):
            self.db.execute('attach "../collection.media.db" as old')
            try:
                self.db.execute("""
    insert into media
     select m.fname, csum, mod, ifnull((select 1 from log l2 where l2.fname=m.fname), 0) as dirty
     from old.media m
     left outer join old.log l using (fname)
     union
     select fname, null, 0, 1 from old.log where type=1;""")
                self.db.execute("delete from meta")
                self.db.execute("""
    insert into meta select dirMod, usn from old.meta
    """)
                self.db.commit()
            except Exception as e:
                # if we couldn't import the old db for some reason, just start
                # anew
                self.col.log("failed to import old media db:"+traceback.format_exc())
            self.db.execute("detach old")
            npath = "../collection.media.db.old"
            if os.path.exists(npath):
                os.unlink(npath)
            os.rename("../collection.media.db", npath)

    def close(self):
        """Close database connection.

        don't do anything if server is truthy.
        change dir back to old working dir"""
        if self.col.server:
            return
        self.db.close()
        self.db = None
        # change cwd back to old location
        if self._oldcwd:
            try:
                os.chdir(self._oldcwd)
            except:
                # may have been deleted
                pass

    def _deleteDB(self):
        """Delete connected DB, connect to a new one"""
        path = self.db._path
        self.close()
        os.unlink(path)
        self.connect()

    def dir(self):
        """The directory of media"""
        return self._dir

    def _isFAT32(self):
        if not isWin:
            return
        # pylint: disable=import-error
        import win32api, win32file
        try:
            name = win32file.GetVolumeNameForVolumeMountPoint(self._dir[:3])
        except:
            # mapped & unmapped network drive; pray that it's not vfat
            return
        if win32api.GetVolumeInformation(name)[4].lower().startswith("fat"):
            return True

    # Adding media
    ##########################################################################
    # opath must be in unicode

    def addFile(self, opath):
        """Copy the file at path opath to collection.media,

        Name may be changed to ensure unicity.
        """
        with open(opath, "rb") as f:
            return self.writeData(opath, f.read())

    def writeData(self, opath, data, typeHint=None):
        """Add data in the file of name opath in media dir.

        Only file name of opath is keep.
        If file as no extension, and it is jpg or png according to typeHint, then add extension
        Add a number extension if this name already exists

        """
        # if fname is a full path, use only the basename
        fname = os.path.basename(opath)

        # if it's missing an extension and a type hint was provided, use that
        if not os.path.splitext(fname)[1] and typeHint:
            # mimetypes is returning '.jpe' even after calling .init(), so we'll do
            # it manually instead
            typeMap = {
                "image/jpeg": ".jpg",
                "image/png": ".png",
            }
            if typeHint in typeMap:
                fname += typeMap[typeHint]

        # make sure we write it in NFC form (pre-APFS Macs will autoconvert to NFD),
        # and return an NFC-encoded reference
        fname = unicodedata.normalize("NFC", fname)
        # ensure it's a valid filename
        base = self.cleanFilename(fname)
        (root, ext) = os.path.splitext(base)
        def repl(match):
            n = int(match.group(1))
            return " (%d)" % (n+1)
        # find the first available name
        csum = checksum(data)
        while True:
            fname = root + ext
            path = os.path.join(self.dir(), fname)
            # if it doesn't exist, copy it directly
            if not os.path.exists(path):
                with open(path, "wb") as f:
                    f.write(data)
                return fname
            # if it's identical, reuse
            with open(path, "rb") as f:
                if checksum(f.read()) == csum:
                    return fname
            # otherwise, increment the index in the filename
            reg = r" \((\d+)\)$"
            if not re.search(reg, root):
                root = root + " (1)"
            else:
                root = re.sub(reg, repl, root)

    # String manipulation
    ##########################################################################

    def filesInStr(self, mid, string, includeRemote=False):
        """The list of media's path in the string. 

        Medias starting with _ are treated as any media.

        Each clozes are expanded in every possible ways. It allows
        for different strings to be created.

        Concerning the part of the string related to LaTeX, media are
        generated as explained in latex._imgLink's docstring

        Keyword arguments:
        mid -- the id of the model of the note whose string is considered
        string -- A string, which corresponds to a field of a note
        includeRemote -- whether the list should include contents which is with http, https or ftp
        """
        l = []
        model = self.col.models.get(mid)
        strings = []
        if model['type'] == MODEL_CLOZE and "{{c" in string:
            # if the field has clozes in it, we'll need to expand the
            # possibilities so we can render latex
            strings = self._expandClozes(string)
        else:
            strings = [string]
        for string in strings:
            # handle latex
            string = mungeQA(string, None, None, model, None, self.col)
            # extract filenames
            for reg in self.regexps:
                for match in re.finditer(reg, string):
                    fname = match.group("fname")
                    isLocal = not re.match("(https?|ftp)://", fname.lower())
                    if isLocal or includeRemote:
                        l.append(fname)
        return l

    def _expandClozes(self, string):
        """The list of all strings, where the clozes are expanded.

        For each cloze number n, there is a string with cloze n replaced by [...] or by [hint], and every other clozes replaced by their text.

        There is also a text where each cloze are replaced by their value; i.e. the answer"""
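        # For example (illustrative), "{{c1::Paris}} is in {{c2::France}}"
        # expands to the strings "[...] is in France", "Paris is in [...]"
        # and "Paris is in France" (the answer text).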
        ords = set(re.findall(r"{{c(\d+)::.+?}}", string))
        #The set of clozes occurring in the string
        strings = []
        from anki.template.template import clozeReg
        def qrepl(m):
            """The text that replaces cloze match m on the question side."""
            if m.group(4):
                return "[%s]" % m.group(4)
            else:
                return "[...]"
        def arepl(m):
            """The text that replaces cloze match m on the answer side."""
            return m.group(2)
        for ord in ords:
            # replace cloze number ord by its question-side marker
            s = re.sub(clozeReg%ord, qrepl, string)
            # and every other cloze by its content
            s = re.sub(clozeReg%".+?", "\\2", s)
            strings.append(s)
        strings.append(re.sub(clozeReg%".+?", arepl, string))
        return strings

    def transformNames(self, txt, func):
        """Apply func to all subtext matching the regexps txt."""
        for reg in self.regexps:
            txt = re.sub(reg, func, txt)
        return txt

    def strip(self, txt):
        """Delete all text matching the regexps txt"""
        for reg in self.regexps:
            txt = re.sub(reg, "", txt)
        return txt

    def escapeImages(self, string, unescape=False):
        """Replace local image url by replacing special character by the
        escape %xx or reciprocally depending on unescape value."""
        if unescape:
            fn = urllib.parse.unquote
        else:
            fn = urllib.parse.quote
        def repl(match):
            tag = match.group(0)
            fname = match.group("fname")
            if re.match("(https?|ftp)://", fname):
                return tag
            return tag.replace(fname, fn(fname))
        for reg in self.imgRegexps:
            string = re.sub(reg, repl, string)
        return string
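
    # e.g. (illustrative) escapeImages('<img src="naïve plan.png">') returns
    # '<img src="na%C3%AFve%20plan.png">'; calling it again with
    # unescape=True reverses the substitution.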

    # Rebuilding DB
    ##########################################################################

    def check(self, local=None):
        "Return (missingFiles, unusedFiles, warnings)."
        mdir = self.dir()
        # gather all media references in NFC form
        allRefs = set()
        for nid, mid, flds in self.col.db.execute("select id, mid, flds from notes"):
            noteRefs = self.filesInStr(mid, flds)
            # check the refs are in NFC
            for f in noteRefs:
                # if they're not, we'll need to fix them first
                if f != unicodedata.normalize("NFC", f):
                    self._normalizeNoteRefs(nid)
                    noteRefs = self.filesInStr(mid, flds)
                    break
            allRefs.update(noteRefs)
        # loop through media folder
        unused = []
        if local is None:
            files = os.listdir(mdir)
        else:
            files = local
        renamedFiles = False
        dirFound = False
        warnings = []
        for file in files:
            if not local:
                if not os.path.isfile(file):
                    # ignore directories
                    dirFound = True
                    continue
            if file.startswith("_"):
                # leading _ says to ignore file
                continue

            if self.hasIllegal(file):
                name = file.encode(sys.getfilesystemencoding(), errors="replace")
                name = str(name, sys.getfilesystemencoding())
                warnings.append(
                    _("Invalid file name, please rename: %s") % name)
                continue

            nfcFile = unicodedata.normalize("NFC", file)
            # we enforce NFC fs encoding on non-macs
            if not isMac and not local:
                if file != nfcFile:
                    # delete if we already have the NFC form, otherwise rename
                    if os.path.exists(nfcFile):
                        os.unlink(file)
                        renamedFiles = True
                    else:
                        os.rename(file, nfcFile)
                        renamedFiles = True
                    file = nfcFile
            # compare
            if nfcFile not in allRefs:
                unused.append(file)
            else:
                allRefs.discard(nfcFile)
        # if we renamed any files to nfc format, we must rerun the check
        # to make sure the renamed files are not marked as unused
        if renamedFiles:
            return self.check(local=local)
        nohave = [x for x in allRefs if not x.startswith("_")]
        # make sure the media DB is valid
        try:
            self.findChanges()
        except DBError:
            self._deleteDB()

        if dirFound:
            warnings.append(
                _("Anki does not support files in subfolders of the collection.media folder."))
        return (nohave, unused, warnings)

    def _normalizeNoteRefs(self, nid):
        note = self.col.getNote(nid)
        for c, fld in enumerate(note.fields):
            nfc = unicodedata.normalize("NFC", fld)
            if nfc != fld:
                note.fields[c] = nfc
        note.flush()

    # Copying on import
    ##########################################################################

    def have(self, fname):
        """Whether a fil with name fname exists in the media directory"""
        return os.path.exists(os.path.join(self.dir(), fname))

    # Illegal characters and paths
    ##########################################################################

    _illegalCharReg = re.compile(r'[][><:"/?*^\\|\0\r\n]')

    def stripIllegal(self, str):
        """str, without its illegal characters"""
        return re.sub(self._illegalCharReg, "", str)

    def hasIllegal(self, str):
        """Whether str contains a illegal character.

        Either according to _illegalCharReg, or because it can't be encoded if file system encoding"""
        if re.search(self._illegalCharReg, str):
            return True
        try:
            str.encode(sys.getfilesystemencoding())
        except UnicodeEncodeError:
            return True
        return False

    def cleanFilename(self, fname):
        fname = self.stripIllegal(fname)
        fname = self._cleanWin32Filename(fname)
        fname = self._cleanLongFilename(fname)
        if not fname:
            fname = "renamed"

        return fname

    def _cleanWin32Filename(self, fname):
        if not isWin:
            return fname

        # deal with things like con/prn/etc
        p = pathlib.WindowsPath(fname)
        if p.is_reserved():
            fname = "renamed" + fname
            assert not pathlib.WindowsPath(fname).is_reserved()

        return fname

    def _cleanLongFilename(self, fname):
        # a fairly safe limit that should work on typical windows
        # paths and on eCryptfs partitions, even with a duplicate
        # suffix appended
        namemax = 136

        if isWin:
            pathmax = 240
        else:
            pathmax = 1024

        # cap namemax based on absolute path
        dirlen = len(os.path.dirname(os.path.abspath(fname)))
        remaining = pathmax - dirlen
        namemax = min(remaining, namemax)
        assert namemax > 0

        if len(fname) > namemax:
            head, ext = os.path.splitext(fname)
            headmax = namemax - len(ext)
            head = head[0:headmax]
            fname = head + ext
            assert(len(fname) <= namemax)

        return fname

    # Tracking changes
    ##########################################################################

    def findChanges(self):
        "Scan the media folder if it's changed, and note any changes in the db."
        if self._changed():
            self._logChanges()

    def haveDirty(self):
        """Whether the database has at least one dirty element"""
        return self.db.scalar("select 1 from media where dirty=1 limit 1")

    def _mtime(self, path):
        """Time of most recent content modification of file at path.

        Expressed in seconds."""
        return int(os.stat(path).st_mtime)

    def _checksum(self, path):
        """Checksum of file at path"""
        with open(path, "rb") as f:
            return checksum(f.read())

    def _changed(self):
        "Return dir mtime if it has changed since the last findChanges()"
        # doesn't track edits, but user can add or remove a file to update
        mod = self.db.scalar("select dirMod from meta")
        mtime = self._mtime(self.dir())
        if not self._isFAT32() and mod and mod == mtime:
            return False
        return mtime

    def _logChanges(self):
        (added, removed) = self._changes()
        media = []
        for f, mtime in added:
            media.append((f, self._checksum(f), mtime, 1))
        for f in removed:
            media.append((f, None, 0, 1))
        # update media db
        self.db.executemany("insert or replace into media values (?,?,?,?)",
                            media)
        self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
        self.db.commit()

    def _changes(self):
        self.cache = {}
        for (name, csum, mod) in self.db.execute(
            "select fname, csum, mtime from media where csum is not null"):
            # previous entries may not have been in NFC form
            normname = unicodedata.normalize("NFC", name)
            self.cache[normname] = [csum, mod, False]
        added = []
        removed = []
        # loop through on-disk files
        with os.scandir(self.dir()) as it:
            for f in it:
                # ignore folders and thumbs.db
                if f.is_dir():
                    continue
                if f.name.lower() == "thumbs.db":
                    continue
                # and files with invalid chars
                if self.hasIllegal(f.name):
                    continue
                # empty files are invalid; clean them up and continue
                sz = f.stat().st_size
                if not sz:
                    os.unlink(f.name)
                    continue
                if sz > 100*1024*1024:
                    self.col.log("ignoring file over 100MB", f.name)
                    continue
                # check encoding
                normname = unicodedata.normalize("NFC", f.name)
                if not isMac:
                    if f.name != normname:
                        # wrong filename encoding which will cause sync errors
                        if os.path.exists(normname):
                            os.unlink(f.name)
                        else:
                            os.rename(f.name, normname)
                else:
                    # on Macs we can access the file using any normalization
                    pass

                # newly added?
                mtime = int(f.stat().st_mtime)
                if normname not in self.cache:
                    added.append((normname, mtime))
                else:
                    # modified since last time?
                    if mtime != self.cache[normname][1]:
                        # and has different checksum?
                        if self._checksum(normname) != self.cache[normname][0]:
                            added.append((normname, mtime))
                    # mark as used
                    self.cache[normname][2] = True
        # look for any entries in the cache that no longer exist on disk
        for (k, v) in list(self.cache.items()):
            if not v[2]:
                removed.append(k)
        return added, removed

    # Syncing-related
    ##########################################################################

    def lastUsn(self):
        return self.db.scalar("select lastUsn from meta")

    def setLastUsn(self, usn):
        self.db.execute("update meta set lastUsn = ?", usn)
        self.db.commit()

    def syncInfo(self, fname):
        """(Checkusm, dirty number) from media with name fname"""
        ret = self.db.first(
            "select csum, dirty from media where fname=?", fname)
        return ret or (None, 0)

    def markClean(self, fnames):
        for fname in fnames:
            self.db.execute(
                "update media set dirty=0 where fname=?", fname)

    def syncDelete(self, fname):
        """Delete the file fname if it is not in media directory."""
        if os.path.exists(fname):
            os.unlink(fname)
        self.db.execute("delete from media where fname=?", fname)

    def mediaCount(self):
        """Number of media according to database"""
        return self.db.scalar(
            "select count() from media where csum is not null")

    def dirtyCount(self):
        """Number of dirty media according to database.

        (couting the one potentially deleted)"""
        return self.db.scalar(
            "select count() from media where dirty=1")

    def forceResync(self):
        self.db.execute("delete from media")
        self.db.execute("update meta set lastUsn=0,dirMod=0")
        self.db.commit()
        self.db.setAutocommit(True)
        self.db.execute("vacuum")
        self.db.execute("analyze")
        self.db.setAutocommit(False)

    # Media syncing: zips
    ##########################################################################

    def mediaChangesZip(self):
        """
        The pair with:
        * A string encoding a zip files with:
        ** media to upload
        ** _meta: a json list associating to each name (as in zip) to
        the real name of the file
        * list of media considered
        """
        f = io.BytesIO()
        z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)

        fnames = []
        # meta is a list of (fname, zipname); an empty zipname marks a
        # deleted file
        meta = []
        sz = 0  # running total of the zipped media sizes

        # loop over dirty media entries, at most SYNC_ZIP_COUNT per zip
        for c, (fname, csum) in enumerate(self.db.execute(
                        "select fname, csum from media where dirty=1"
                        " limit %d"%SYNC_ZIP_COUNT)):

            fnames.append(fname)
            normname = unicodedata.normalize("NFC", fname)

            if csum:
                self.col.log("+media zip", fname)
                z.write(fname, str(c))
                meta.append((normname, str(c)))
                sz += os.path.getsize(fname)
            else:
                self.col.log("-media zip", fname)
                meta.append((normname, ""))

            if sz >= SYNC_ZIP_SIZE:
                break

        z.writestr("_meta", json.dumps(meta))
        z.close()
        return f.getvalue(), fnames

    def addFilesFromZip(self, zipData):
        """
        Copy each file from zipData (except _meta) to the media
        folder, and add those files to the media database. Rename the
        file according to _meta.

        zipData -- A byte tream containing a zipfile, containing:
        * _meta, a file containing a json dict associtaing to each name of file in zip (except meta) a name to be used in the media folder
        * arbitrary fields to save in the media folder
        """
        f = io.BytesIO(zipData)
        z = zipfile.ZipFile(f, "r")
        media = []
        # get meta info first
        meta = json.loads(z.read("_meta").decode("utf8"))
        # then loop through all files
        cnt = 0
        for i in z.infolist():
            if i.filename == "_meta":
                # ignore previously-retrieved meta
                continue
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                # normalize name
                name = unicodedata.normalize("NFC", name)
                # save file
                with open(name, "wb") as f:
                    f.write(data)
                # update db
                media.append((name, csum, self._mtime(name), 0))
                cnt += 1
        if media:
            self.db.executemany(
                "insert or replace into media values (?,?,?,?)", media)
        return cnt
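
# A short, self-contained illustration of the NFC/NFD issue the class above
# keeps working around: "é" can be stored as one precomposed code point
# (NFC) or as "e" plus a combining accent (NFD, what older macOS file
# systems hand back), and the two spellings compare unequal unless they are
# normalized. This is why references are written in NFC and on-disk names
# are renamed to NFC on non-Mac systems. Python 3 sketch; the filename is
# illustrative.
import unicodedata

nfc = unicodedata.normalize("NFC", "café.jpg")
nfd = unicodedata.normalize("NFD", "café.jpg")
print(nfc == nfd)                                # False
print(unicodedata.normalize("NFC", nfd) == nfc)  # True
print(len(nfc), len(nfd))                        # 8 9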
Ejemplo n.º 49
0
    def run(self):
        db = DB(self.file)
        ver = db.scalar(
            "select value from global_variables where key='version'")
        if not ver.startswith("Mnemosyne SQL 1") and ver not in ("2", "3"):
            self.log.append(_("File version unknown, trying import anyway."))
        # gather facts into temp objects
        curid = None
        notes = {}
        note = None
        for _id, id, k, v in db.execute("""
select _id, id, key, value from facts f, data_for_fact d where
f._id=d._fact_id"""):
            if id != curid:
                if note:
                    # pylint: disable=unsubscriptable-object
                    notes[note["_id"]] = note
                note = {"_id": _id}
                curid = id
            assert note
            note[k] = v
        if note:
            notes[note["_id"]] = note
        # gather cards
        front = []
        frontback = []
        vocabulary = []
        cloze = {}
        for row in db.execute("""
select _fact_id, fact_view_id, tags, next_rep, last_rep, easiness,
acq_reps+ret_reps, lapses, card_type_id from cards"""):
            # categorize note
            note = notes[row[0]]
            if row[1].endswith(".1"):
                if row[1].startswith("1.") or row[1].startswith("1::"):
                    front.append(note)
                elif row[1].startswith("2.") or row[1].startswith("2::"):
                    frontback.append(note)
                elif row[1].startswith("3.") or row[1].startswith("3::"):
                    vocabulary.append(note)
                elif row[1].startswith("5.1"):
                    cloze[row[0]] = note
            # check for None to fix issue where import can error out
            rawTags = row[2]
            if rawTags is None:
                rawTags = ""
            # merge tags into note
            tags = rawTags.replace(", ", "\x1f").replace(" ", "_")
            tags = tags.replace("\x1f", " ")
            if "tags" not in note:
                note["tags"] = []
            note["tags"] += self.col.tags.split(tags)
            note["tags"] = self.col.tags.canonify(note["tags"])
            # if it's a new card we can go with the defaults
            if row[3] == -1:
                continue
            # add the card
            c = ForeignCard()
            c.factor = int(row[5] * 1000)
            c.reps = row[6]
            c.lapses = row[7]
            # ivl is inferred in mnemosyne
            next, prev = row[3:5]
            c.ivl = max(1, (next - prev) // 86400)
            # work out how long we've got left
            rem = int((next - time.time()) / 86400)
            c.due = self.col.sched.today + rem
            # get ord
            m = re.search(r".(\d+)$", row[1])
            assert m
            ord = int(m.group(1)) - 1
            if "cards" not in note:
                note["cards"] = {}
            note["cards"][ord] = c
        self._addFronts(front)
        total = self.total
        self._addFrontBacks(frontback)
        total += self.total
        self._addVocabulary(vocabulary)
        self.total += total
        self._addCloze(cloze)
        self.total += total
        self.log.append(
            ngettext("%d note imported.", "%d notes imported.", self.total) %
            self.total)
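
# A small worked sketch of the scheduling arithmetic used above, with made-up
# numbers: Mnemosyne stores next_rep/last_rep as epoch seconds, so the
# interval is their difference in whole days and the due date is an offset in
# days from the collection's "today". mnemo_ivl_due is a hypothetical helper.
import time

def mnemo_ivl_due(next_rep, last_rep, sched_today):
    ivl = max(1, (next_rep - last_rep) // 86400)  # at least one day
    rem = int((next_rep - time.time()) / 86400)   # days until the next review
    due = sched_today + rem                       # may land in the past
    return ivl, due

# e.g. a card last reviewed 8 days ago and scheduled 2 days from now:
# now = time.time()
# print(mnemo_ivl_due(now + 2 * 86400, now - 8 * 86400, sched_today=500))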
Ejemplo n.º 50
0
class ProfileManager:
    def __init__(self, base=None, profile=None):
        self.name = None
        self.db = None
        # instantiate base folder
        self._setBaseFolder(base)
        # load metadata
        self.firstRun = self._loadMeta()
        # did the user request a profile to start up with?
        if profile:
            if profile not in self.profiles():
                QMessageBox.critical(None, "Error",
                                     "Requested profile does not exist.")
                sys.exit(1)
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureBaseExists(self):
        try:
            self._ensureExists(self.base)
        except:
            # can't translate, as lang not initialized
            QMessageBox.critical(
                None, "Error", """\
Anki could not create the folder %s. Please ensure that location is not \
read-only and you have permission to write to it. If you cannot fix this \
issue, please see the documentation for information on running Anki from \
a flash drive.""" % self.base)
            raise

    # Folder migration
    ######################################################################

    def _oldFolderLocation(self):
        if isMac:
            return os.path.expanduser("~/Documents/Anki")
        elif isWin:
            loc = QStandardPaths.writableLocation(
                QStandardPaths.DocumentsLocation)
            return os.path.join(loc, "Anki")
        else:
            p = os.path.expanduser("~/Anki")
            if os.path.exists(p):
                return p
            else:
                loc = QStandardPaths.writableLocation(
                    QStandardPaths.DocumentsLocation)
                if loc[:-1] == QStandardPaths.writableLocation(
                        QStandardPaths.HomeLocation):
                    # occasionally "documentsLocation" will return the home
                    # folder because the Documents folder isn't configured
                    # properly; fall back to an English path
                    return os.path.expanduser("~/Documents/Anki")
                else:
                    return os.path.join(loc, "Anki")

    def maybeMigrateFolder(self):
        oldBase = self._oldFolderLocation()

        if not os.path.exists(self.base) and os.path.exists(oldBase):
            shutil.move(oldBase, self.base)

    # Profile load/save
    ######################################################################

    def profiles(self):
        return sorted(x for x in self.db.list("select name from profiles")
                      if x != "_global")

    def load(self, name, passwd=None):
        data = self.db.scalar(
            "select cast(data as blob) from profiles where name = ?", name)
        # some profiles created in python2 may not decode properly
        prof = pickle.loads(data, errors="ignore")
        if prof['key'] and prof['key'] != self._pwhash(passwd):
            self.name = None
            return False
        if name != "_global":
            self.name = name
            self.profile = prof
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, pickle.dumps(self.profile), self.name)
        self.db.execute(sql, pickle.dumps(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute("insert into profiles values (?, ?)", name,
                        pickle.dumps(prof))
        self.db.commit()

    def remove(self, name):
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?", name)
        self.db.commit()

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            if (oldFolder != newFolder) and (oldFolder.lower()
                                             == newFolder.lower()):
                # OS is telling us the folder exists because it does not take
                # case into account; use a temporary folder location
                midFolder = ''.join([oldFolder, '-temp'])
                if not os.path.exists(midFolder):
                    os.rename(oldFolder, midFolder)
                    oldFolder = midFolder
                else:
                    showWarning(
                        _("Please remove the folder %s and try again.") %
                        midFolder)
                    self.name = oldName
                    return
            else:
                showWarning(_("Folder already exists."))
                self.name = oldName
                return

        # update name
        self.db.execute("update profiles set name = ? where name = ?", name,
                        oldName)
        # rename folder
        try:
            os.rename(oldFolder, newFolder)
        except WindowsError as e:
            self.db.rollback()
            if "Access is denied" in e:
                showWarning(
                    _("""\
Anki could not rename your profile because it could not rename the profile \
folder on disk. Please ensure you have permission to write to Documents/Anki \
and no other programs are accessing your profile folders, then try again."""))
            else:
                raise
        except:
            self.db.rollback()
            raise
        else:
            self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons21"))

    def backupFolder(self):
        return self._ensureExists(os.path.join(self.profileFolder(),
                                               "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _setBaseFolder(self, cmdlineBase):
        if cmdlineBase:
            self.base = os.path.abspath(cmdlineBase)
        elif os.environ.get("ANKI_BASE"):
            self.base = os.path.abspath(os.environ["ANKI_BASE"])
        else:
            self.base = self._defaultBase()
            self.maybeMigrateFolder()
        self.ensureBaseExists()

    def _defaultBase(self):
        if isWin:
            return os.path.join(os.environ["APPDATA"], "Anki2")
        elif isMac:
            return os.path.expanduser("~/Library/Application Support/Anki2")
        else:
            dataDir = os.environ.get("XDG_DATA_HOME",
                                     os.path.expanduser("~/.local/share"))
            if not os.path.exists(dataDir):
                os.makedirs(dataDir)
            return os.path.join(dataDir, "Anki2")

    def _loadMeta(self):
        path = os.path.join(self.base, "prefs21.db")
        new = not os.path.exists(path)

        def recover():
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            broken = path + ".broken"
            if os.path.exists(broken):
                os.unlink(broken)
            os.rename(path, broken)
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs21.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")

        try:
            self.db = DB(path)
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created
            try:
                self.meta = pickle.loads(
                    self.db.scalar(
                        "select cast(data as blob) from profiles where name = '_global'"
                    ))
                return
            except:
                recover()
                return self._loadMeta()
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute(
            "insert or replace into profiles values ('_global', ?)",
            pickle.dumps(metaConf))
        self._setDefaultLang()
        return True

    def ensureProfile(self):
        "Create a new profile if none exists."
        if self.firstRun:
            self.create(_("User 1"))
            p = os.path.join(self.base, "README.txt")
            open(p, "w").write(
                _("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite + "#startupopts"))

    def _pwhash(self, passwd):
        return checksum(str(self.meta['id']) + str(passwd))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        # the dialog expects _ to be defined, but we're running before
        # setupLang() has been called. so we create a dummy op for now
        import builtins
        builtins.__dict__['_'] = lambda x: x

        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass

        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.accepted.connect(self._onLangSelected)
        d.rejected.connect(lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en"
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(anki.lang.langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in anki.lang.langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        obj = anki.lang.langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(None, "Anki", en % name,
                                 QMessageBox.Yes | QMessageBox.No,
                                 QMessageBox.No)
        if r != QMessageBox.Yes:
            return self._setDefaultLang()
        self.setLang(code)

    def setLang(self, code):
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, pickle.dumps(self.meta), "_global")
        self.db.commit()
        anki.lang.setLang(code, local=False)
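
# A standalone sketch (assuming a prefs21.db laid out as in the example above)
# of reading the pickled '_global' metadata row without Anki's DB wrapper.
# read_global_meta and the base path in the usage line are hypothetical.
import os
import pickle
import sqlite3

def read_global_meta(base):
    path = os.path.join(base, "prefs21.db")
    conn = sqlite3.connect(path)
    try:
        row = conn.execute(
            "select cast(data as blob) from profiles where name = '_global'"
        ).fetchone()
        return pickle.loads(row[0]) if row else None
    finally:
        conn.close()

# meta = read_global_meta(os.path.expanduser("~/.local/share/Anki2"))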
Ejemplo n.º 51
0
    def _loadMeta(self, retrying=False) -> LoadMetaResult:
        result = LoadMetaResult()
        result.firstTime = False
        result.loadError = retrying

        opath = os.path.join(self.base, "prefs.db")
        path = os.path.join(self.base, "prefs21.db")
        if not retrying and os.path.exists(opath) and not os.path.exists(path):
            shutil.copy(opath, path)

        result.firstTime = not os.path.exists(path)

        def recover() -> None:
            # if we can't load profile, start with a new one
            if self.db:
                try:
                    self.db.close()
                except:
                    pass
            for suffix in ("", "-journal"):
                fpath = path + suffix
                if os.path.exists(fpath):
                    os.unlink(fpath)

        # open DB file and read data
        try:
            self.db = DB(path)
            assert self.db.scalar("pragma integrity_check") == "ok"
            self.db.execute(
                """
create table if not exists profiles
(name text primary key, data blob not null);"""
            )
            data = self.db.scalar(
                "select cast(data as blob) from profiles where name = '_global'"
            )
        except:
            traceback.print_stack()
            if result.loadError:
                # already failed, prevent infinite loop
                raise
            # delete files and try again
            recover()
            return self._loadMeta(retrying=True)

        # try to read data
        if not result.firstTime:
            try:
                self.meta = self._unpickle(data)
                return result
            except:
                traceback.print_stack()
                print("resetting corrupt _global")
                result.loadError = True
                result.firstTime = True

        # if new or read failed, create a default global profile
        self.meta = metaConf.copy()
        self.db.execute(
            "insert or replace into profiles values ('_global', ?)",
            self._pickle(metaConf),
        )
        return result
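
# A condensed sketch of the recover-and-retry pattern above: try to open and
# verify the database; on failure delete the file (and its -journal) and retry
# exactly once, so a corrupt prefs file cannot cause an endless loop.
# open_with_recovery is a hypothetical helper using sqlite3 directly.
import os
import sqlite3

def open_with_recovery(path, retrying=False):
    try:
        conn = sqlite3.connect(path)
        try:
            if conn.execute("pragma integrity_check").fetchone()[0] != "ok":
                raise sqlite3.DatabaseError("integrity check failed")
        except Exception:
            conn.close()   # release the handle before any file deletion
            raise
        return conn
    except Exception:
        if retrying:
            raise          # second failure: give up instead of looping forever
        for suffix in ("", "-journal"):
            fpath = path + suffix
            if os.path.exists(fpath):
                os.unlink(fpath)
        return open_with_recovery(path, retrying=True)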
Ejemplo n.º 52
0
class ProfileManager(object):

    def __init__(self, base=None, profile=None):
        self.name = None
        # instantiate base folder
        if base:
            self.base = os.path.abspath(base)
        else:
            self.base = self._defaultBase()
        self.ensureBaseExists()
        # load metadata
        self.firstRun = self._loadMeta()
        # did the user request a profile to start up with?
        if profile:
            try:
                self.load(profile)
            except TypeError:
                raise Exception("Provided profile does not exist.")

    # Base creation
    ######################################################################

    def ensureBaseExists(self):
        try:
            self._ensureExists(self.base)
        except:
            # can't translate, as lang not initialized
            QMessageBox.critical(
                None, "Error", """\
Anki can't write to the harddisk. Please see the \
documentation for information on using a flash drive.""")
            raise

    # Profile load/save
    ######################################################################

    def profiles(self):
        return sorted(
            unicode(x, "utf8") for x in
            self.db.list("select name from profiles")
            if x != "_global")

    def load(self, name, passwd=None):
        prof = cPickle.loads(
            self.db.scalar("select data from profiles where name = ?",
                           name.encode("utf8")))
        if prof['key'] and prof['key'] != self._pwhash(passwd):
            self.name = None
            return False
        if name != "_global":
            self.name = name
            self.profile = prof
        return True

    def save(self):
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.profile),
                        self.name.encode("utf8"))
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()

    def create(self, name):
        prof = profileConf.copy()
        self.db.execute("insert into profiles values (?, ?)",
                        name.encode("utf8"), cPickle.dumps(prof))
        self.db.commit()

    def remove(self, name):
        p = self.profileFolder()
        if os.path.exists(p):
            send2trash(p)
        self.db.execute("delete from profiles where name = ?",
                        name.encode("utf8"))
        self.db.commit()

    def rename(self, name):
        oldName = self.name
        oldFolder = self.profileFolder()
        self.name = name
        newFolder = self.profileFolder(create=False)
        if os.path.exists(newFolder):
            showWarning(_("Folder already exists."))
            self.name = oldName
            return
        # update name
        self.db.execute("update profiles set name = ? where name = ?",
                        name.encode("utf8"), oldName.encode("utf8"))
        # rename folder
        os.rename(oldFolder, newFolder)
        self.db.commit()

    # Folder handling
    ######################################################################

    def profileFolder(self, create=True):
        path = os.path.join(self.base, self.name)
        if create:
            self._ensureExists(path)
        return path

    def addonFolder(self):
        return self._ensureExists(os.path.join(self.base, "addons"))

    def backupFolder(self):
        return self._ensureExists(
            os.path.join(self.profileFolder(), "backups"))

    def collectionPath(self):
        return os.path.join(self.profileFolder(), "collection.anki2")

    # Helpers
    ######################################################################

    def _ensureExists(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _defaultBase(self):
        if isWin:
            if False: #qtmajor >= 5:
                loc = QStandardPaths.writableLocation(QStandardPaths.DocumentsLocation)
            else:
                loc = QDesktopServices.storageLocation(QDesktopServices.DocumentsLocation)
            return os.path.join(loc, "Anki")
        elif isMac:
            return os.path.expanduser("~/Documents/Anki")
        else:
            return os.path.expanduser("~/Anki")

    def _loadMeta(self):
        path = os.path.join(self.base, "prefs.db")
        new = not os.path.exists(path)
        def recover():
            # if we can't load profile, start with a new one
            os.rename(path, path+".broken")
            QMessageBox.warning(
                None, "Preferences Corrupt", """\
Anki's prefs.db file was corrupt and has been recreated. If you were using multiple \
profiles, please add them back using the same names to recover your cards.""")
        try:
            self.db = DB(path, text=str)
            self.db.execute("""
create table if not exists profiles
(name text primary key, data text not null);""")
        except:
            recover()
            return self._loadMeta()
        if not new:
            # load previously created
            try:
                self.meta = cPickle.loads(
                    self.db.scalar(
                        "select data from profiles where name = '_global'"))
                return
            except:
                recover()
                return self._loadMeta()
        # create a default global profile
        self.meta = metaConf.copy()
        self.db.execute("insert or replace into profiles values ('_global', ?)",
                        cPickle.dumps(metaConf))
        self._setDefaultLang()
        return True

    def ensureProfile(self):
        "Create a new profile if none exists."
        if self.firstRun:
            self.create(_("User 1"))
            p = os.path.join(self.base, "README.txt")
            open(p, "w").write((_("""\
This folder stores all of your Anki data in a single location,
to make backups easy. To tell Anki to use a different location,
please see:

%s
""") % (appHelpSite +  "#startupopts")).encode("utf8"))

    def _pwhash(self, passwd):
        return checksum(unicode(self.meta['id'])+unicode(passwd))

    # Default language
    ######################################################################
    # On first run, allow the user to choose the default language

    def _setDefaultLang(self):
        # the dialog expects _ to be defined, but we're running before
        # setupLang() has been called. so we create a dummy op for now
        import __builtin__
        __builtin__.__dict__['_'] = lambda x: x
        # create dialog
        class NoCloseDiag(QDialog):
            def reject(self):
                pass
        d = self.langDiag = NoCloseDiag()
        f = self.langForm = aqt.forms.setlang.Ui_Dialog()
        f.setupUi(d)
        d.connect(d, SIGNAL("accepted()"), self._onLangSelected)
        d.connect(d, SIGNAL("rejected()"), lambda: True)
        # default to the system language
        try:
            (lang, enc) = locale.getdefaultlocale()
        except:
            # fails on osx
            lang = "en"
        if lang and lang not in ("pt_BR", "zh_CN", "zh_TW"):
            lang = re.sub("(.*)_.*", "\\1", lang)
        # find index
        idx = None
        en = None
        for c, (name, code) in enumerate(langs):
            if code == "en":
                en = c
            if code == lang:
                idx = c
        # if the system language isn't available, revert to english
        if idx is None:
            idx = en
        # update list
        f.lang.addItems([x[0] for x in langs])
        f.lang.setCurrentRow(idx)
        d.exec_()

    def _onLangSelected(self):
        f = self.langForm
        obj = langs[f.lang.currentRow()]
        code = obj[1]
        name = obj[0]
        en = "Are you sure you wish to display Anki's interface in %s?"
        r = QMessageBox.question(
            None, "Anki", en%name, QMessageBox.Yes | QMessageBox.No,
            QMessageBox.No)
        if r != QMessageBox.Yes:
            return self._setDefaultLang()
        self.meta['defaultLang'] = code
        sql = "update profiles set data = ? where name = ?"
        self.db.execute(sql, cPickle.dumps(self.meta), "_global")
        self.db.commit()
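
# A minimal sketch of the password check behind load()/_pwhash in the two
# ProfileManager examples: the stored 'key' is a checksum of the profile
# metadata id concatenated with the password. Anki's checksum() helper is
# assumed here to be a SHA-1 hex digest; hashlib stands in for it.
import hashlib

def pwhash(meta_id, passwd):
    return hashlib.sha1((str(meta_id) + str(passwd)).encode("utf8")).hexdigest()

def password_ok(prof, meta_id, passwd):
    # an empty 'key' means the profile is not password protected
    return not prof['key'] or prof['key'] == pwhash(meta_id, passwd)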