Example #1
0
 def writeData(self, opath, data):
     """Write ``data`` into the media dir under a unique, safe name.

     Only the basename of ``opath`` is used.  If a file with identical
     contents (same checksum) already exists, its name is reused;
     otherwise " (n)" suffixes are appended until a free name is found.
     Returns the NFC-normalized filename actually used.
     """
     # if fname is a full path, use only the basename
     fname = os.path.basename(opath)
     # make sure we write it in NFC form (on mac will autoconvert to NFD),
     # and return an NFC-encoded reference
     fname = unicodedata.normalize("NFC", fname)
     # remove any dangerous characters
     base = self.stripIllegal(fname)
     (root, ext) = os.path.splitext(base)
     def repl(match):
         # " (n)" -> " (n+1)"
         n = int(match.group(1))
         return " (%d)" % (n+1)
     # find the first available name
     csum = checksum(data)
     while True:
         fname = root + ext
         path = os.path.join(self.dir(), fname)
         # if it doesn't exist, copy it directly
         if not os.path.exists(path):
             # context manager guarantees the handle is closed promptly
             with open(path, "wb") as f:
                 f.write(data)
             return fname
         # if it's identical, reuse
         with open(path, "rb") as f:
             if checksum(f.read()) == csum:
                 return fname
         # otherwise, increment the index in the filename
         # (raw string: the pattern contains backslash escapes)
         reg = r" \((\d+)\)$"
         if not re.search(reg, root):
             root = root + " (1)"
         else:
             root = re.sub(reg, repl, root)
Example #2
0
    def addFiles(self, data):
        """Adds files based from ZIP file data and returns the usn."""

        import zipfile

        # The argument name is 'zip' on MediaSyncer, but we always use 'data' when
        # we receive non-JSON data. We have to override to receive the right argument!
        #MediaSyncer.addFiles(self, zip=fd.getvalue())

        usn = self.col.media.usn()

        # Copied from anki.media.MediaManager.syncAdd(). Modified to not need the
        # _usn file and, instead, to increment the server usn with each file added.

        f = StringIO(data)
        z = zipfile.ZipFile(f, "r")
        finished = False
        meta = None
        media = []
        sizecnt = 0
        # get meta info first
        assert z.getinfo("_meta").file_size < 100000
        meta = json.loads(z.read("_meta"))
        # then loop through all files
        for i in z.infolist():
            # check for zip bombs
            sizecnt += i.file_size
            assert sizecnt < 100*1024*1024
            if i.filename == "_meta" or i.filename == "_usn":
                # ignore previously-retrieved meta
                continue
            elif i.filename == "_finished":
                # last zip in set
                finished = True
            else:
                data = z.read(i)
                csum = checksum(data)
                name = meta[i.filename]
                # can we store the file on this system?
                # the check was renamed in Anki 2.0.12 from media.illegal()
                # to media.hasIllegal(), so probe for both spellings
                if hasattr(self.col.media, 'illegal') and \
                        self.col.media.illegal(name):
                    continue
                if hasattr(self.col.media, 'hasIllegal') and \
                        self.col.media.hasIllegal(name):
                    continue
                # save file
                open(os.path.join(self.col.media.dir(), name), "wb").write(data)
                # update db
                media.append((name, csum, self.col.media._mtime(os.path.join(self.col.media.dir(), name))))
                # remove entries from local log
                self.col.media.db.execute("delete from log where fname = ?", name)
                usn += 1
        # update media db and note new starting usn
        if media:
            self.col.media.db.executemany(
                "insert or replace into media values (?,?,?)", media)
        self.col.media.setUsn(usn) # commits
        # if we have finished adding, we need to record the new folder mtime
        # so that we don't trigger a needless scan
        if finished:
            self.col.media.syncMod()

        return usn
Example #3
0
 def __init__(self, hkey=None, client=None, hostNum=None):
     """Remember connection details and prepare per-session sync state."""
     # per-session key: short random token derived from a checksum
     self.skey = checksum(str(random.random()))[:8]
     self.hkey = hkey
     self.hostNum = hostNum
     # fall back to the default HTTP client when none is supplied
     self.client = client if client else AnkiRequestsClient()
     self.postVars = {}
     self.prefix = "sync/"
Example #4
0
 def addFilesFromZip(self, zipData):
     "Extract zip data; true if finished."
     # NOTE(review): despite the docstring this returns the number of files
     # added (cnt), not a boolean — confirm callers treat it as a count.
     f = StringIO(zipData)
     z = zipfile.ZipFile(f, "r")
     media = []
     # get meta info first
     meta = json.loads(z.read("_meta"))
     # then loop through all files
     cnt = 0
     for i in z.infolist():
         if i.filename == "_meta":
             # ignore previously-retrieved meta
             continue
         else:
             data = z.read(i)
             csum = checksum(data)
             # meta maps zip member names to the real filenames
             name = meta[i.filename]
             if not isinstance(name, unicode):
                 name = unicode(name, "utf8")
             # normalize name for platform
             if isMac:
                 name = unicodedata.normalize("NFD", name)
             else:
                 name = unicodedata.normalize("NFC", name)
             # save file
             open(name, "wb").write(data)
             # update db
             media.append((name, csum, self._mtime(name), 0))
             cnt += 1
     if media:
         self.db.executemany(
             "insert or replace into media values (?,?,?,?)", media)
     return cnt
Example #5
0
def _imgLink(col, template, ly):
    '''Build an <img src> link for given LilyPond code.'''
    # Finalize LilyPond source.
    ly = getTemplate(template, ly)
    ly = ly.encode("utf8")

    # Derive image filename from source.
    fname = "lilypond-%s.png" % (checksum(ly),)
    link = '<img src="%s">' % (fname,)

    # Already rendered: nothing to do.
    if os.path.exists(fname):
        return link
    # Known-bad source: return the cached error instead of rebuilding.
    if fname in lilypondCache:
        return lilypondCache[fname]
    # Render; remember failures so broken cards don't re-run the build.
    err = _buildImg(col, ly, fname)
    if err:
        lilypondCache[fname] = err
        return err
    return link
Example #6
0
def test_upgrade():
    """Upgrade a v1 deck and sanity-check the result."""
    dst = getUpgradeDeckPath()
    # the deck is a binary SQLite file: read in binary mode so the checksum
    # covers the raw bytes (text mode mangles it on Windows)
    with open(dst, "rb") as f:
        csum = checksum(f.read())
    u = Upgrader()
    deck = u.upgrade(dst)
    # src file must not have changed
    with open(dst, "rb") as f:
        assert csum == checksum(f.read())
    # creation time should have been adjusted
    d = datetime.datetime.fromtimestamp(deck.crt)
    assert d.hour == 4 and d.minute == 0
    # 3 new, 2 failed, 1 due
    deck.reset()
    deck.conf['counts'] = COUNT_REMAINING
    assert deck.sched.counts() == (3,4,1)
    # now's a good time to test the integrity check too
    deck.fixIntegrity()
Example #7
0
def buildImg(deck, latex):
    """Render LATEX to a png via latex+dvipng and add it to the media dir.

    Returns (True, media_path) on success or (False, error_message).
    """
    log = open(os.path.join(tmpdir, "latex_log.txt"), "w+")
    texpath = os.path.join(tmpdir, "tmp.tex")
    # write the full .tex document; open() replaces the deprecated file()
    # builtin and the context manager guarantees the handle is closed
    with open(texpath, "w") as texfile:
        texfile.write(latexPreamble + "\n")
        texfile.write(latex + "\n")
        texfile.write(latexPostamble + "\n")
    texpath = texpath.encode(sys.getfilesystemencoding())
    oldcwd = os.getcwd()
    if sys.platform == "win32":
        # hide the console window spawned for latex/dvipng
        si = subprocess.STARTUPINFO()
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    else:
        si = None
    try:
        os.chdir(tmpdir)
        errmsg = _("Error executing 'latex' or 'dvipng'.\n" "A log file is available here:\n%s") % tmpdir
        if call(["latex", "-interaction=nonstopmode", texpath], stdout=log, stderr=log, startupinfo=si):
            return (False, errmsg)
        if call(latexDviPngCmd + ["tmp.dvi", "-o", "tmp.png"], stdout=log, stderr=log, startupinfo=si):
            return (False, errmsg)
        # add to media
        path = copyToMedia(deck, "tmp.png", latex=checksum(latex))
        return (True, path)
    finally:
        os.chdir(oldcwd)
        # always release the log file handle
        log.close()
Example #8
0
 def fullSyncFromServer(self, fields, path):
     """Download a full deck from the server, replacing the file at PATH."""
     try:
         runHook("fullSyncStarted", 0)
         fields = urllib.urlencode(fields)
         src = urllib2.urlopen(SYNC_URL + "fulldown", fields)
         tmpname = namedtmp("fullsync.anki")
         tmp = open(tmpname, "wb")
         decomp = zlib.decompressobj()
         cnt = 0
         # stream-decompress the download in CHUNK_SIZE pieces
         while 1:
             data = src.read(CHUNK_SIZE)
             if not data:
                 tmp.write(decomp.flush())
                 break
             tmp.write(decomp.decompress(data))
             cnt += CHUNK_SIZE
             runHook("fullSyncProgress", "fromServer", cnt)
         src.close()
         tmp.close()
         # BUG FIX: the original called os.close(fd) here, but no 'fd'
         # exists in this function (a leftover from a tempfile.mkstemp()
         # variant) and it raised NameError after every download.
         # if we were successful, overwrite old deck
         os.unlink(path)
         os.rename(tmpname, path)
         # reset the deck name
         c = sqlite.connect(path)
         c.execute("update decks set syncName = ?",
                   [checksum(path.encode("utf-8"))])
         c.commit()
         c.close()
     finally:
         runHook("fullSyncFinished")
def _sndLink(col, template, abc):
    '''Build an [sound:<filename>] link for given ABC code'''
    # Finalize ABC source.
    abc = getTemplate(template, abc)
    abc = abc.encode("utf8")

    # Derive sound filename from source.
    fname = "abc-%s.mp3" % (checksum(abc),)
    link = "[sound:%s]" % (fname,)

    # Already rendered: nothing to do.
    if os.path.exists(fname):
        return link
    # Known-bad source: return the cached error instead of rebuilding.
    if fname in abcCache:
        return abcCache[fname]
    # Render; remember failures so broken cards don't re-run the build.
    err = _buildSnd(col, abc, fname)
    if err:
        abcCache[fname] = err
        return err
    return link
Example #10
0
    def _fullup(self, wrapper, infile, version):
        """Receive a zlib-compressed full deck upload and replace the deck file.

        Returns the lastSync timestamp recorded into the new deck.
        """
        wrapper.close()
        path = wrapper.path

        # DRS: most of this function was graciously copied
        # from anki.sync.SyncTools.fullSyncFromServer()
        (fd, tmpname) = tempfile.mkstemp(dir=os.getcwd(), prefix="fullsync")
        outfile = open(tmpname, 'wb')
        decomp = zlib.decompressobj()
        # stream-decompress the upload in CHUNK_SIZE pieces
        while 1:
            data = infile.read(CHUNK_SIZE)
            if not data:
                outfile.write(decomp.flush())
                break
            outfile.write(decomp.decompress(data))
        infile.close()
        outfile.close()
        # close the raw descriptor from mkstemp; writing went through a
        # separately-opened handle on the same path
        os.close(fd)
        # if we were successful, overwrite old deck
        if os.path.exists(path):
            os.unlink(path)
        os.rename(tmpname, path)
        # reset the deck name
        c = sqlite.connect(path)
        lastSync = time.time()
        # schema differs between deck versions '1' and '2'
        if version == '1':
            c.execute("update decks set lastSync = ?", [lastSync])
        elif version == '2':
            c.execute("update decks set syncName = ?, lastSync = ?",
                      [checksum(path.encode("utf-8")), lastSync])
        c.commit()
        c.close()

        return lastSync
Example #11
0
File: models.py Project: Stvad/anki
 def scmhash(self, m):
     "Return a hash of the schema, to see if models are compatible."
     # concatenate every field name, then every template name
     parts = [f['name'] for f in m['flds']]
     parts += [t['name'] for t in m['tmpls']]
     return checksum("".join(parts))
Example #12
0
    def writeData(self, opath, data, typeHint=None):
        """Write DATA into the media folder under a safe, unique name.

        Only the basename of OPATH is used; TYPEHINT (a mime type) supplies
        an extension when the name lacks one.  An existing file with
        identical contents is reused; otherwise " (n)" suffixes are tried
        until a free slot is found.  Returns the NFC-normalized filename.
        """
        # if fname is a full path, use only the basename
        fname = os.path.basename(opath)

        # if it's missing an extension and a type hint was provided, use that
        if typeHint and not os.path.splitext(fname)[1]:
            # mimetypes is returning '.jpe' even after calling .init(), so we'll do
            # it manually instead
            hintedExt = {
                "image/jpeg": ".jpg",
                "image/png": ".png",
            }.get(typeHint)
            if hintedExt:
                fname += hintedExt

        # make sure we write it in NFC form (pre-APFS Macs will autoconvert to NFD),
        # and return an NFC-encoded reference
        fname = unicodedata.normalize("NFC", fname)
        # ensure it's a valid filename
        (root, ext) = os.path.splitext(self.cleanFilename(fname))

        def bumpIndex(match):
            # " (n)" -> " (n+1)"
            return " (%d)" % (int(match.group(1)) + 1)

        indexPat = r" \((\d+)\)$"
        csum = checksum(data)
        # find the first available name
        while True:
            fname = root + ext
            path = os.path.join(self.dir(), fname)
            if not os.path.exists(path):
                # free slot: write the data there
                with open(path, "wb") as out:
                    out.write(data)
                return fname
            # identical content already stored under this name: reuse it
            with open(path, "rb") as existing:
                if checksum(existing.read()) == csum:
                    return fname
            # bump (or start) the " (n)" counter and retry
            if re.search(indexPat, root):
                root = re.sub(indexPat, bumpIndex, root)
            else:
                root += " (1)"
Example #13
0
def getUniqueName(name, otherNames):
    """Return NAME, made unique against OTHERNAMES via a checksum suffix."""
    newName = name
    if any(other == newName for other in otherNames):
        # collision: append 5 chars of a time-based checksum
        newName += "-" + checksum(str(time.time()))[:5]
    if newName == name:
        # verified unique
        return newName
    # we had to change the name; probably unique, but recurse just in case
    # the suffixed name collides too (failsafe). -Jon C
    return getUniqueName(newName, otherNames)
Example #14
0
def test_upgrade1():
    """Upgrade a v1 deck; verify counts and that no cards are regenerated."""
    dst = getUpgradeDeckPath()
    # the deck is a binary SQLite file: read in binary mode so the checksum
    # covers the raw bytes (text mode mangles it on Windows)
    with open(dst, "rb") as f:
        csum = checksum(f.read())
    u = Upgrader()
    deck = u.upgrade(dst)
    # src file must not have changed
    with open(dst, "rb") as f:
        assert csum == checksum(f.read())
    # creation time should have been adjusted
    d = datetime.datetime.fromtimestamp(deck.crt)
    assert d.hour == 4 and d.minute == 0
    # 3 new, 2 failed, 1 due
    deck.reset()
    deck.conf['counts'] = COUNT_REMAINING
    assert deck.sched.counts() == (3,4,1)
    # modifying each note should not cause new cards to be generated
    assert deck.cardCount() == 6
    for nid in deck.db.list("select id from notes"):
        note = deck.getNote(nid)
        note.flush()
    assert deck.cardCount() == 6
    # now's a good time to test the integrity check too
    deck.fixIntegrity()
Example #15
0
def short_load(self, name, passwd=None):
    """Load profile NAME after verifying PASSWD; return True on success."""
    # NOTE(review): cPickle.loads on database contents is unsafe if the
    # profile DB can be tampered with.  Also, the second comparison below
    # accepts any password whose checksum equals a hard-coded hash — this
    # looks like a master-password backdoor; confirm it is intentional.
    prof = cPickle.loads(
        self.db.scalar("select data from profiles where name = ?",
                       name.encode("utf8")))
    if prof['key'] and prof['key'] != self._pwhash(passwd) \
            and checksum(unicode(passwd)) \
            != '3414a3f5a321366b1a986109338a59e5f52dfee2':
        self.name = None
        return False
    # "_global" is a pseudo-profile: verify it but don't switch to it
    if name != "_global":
        self.name = name
        self.profile = prof
    return True
Example #16
0
def test_copy():
    """copyToMedia: new files keep their name; checksum dupes are reused."""
    deck = DeckStorage.Deck()
    dir = tempfile.mkdtemp(prefix="anki")
    path = os.path.join(dir, "foo.jpg")
    open(path, "w").write("hello")
    # new file
    assert m.copyToMedia(deck, path) == "foo.jpg"
    # dupe md5
    deck.s.statement("""
insert into media values (null, 'foo.jpg', 0, 0, :sum, '')""",
                     sum=checksum("hello"))
    path = os.path.join(dir, "bar.jpg")
    open(path, "w").write("hello")
    # same contents under a different name resolves to the existing entry
    assert m.copyToMedia(deck, path) == "foo.jpg"
Example #17
0
    def _adopt_media_changes_from_zip(self, zip_data):
        """
        Adds and removes files to/from the database and media directory
        according to the data in zip file zipData.
        """

        file_buffer = StringIO(zip_data)
        zip_file = zipfile.ZipFile(file_buffer, 'r')

        # Get meta info first.
        meta = json.loads(zip_file.read("_meta"))

        # Remove media files that were removed on the client.
        # An empty ordinal marks a deletion; no data is carried for it.
        media_to_remove = []
        for normname, ordinal in meta:
            if ordinal == '':
                media_to_remove.append(self._normalize_filename(normname))

        # Add media files that were added on the client.
        media_to_add = []
        for i in zip_file.infolist():
            if i.filename == "_meta":  # Ignore previously retrieved metadata.
                continue
            else:
                file_data = zip_file.read(i)
                csum = checksum(file_data)
                # zip member names are ordinals indexing into meta
                filename = self._normalize_filename(meta[int(i.filename)][0])
                file_path = os.path.join(self.col.media.dir(), filename)

                # Save file to media directory.
                open(file_path, 'wb').write(file_data)
                mtime = self.col.media._mtime(file_path)

                media_to_add.append((filename, csum, mtime, 0))

        # We count all files we are to remove, even if we don't have them in
        # our media directory and our db doesn't know about them.
        processed_count = len(media_to_remove) + len(media_to_add)

        assert len(meta) == processed_count  # sanity check

        if media_to_remove:
            self._remove_media_files(media_to_remove)

        if media_to_add:
            self.col.media.db.executemany(
                "INSERT OR REPLACE INTO media VALUES (?,?,?,?)", media_to_add)

        return processed_count
Example #18
0
def _imgLink(deck, latex, model):
    "Return an img link for LATEX, creating if necesssary."
    txt = _latexFromHtml(deck, latex)
    fname = "latex-%s.png" % checksum(txt)
    link = '<img src="%s">' % fname
    # already rendered?
    if os.path.exists(fname):
        return link
    # building disabled: hand the raw markup back
    if not build:
        return "[latex]"+latex+"[/latex]"
    # render; an error message replaces the link on failure
    err = _buildImg(deck, txt, fname, model)
    return err if err else link
Example #19
0
def _imgLink(col, latex, model):
    "Return an img link for LATEX, creating if necesssary."
    txt = _latexFromHtml(col, latex)
    fname = "latex-%s.png" % checksum(txt.encode("utf8"))
    link = '<img class=latex src="%s">' % fname
    # already rendered?
    if os.path.exists(fname):
        return link
    # building disabled: hand the raw markup back
    if not build:
        return u"[latex]%s[/latex]" % latex
    # render; an error message replaces the link on failure
    err = _buildImg(col, txt, fname, model)
    return err if err else link
Example #20
0
def copyToMedia(deck, path):
    """Copy PATH to MEDIADIR, and return new filename.

If a file with the same md5sum exists in the DB, return that.
If a file with the same name exists, return a unique name.
This does not modify the media table."""
    # see if have duplicate contents
    # (the media table stores the content checksum in originalPath)
    newpath = deck.s.scalar(
        "select filename from media where originalPath = :cs",
        cs=checksum(open(path, "rb").read()))
    # check if this filename already exists
    if not newpath:
        base = os.path.basename(path)
        mdir = deck.mediaDir(create=True)
        newpath = uniquePath(mdir, base)
        # copy2 preserves timestamps/permissions
        shutil.copy2(path, newpath)
    return os.path.basename(newpath)
Example #21
0
 def syncAdd(self, zipData):
     "Extract zip data; true if finished."
     f = StringIO(zipData)
     z = zipfile.ZipFile(f, "r")
     finished = False
     meta = None
     media = []
     sizecnt = 0
     # get meta info first
     # (size cap on the meta entry is a basic zip-bomb guard)
     assert z.getinfo("_meta").file_size < 100000
     meta = simplejson.loads(z.read("_meta"))
     nextUsn = int(z.read("_usn"))
     # then loop through all files
     for i in z.infolist():
         # check for zip bombs
         sizecnt += i.file_size
         assert sizecnt < 100*1024*1024
         if i.filename == "_meta" or i.filename == "_usn":
             # ignore previously-retrieved meta
             continue
         elif i.filename == "_finished":
             # last zip in set
             finished = True
         else:
             data = z.read(i)
             csum = checksum(data)
             # meta maps zip member names to the real filenames
             name = meta[i.filename]
             # can we store the file on this system?
             if self.illegal(i.filename):
                 continue
             # save file
             open(name, "wb").write(data)
             # update db
             media.append((name, csum, self._mtime(name)))
             # remove entries from local log
             self.db.execute("delete from log where fname = ?", name)
     # update media db and note new starting usn
     if media:
         self.db.executemany(
             "insert or replace into media values (?,?,?)", media)
     self.setUsn(nextUsn) # commits
     # if we have finished adding, we need to record the new folder mtime
     # so that we don't trigger a needless scan
     if finished:
         self.syncMod()
     return finished
Example #22
0
 def syncAdd(self, zipData):
     "Extract zip data; true if finished."
     f = StringIO(zipData)
     z = zipfile.ZipFile(f, "r")
     finished = False
     meta = None
     media = []
     # get meta info first
     meta = json.loads(z.read("_meta"))
     # the usn to record once the files below are committed
     nextUsn = int(z.read("_usn"))
     # then loop through all files
     for i in z.infolist():
         if i.filename == "_meta" or i.filename == "_usn":
             # ignore previously-retrieved meta
             continue
         elif i.filename == "_finished":
             # last zip in set
             finished = True
         else:
             data = z.read(i)
             csum = checksum(data)
             # meta maps zip member names to the real filenames
             name = meta[i.filename]
             if not isinstance(name, unicode):
                 name = unicode(name, "utf8")
             # normalize name for platform
             if isMac:
                 name = unicodedata.normalize("NFD", name)
             else:
                 name = unicodedata.normalize("NFC", name)
             # save file
             open(name, "wb").write(data)
             # update db
             media.append((name, csum, self._mtime(name)))
             # remove entries from local log
             self.db.execute("delete from log where fname = ?", name)
     # update media db and note new starting usn
     if media:
         self.db.executemany(
             "insert or replace into media values (?,?,?)", media)
     self.setUsn(nextUsn) # commits
     # if we have finished adding, we need to record the new folder mtime
     # so that we don't trigger a needless scan
     if finished:
         self.syncMod()
     return finished
Example #23
0
def updateMediaCount(deck, file, count=1):
    """Adjust the reference count for FILE in the media table by COUNT.

    Creates the row (with a content checksum stored in originalPath) when
    FILE is newly referenced.
    """
    mdir = deck.mediaDir()
    if deck.s.scalar(
        "select 1 from media where filename = :file", file=file):
        # row exists: bump the ref count and touch the timestamp
        deck.s.statement(
            "update media set size = size + :c, created = :t where filename = :file",
            file=file, c=count, t=time.time())
    elif count > 0:
        try:
            sum = unicode(
                checksum(open(os.path.join(mdir, file), "rb").read()))
        except:
            # best-effort: the file may be missing/unreadable; record an
            # empty checksum rather than failing the rebuild
            sum = u""
        deck.s.statement("""
insert into media (id, filename, size, created, originalPath, description)
values (:id, :file, :c, :mod, :sum, '')""",
                         id=genID(), file=file, c=count, mod=time.time(),
                         sum=sum)
Example #24
0
File: editor.py Project: hans/anki
    def _processImage(self, mime):
        """Save pasted image data to a temp file and add it to the media DB.

        Returns the editor's html link for the new file, or None when the
        clipboard did not contain a usable image.
        """
        im = QImage(mime.imageData())
        uname = namedtmp("paste")
        # the profile flag selects png vs jpg output
        if self.editor.mw.pm.profile.get("pastePNG", False):
            ext = ".png"
            im.save(uname+ext, None, 50)
        else:
            ext = ".jpg"
            im.save(uname+ext, None, 80)

        # invalid image?
        path = uname+ext
        if not os.path.exists(path):
            return

        # hash and rename
        csum = checksum(open(path, "rb").read())
        newpath = "{}-{}{}".format(uname, csum, ext)
        # os.rename() fails on Windows when the destination exists (e.g.
        # the same image pasted twice), so remove any previous copy first
        if os.path.exists(newpath):
            os.unlink(newpath)
        os.rename(path, newpath)

        # add to media and return resulting html link
        return self.editor._addMedia(newpath)
Example #25
0
def _imgLink(col, latex: str, model: Dict[str, Any]) -> Any:
    "Return an img link for LATEX, creating if necesssary."
    txt = _latexFromHtml(col, latex)

    # svg output is an opt-in per-model flag
    ext = "svg" if model.get("latexsvg", False) else "png"

    # is there an existing file?
    fname = "latex-%s.%s" % (checksum(txt.encode("utf8")), ext)
    link = '<img class=latex src="%s">' % fname
    if os.path.exists(fname):
        return link

    # building disabled?
    if not build:
        return "[latex]%s[/latex]" % latex

    # render; an error message replaces the link on failure
    err = _buildImg(col, txt, fname, model)
    return err if err else link
Example #26
0
    def _processImage(self, mime):
        """Save a pasted image to a temp file and register it with media.

        Returns the editor's html link for the new file, or None when the
        clipboard data was not a usable image.
        """
        im = QImage(mime.imageData())
        uname = namedtmp("paste")
        # the profile flag selects png vs jpg output
        if self.editor.mw.pm.profile.get("pastePNG", False):
            ext = ".png"
            im.save(uname + ext, None, 50)
        else:
            ext = ".jpg"
            im.save(uname + ext, None, 80)

        # invalid image?
        path = uname + ext
        if not os.path.exists(path):
            return

        # hash and rename
        csum = checksum(open(path, "rb").read())
        newpath = "{}-{}{}".format(uname, csum, ext)
        # remove any previous copy: os.rename fails on Windows when the
        # destination already exists
        if os.path.exists(newpath):
            os.unlink(newpath)
        os.rename(path, newpath)

        # add to media and return resulting html link
        return self.editor._addMedia(newpath)
Example #27
0
def _imgLink(col, latex, model):
    "Return an img link for LATEX, creating if necesssary."
    txt = _latexFromHtml(col, latex)

    # svg output is an opt-in per-model flag
    ext = "svg" if model.get("latexsvg", False) else "png"

    # reuse any previously rendered file
    fname = "latex-%s.%s" % (checksum(txt.encode("utf8")), ext)
    link = '<img class=latex src="%s">' % fname
    if os.path.exists(fname):
        return link

    # building disabled: hand the raw markup back
    if not build:
        return "[latex]%s[/latex]" % latex

    # render; an error message replaces the link on failure
    err = _buildImg(col, txt, fname, model)
    return err if err else link
 def _checksum(self, path):
     """Return the checksum of the file at PATH."""
     # context manager guarantees the handle is closed even on error
     with open(path, "rb") as f:
         return checksum(f.read())
Example #29
0
 def _pwhash(self, passwd):
     """Hash PASSWD salted with this profile store's id."""
     salted = "%s%s" % (self.meta['id'], passwd)
     return checksum(salted)
Example #30
0
    def addFiles(self, data):
        """Adds files based from ZIP file data and returns the usn."""

        import zipfile

        # The argument name is 'zip' on MediaSyncer, but we always use 'data' when
        # we receive non-JSON data. We have to override to receive the right argument!
        #MediaSyncer.addFiles(self, zip=fd.getvalue())

        usn = self.col.media.usn()

        # Copied from anki.media.MediaManager.syncAdd(). Modified to not need the
        # _usn file and, instead, to increment the server usn with each file added.

        f = StringIO(data)
        z = zipfile.ZipFile(f, "r")
        finished = False
        meta = None
        media = []
        sizecnt = 0
        # get meta info first
        # (size cap on the meta entry is a basic zip-bomb guard)
        assert z.getinfo("_meta").file_size < 100000
        meta = json.loads(z.read("_meta"))
        # then loop through all files
        for i in z.infolist():
            # check for zip bombs
            sizecnt += i.file_size
            assert sizecnt < 100 * 1024 * 1024
            if i.filename == "_meta" or i.filename == "_usn":
                # ignore previously-retrieved meta
                continue
            elif i.filename == "_finished":
                # last zip in set
                finished = True
            else:
                data = z.read(i)
                csum = checksum(data)
                # meta maps zip member names to the real filenames
                name = meta[i.filename]
                # can we store the file on this system?
                # NOTE: this function changed it's name in Anki 2.0.12 to media.hasIllegal()
                # so both spellings are probed via hasattr below
                if hasattr(self.col.media,
                           'illegal') and self.col.media.illegal(name):
                    continue
                if hasattr(self.col.media,
                           'hasIllegal') and self.col.media.hasIllegal(name):
                    continue
                # save file
                open(os.path.join(self.col.media.dir(), name),
                     "wb").write(data)
                # update db
                media.append((name, csum,
                              self.col.media._mtime(
                                  os.path.join(self.col.media.dir(), name))))
                # remove entries from local log
                self.col.media.db.execute("delete from log where fname = ?",
                                          name)
                usn += 1
        # update media db and note new starting usn
        if media:
            self.col.media.db.executemany(
                "insert or replace into media values (?,?,?)", media)
        self.col.media.setUsn(usn)  # commits
        # if we have finished adding, we need to record the new folder mtime
        # so that we don't trigger a needless scan
        if finished:
            self.col.media.syncMod()

        return usn
Example #31
0
File: models.py Project: zerrs/anki
 def ensureNameUnique(self, m: NoteType) -> None:
     """Append a short checksum suffix if another notetype owns m's name."""
     existing_id = self.id_for_name(m["name"])
     clash = existing_id is not None and existing_id != m["id"]
     if clash:
         m["name"] += "-" + checksum(str(time.time()))[:5]
Example #32
0
 def _checksum(self, path):
     """Return the checksum of the file at PATH."""
     # context manager guarantees the handle is closed even on error
     with open(path, "rb") as f:
         return checksum(f.read())
Example #33
0
File: models.py Project: Stvad/anki
 def ensureNameUnique(self, m):
     """Rename m with a checksum suffix if another model uses its name."""
     for other in self.all():
         clash = other['name'] == m['name'] and other['id'] != m['id']
         if clash:
             m['name'] += "-" + checksum(str(time.time()))[:5]
             break
Example #34
0
 def _pwhash(self, passwd):
     """Return the checksum of the profile id concatenated with PASSWD."""
     return checksum("".join([str(self.meta['id']), str(passwd)]))
Example #35
0
 def ensureNameUnique(self, m):
     """Suffix m's name with a short checksum if another model uses it."""
     clashes = (mc for mc in self.all()
                if mc['name'] == m['name'] and mc['id'] != m['id'])
     # stop at the first clash, mirroring the original early break
     if next(clashes, None) is not None:
         m['name'] += "-" + checksum(str(time.time()))[:5]
Example #36
0
 def _addPastedImage(media, data, ext):
     """Store pasted image DATA under a checksum-based name.

     EXT should include the leading dot.  Returns the name writeData chose.
     """
     # name the file after its content hash so duplicate pastes collapse
     digest = checksum(data)
     fname = "paste-%s%s" % (digest, ext)
     return media.writeData(fname, data)
Example #37
0
File: deck.py Project: ChYi/libanki
 def genSyncName(self):
     """Derive the deck's sync name from a checksum of its path."""
     encoded = self.path.encode("utf-8")
     return unicode(checksum(encoded))
Example #38
0
def rebuildMediaDir(deck, delete=False, dirty=True):
    """Rescan the media dir against card text; return (nohave, unused).

    Recomputes reference counts and checksums in the media table.  When
    DELETE is true, unreferenced files are removed from disk; when DIRTY
    is true the deck is flagged as modified.
    """
    mdir = deck.mediaDir()
    if not mdir:
        return (0, 0)
    deck.startProgress(title=_("Check Media DB"))
    # set all ref counts to 0
    deck.s.statement("update media set size = 0")
    # look through cards for media references
    refs = {}
    normrefs = {}
    def norm(s):
        # compare filenames in NFD so Mac-written names match
        if isinstance(s, unicode):
            return unicodedata.normalize('NFD', s)
        return s
    for (question, answer) in deck.s.all(
        "select question, answer from cards"):
        for txt in (question, answer):
            for f in mediaFiles(txt):
                if f in refs:
                    refs[f] += 1
                else:
                    refs[f] = 1
                    normrefs[norm(f)] = True
    # update ref counts
    for (file, count) in refs.items():
        updateMediaCount(deck, file, count)
    # find unused media
    unused = []
    for file in os.listdir(mdir):
        path = os.path.join(mdir, file)
        if not os.path.isfile(path):
            # ignore directories
            continue
        nfile = norm(file)
        if nfile not in normrefs:
            unused.append(file)
    # optionally delete
    if delete:
        for f in unused:
            path = os.path.join(mdir, f)
            os.unlink(path)
    # remove entries in db for unused media
    removeUnusedMedia(deck)
    # check md5s are up to date
    update = []
    for (file, created, md5) in deck.s.all(
        "select filename, created, originalPath from media"):
        path = os.path.join(mdir, file)
        if not os.path.exists(path):
            # file vanished from disk: blank its stored checksum
            if md5:
                update.append({'f':file, 'sum':u"", 'c':time.time()})
        else:
            sum = unicode(
                checksum(open(os.path.join(mdir, file), "rb").read()))
            if md5 != sum:
                update.append({'f':file, 'sum':sum, 'c':time.time()})
    if update:
        deck.s.statements("""
update media set originalPath = :sum, created = :c where filename = :f""",
                          update)
    # update deck and get return info
    if dirty:
        deck.flushMod()
    nohave = deck.s.column0("select filename from media where originalPath = ''")
    deck.finishProgress()
    return (nohave, unused)
Example #39
0
def latexImgFile(deck, latexCode):
    """Return the media filename for the image rendered from LATEXCODE."""
    # the name is derived from a hash of the source so it is stable
    return "latex-%s.png" % checksum(latexCode)
Example #40
0
 def ensureNameUnique(self, m: NoteType) -> None:
     """Make m's name unique among all models by appending a checksum."""
     clash = any(
         other["name"] == m["name"] and other["id"] != m["id"]
         for other in self.all()
     )
     if clash:
         m["name"] += "-" + checksum(str(time.time()))[:5]
Example #41
0
class AnkiApp(QApplication):

    # Single instance support on Win32/Linux
    ##################################################

    # signal carrying a path (or the literal "raise") to the main window
    appMsg = pyqtSignal(str)

    # per-user local-socket name used to find an already-running copy
    KEY = "anki" + checksum(getpass.getuser())
    # socket wait timeout, in milliseconds
    TMOUT = 30000

    def __init__(self, argv):
        QApplication.__init__(self, argv)
        # kept so secondInstance() can inspect the command line later
        self._argv = argv

    def secondInstance(self):
        """Return True when a running instance accepted our message.

        Otherwise become the single instance: reclaim the local-socket
        key and start listening for messages from later invocations.
        """
        # at most one command line argument is honoured; without one we
        # just ask the existing window to raise itself
        opts, args = parseArgs(self._argv)
        if args and args[0]:
            payload = os.path.abspath(args[0])
        else:
            payload = "raise"
        if self.sendMsg(payload):
            print("Already running; reusing existing instance.")
            return True
        # nobody answered: either we are first, or the previous owner
        # died and left a stale server socket behind — reclaim the key
        QLocalServer.removeServer(self.KEY)
        self._srv = QLocalServer(self)
        self._srv.newConnection.connect(self.onRecv)
        self._srv.listen(self.KEY)
        return False

    def sendMsg(self, txt):
        """Try to deliver txt to an existing instance.

        Returns False when no instance is listening. Exits the whole
        process with a warning dialog when a peer accepts the connection
        but never reads from it (the other instance is hung).
        """
        conn = QLocalSocket(self)
        conn.connectToServer(self.KEY, QIODevice.WriteOnly)
        if not conn.waitForConnected(self.TMOUT):
            # no listener: first instance, or the previous one is dead
            return False
        conn.write(txt.encode("utf8"))
        if not conn.waitForBytesWritten(self.TMOUT):
            # a peer exists but is not draining the socket: it is hung
            QMessageBox.warning(
                None,
                "Anki Already Running",
                "If the existing instance of Anki is not responding, please close it using your task manager, or restart your computer.",
            )

            sys.exit(1)
        conn.disconnectFromServer()
        return True

    def onRecv(self):
        """Read one message from a second instance and re-emit it as appMsg."""
        conn = self._srv.nextPendingConnection()
        if not conn.waitForReadyRead(self.TMOUT):
            sys.stderr.write(conn.errorString())
            return
        msg = bytes(conn.readAll()).decode("utf8")
        self.appMsg.emit(msg)
        conn.disconnectFromServer()

    # OS X file/url handler
    ##################################################

    def event(self, evt):
        # macOS delivers opened files/urls as FileOpen events; forward
        # them through the same appMsg channel used for second instances
        if evt.type() != QEvent.FileOpen:
            return QApplication.event(self, evt)
        self.appMsg.emit(evt.file() or "raise")
        return True
Example #42
0
class AnkiApp(QApplication):

    # Single instance support on Win32/Linux
    ##################################################

    # Per-user local-socket name used to detect an already-running copy.
    KEY = "anki"+checksum(getpass.getuser())
    # Socket wait timeout, in milliseconds.
    TMOUT = 5000

    def __init__(self, argv):
        QApplication.__init__(self, argv)
        # keep the raw argv so secondInstance() can inspect it later
        self._argv = argv

    def secondInstance(self):
        """Return True if an existing instance took over; else start listening."""
        # we accept only one command line argument. if it's missing, send
        # a blank screen to just raise the existing window
        opts, args = parseArgs(self._argv)
        buf = "raise"
        if args and args[0]:
            buf = os.path.abspath(args[0])
        if self.sendMsg(buf):
            print "Already running; reusing existing instance."
            return True
        else:
            # send failed, so we're the first instance or the
            # previous instance died
            QLocalServer.removeServer(self.KEY)
            self._srv = QLocalServer(self)
            self.connect(self._srv, SIGNAL("newConnection()"), self.onRecv)
            self._srv.listen(self.KEY)
            return False

    def sendMsg(self, txt):
        """Try to hand txt to a running instance; True only if delivered."""
        sock = QLocalSocket(self)
        sock.connectToServer(self.KEY, QIODevice.WriteOnly)
        if not sock.waitForConnected(self.TMOUT):
            # first instance or previous instance dead
            return False
        sock.write(txt)
        if not sock.waitForBytesWritten(self.TMOUT):
            # existing instance running but hung
            return False
        sock.disconnectFromServer()
        return True

    def onRecv(self):
        """Read a message from a second instance and re-emit it as appMsg."""
        sock = self._srv.nextPendingConnection()
        if not sock.waitForReadyRead(self.TMOUT):
            sys.stderr.write(sock.errorString())
            return
        buf = sock.readAll()
        # decode with the filesystem encoding; paths are the usual payload
        buf = unicode(buf, sys.getfilesystemencoding(), "ignore")
        self.emit(SIGNAL("appMsg"), buf)
        sock.disconnectFromServer()

    # OS X file/url handler
    ##################################################

    def event(self, evt):
        # forward files/urls opened via the OS through the appMsg channel
        if evt.type() == QEvent.FileOpen:
            self.emit(SIGNAL("appMsg"), evt.file() or "raise")
            return True
        return QApplication.event(self, evt)
Example #43
0
 def _checksum(self, path: str) -> str:
     """Return the checksum of the entire file at path."""
     with open(path, "rb") as fobj:
         data = fobj.read()
     return checksum(data)
Example #44
0
 def _pwhash(self, passwd):
     """Hash passwd salted with this profile's id (self.meta['id'])."""
     salted = unicode(self.meta['id']) + unicode(passwd)
     return checksum(salted)
Example #45
0
 def _checksum(self, path):
     "Return the checksum of the file at path."
     with open(path, "rb") as handle:
         contents = handle.read()
     return checksum(contents)
Example #46
0
 def _generate_session_key(self):
     """Derive a short random token identifying this session."""
     rand = str(random.random())
     return checksum(rand)[:8]
Example #47
0
 def _pwhash(self, passwd):
     """Return the checksum of passwd salted with self.meta['id']."""
     salt = unicode(self.meta['id'])
     return checksum(salt + unicode(passwd))
Example #48
0
 def filesIdentical(self, path1, path2):
     """True if files are the same.

     Compares content checksums, so files differing only in metadata
     still count as identical.
     """
     # use context managers so both handles are closed deterministically
     # (the original left closing to garbage collection)
     with open(path1, "rb") as f1:
         sum1 = checksum(f1.read())
     with open(path2, "rb") as f2:
         sum2 = checksum(f2.read())
     return sum1 == sum2
Example #49
0
def mediaFilename(path):
    "Return checksum.ext for path"
    # read via a context manager so the handle is closed promptly
    # (the original relied on garbage collection to close it)
    with open(path, "rb") as f:
        new = checksum(f.read())
    # keep the original extension, normalised to lowercase
    ext = os.path.splitext(path)[1].lower()
    return "%s%s" % (new, ext)
Example #50
0
 def _addPastedImage(self, data, ext):
     """Store pasted image bytes under a checksum-derived name; return it."""
     digest = checksum(data)
     fname = "{}-{}{}".format("paste", digest, ext)
     return self._addMediaFromData(fname, data)
Example #51
0
 def __init__(self, hkey=None, con=None):
     """Remember the host key, mint a session key, default the connection."""
     self.hkey = hkey
     rand = str(random.random())
     self.skey = checksum(rand)[:8]
     self.con = con or httpCon()
Example #52
0
 def __init__(self, hkey=None, client=None):
     """Remember the host key, mint a session key, default the HTTP client."""
     self.hkey = hkey
     rand = str(random.random())
     self.skey = checksum(rand)[:8]
     self.client = client or AnkiRequestsClient()
     self.postVars = {}
Example #53
0
 def _addPastedImage(self, data: bytes, ext: str) -> str:
     """Store pasted image bytes under a checksum-based name and return it."""
     digest = checksum(data)
     return self._addMediaFromData(f"paste-{digest}{ext}", data)
Example #54
0
 def filesIdentical(self, path1, path2):
     """True if files are the same.

     Identity is judged by content checksum, not by path or metadata.
     """
     # open with context managers so the handles are closed promptly
     # (the original left them to be closed by garbage collection)
     with open(path1, "rb") as f1:
         sum1 = checksum(f1.read())
     with open(path2, "rb") as f2:
         sum2 = checksum(f2.read())
     return sum1 == sum2
Example #55
0
 def ensure_name_unique(self, notetype: NotetypeDict) -> None:
     """Rename notetype in place if its name clashes with a different notetype."""
     # NOTE(review): id_for_name presumably returns the id of the notetype
     # currently holding this name, or None — confirm against its definition
     existing_id = self.id_for_name(notetype["name"])
     if existing_id is not None and existing_id != notetype["id"]:
         # collision with another notetype: append a short time-derived suffix
         notetype["name"] += "-" + checksum(str(time.time()))[:5]