def load(self, decks, dconf):
    self.decks = json.loads(decks)
    self.dconf = json.loads(dconf)
    # set limits to within bounds
    found = False
    for c in list(self.dconf.values()):
        for t in ('rev', 'new'):
            pd = 'perDay'
            if c[t][pd] > 999999:
                c[t][pd] = 999999
                self.save(c)
                found = True
    if not found:
        self.changed = False
def syncRanks():
    # upload rank db
    files = {'file': open(RANK_DATABASE, 'rb')}
    url = address + 'api/uploadranks'
    tokenUsed = tokenizedAuth()
    if tokenUsed:
        s = requests.Session()
        r = s.get(url, files=files,
                  auth=HTTPBasicAuth(tokenUsed[0], tokenUsed[1]))
        conn = sqlite3.connect(RANK_DATABASE)
        c = conn.cursor()
        # update local db with values
        try:
            response = json.loads(r.content)
            ranklist = response['rank_list']
            for item in ranklist:
                try:
                    c.execute(
                        'INSERT OR REPLACE INTO rankingSystem VALUES (?,?,?)',
                        item)
                    conn.commit()
                except sqlite3.IntegrityError:
                    pass
        except ValueError:
            # meaning there's nothing to update
            pass
        c.execute("""UPDATE overallRanking SET timesync = 0""")
        conn.commit()
        conn.close()
def get_current_keybindings():
    keybindings = None
    try:
        with codecs.open(PrefHelper.get_keybindings_path(),
                         encoding="utf8") as f:
            # validate JSON
            result_json = u""
            for line in f:
                line = line.strip()
                if line == u"":
                    continue
                # strip out comments from keybindings file
                if line.startswith(u"//"):
                    continue
                else:
                    result_json += line
            keybindings = json.loads(result_json)
    except (ValueError, IOError) as e:
        # file is missing or is not valid JSON: revert to default bindings
        # and create a keybindings file if it doesn't exist
        print e  # TODO: log error
        if not os.path.exists(PrefHelper.get_keybindings_path()):
            PrefHelper.create_keybindings_file()
        return PrefHelper.get_default_keybindings()
    else:
        # keybindings loaded from file should have exactly as many
        # key-value pairs as in the default keybindings
        default_keybindings = PrefHelper.get_default_keybindings()
        keybindings = PrefHelper.normalize_user_prefs(
            default_keybindings, keybindings)
        keybindings = utility.check_user_keybindings(
            default_keybindings, keybindings, const.PLATFORM)
    return keybindings
def setTokenToOnlineVer():
    url = address + 'api/getSetRank'
    tokenUsed = tokenizedAuth()
    if tokenUsed:
        s = requests.Session()
        try:
            r = s.get(url, auth=HTTPBasicAuth(tokenUsed[0], tokenUsed[1]))
            if r.status_code != 200:
                if r.status_code == 401:
                    pass
                else:
                    showInfo(
                        "We had a problem connecting; maybe your token has "
                        "expired? Status code: %s" % r.status_code)
            else:
                response = json.loads(r.content)
                valu = response["user rank"]
                conn = sqlite3.connect(RANK_DATABASE)
                c = conn.cursor()
                c.execute(
                    """update tokens set online_rank = ? where id = 1""",
                    (valu,))
                conn.commit()
                conn.close()
        except:
            # showInfo("Something went wrong when looking for your rank filter online.\n"
            #          "Restart Anki to refresh your rank filter if you want it changed.")
            pass
def run(self):
    # extract the deck from the zip file
    self.zip = z = zipfile.ZipFile(self.file)
    col = z.read("collection.anki2")
    colpath = tmpfile(suffix=".anki2")
    with open(colpath, "wb") as f:
        f.write(col)
    self.file = colpath
    # we need the media dict in advance, and we'll need a map of fname ->
    # number to use during the import
    self.nameToNum = {}
    dir = self.col.media.dir()
    for k, v in list(json.loads(z.read("media").decode("utf8")).items()):
        path = os.path.abspath(os.path.join(dir, v))
        if os.path.commonprefix([path, dir]) != dir:
            raise Exception("Invalid file")
        self.nameToNum[unicodedata.normalize("NFC", v)] = k
    # run anki2 importer
    Anki2Importer.run(self)
    # import static media
    for file, c in list(self.nameToNum.items()):
        if not file.startswith("_") and not file.startswith("latex-"):
            continue
        path = os.path.join(self.col.media.dir(), file)
        if not os.path.exists(path):
            with open(path, "wb") as f:
                f.write(z.read(c))
def mediaChanges(self, **kw):
    self.postVars = dict(
        sk=self.skey,
    )
    resp = json.loads(
        self.req("mediaChanges", StringIO(json.dumps(kw))))
    return self._dataOnly(resp)
def mediatest(self, cmd):
    self.postVars = dict(
        k=self.hkey,
    )
    return self._dataOnly(json.loads(
        self.req("newMediaTest", StringIO(
            json.dumps(dict(cmd=cmd))))))
def addFilesFromZip(self, zipData):
    "Extract zip data; true if finished."
    f = StringIO(zipData)
    z = zipfile.ZipFile(f, "r")
    media = []
    # get meta info first
    meta = json.loads(z.read("_meta"))
    # then loop through all files
    cnt = 0
    for i in z.infolist():
        if i.filename == "_meta":
            # ignore previously-retrieved meta
            continue
        else:
            data = z.read(i)
            csum = checksum(data)
            name = meta[i.filename]
            if not isinstance(name, unicode):
                name = unicode(name, "utf8")
            # normalize name for platform
            if isMac:
                name = unicodedata.normalize("NFD", name)
            else:
                name = unicodedata.normalize("NFC", name)
            # save file
            open(name, "wb").write(data)
            # update db
            media.append((name, csum, self._mtime(name), 0))
            cnt += 1
    if media:
        self.db.executemany(
            "insert or replace into media values (?,?,?,?)", media)
    return cnt
def addFilesFromZip(self, zipData):
    "Extract zip data; true if finished."
    # the zip payload is binary, so use BytesIO rather than StringIO
    from io import BytesIO
    f = BytesIO(zipData)
    z = zipfile.ZipFile(f, "r")
    media = []
    # get meta info first
    meta = json.loads(z.read("_meta"))
    # then loop through all files
    cnt = 0
    for i in z.infolist():
        if i.filename == "_meta":
            # ignore previously-retrieved meta
            continue
        else:
            data = z.read(i)
            csum = checksum(data)
            name = meta[i.filename]
            if not isinstance(name, str):
                name = str(name, "utf8")
            # normalize name for platform
            if isMac:
                name = unicodedata.normalize("NFD", name)
            else:
                name = unicodedata.normalize("NFC", name)
            # save file
            open(name, "wb").write(data)
            # update db
            media.append((name, csum, self._mtime(name), 0))
            cnt += 1
    if media:
        self.db.executemany(
            "insert or replace into media values (?,?,?,?)", media)
    return cnt
def run(self):
    # extract the deck from the zip file
    self.zip = z = zipfile.ZipFile(self.file)
    # v2 scheduler?
    try:
        z.getinfo("collection.anki21")
        suffix = ".anki21"
    except KeyError:
        suffix = ".anki2"
    col = z.read("collection" + suffix)
    colpath = tmpfile(suffix=suffix)
    with open(colpath, "wb") as f:
        f.write(col)
    self.file = colpath
    # we need the media dict in advance, and we'll need a map of fname ->
    # number to use during the import
    self.nameToNum = {}
    dir = self.col.media.dir()
    for k, v in list(json.loads(z.read("media").decode("utf8")).items()):
        path = os.path.abspath(os.path.join(dir, v))
        if os.path.commonprefix([path, dir]) != dir:
            raise Exception("Invalid file")
        self.nameToNum[unicodedata.normalize("NFC", v)] = k
    # run anki2 importer
    Anki2Importer.run(self)
    # import static media
    for file, c in list(self.nameToNum.items()):
        if not file.startswith("_") and not file.startswith("latex-"):
            continue
        path = os.path.join(self.col.media.dir(), file)
        if not os.path.exists(path):
            with open(path, "wb") as f:
                f.write(z.read(c))
def onlineLast():
    url = address + 'api/updateTimeLastUsed'
    tokenUsed = tokenizedAuth()
    if tokenUsed:
        s = requests.Session()
        try:
            r = s.post(url, auth=HTTPBasicAuth(tokenUsed[0], tokenUsed[1]))
        except Exception as e:
            showInfo("We couldn't connect to MFC \n\nError ref: %s" % str(e))
            return ""
        if r.status_code != 200:
            if r.status_code == 401:
                showInfo(
                    "Connect to medicflashcards to dismiss this message.\n"
                    "To do this, go to Tools > Medic Flashcards and enter "
                    "your account details.")
            else:
                showInfo(
                    "It looks like your token expired, so we couldn't "
                    "synchronise with the server.\n\n"
                    "Please reconnect by going to Tools > Medic Flashcards.\n"
                    "Status code: %s" % str(r.status_code))
            return ""
        else:
            response = json.loads(r.content)
            timess = response['user_seen']
            timess = str(timess)
            new = timess.split('.')
            del new[-1]
            datess = new[0]
            datess = datetime.datetime.strptime(datess, "%Y-%m-%d %H:%M:%S")
            return datess
    else:
        return ""
def accept(self):
    txt = self.form.text.toPlainText()
    error = None
    try:
        new_conf = json.loads(txt)
    except ValueError as e:
        new_conf = None
        error = repr(e)
    if new_conf and not isinstance(new_conf, dict):
        error = "Top level object must be a dictionary."
    if error:
        showInfo("The configuration seems to be invalid. Please make "
                 "sure you haven't made a typo or forgotten a control "
                 "character (e.g. commas, brackets, etc.). "
                 "The original error message follows below:\n\n{}"
                 "\n\nIf you're not sure what's wrong you can start "
                 "from scratch by clicking on 'Restore Defaults' "
                 "in the config window.".format(error))
        return
    act = self.mgr.conf_updated_action
    if act:
        act(new_conf)
    super(ConfigEditor, self).accept()
def _readConfig(self):
    conf = {}
    moduleDir, _ = os.path.split(__file__)
    # Read config.json
    path = os.path.join(moduleDir, 'config.json')
    if os.path.exists(path):
        with open(path, 'r', encoding='utf-8') as f:
            data = f.read()
            conf = json.loads(data)
    # Read meta.json
    path = os.path.join(moduleDir, 'meta.json')
    if os.path.exists(path):
        with open(path, 'r', encoding='utf-8') as f:
            data = f.read()
            meta = json.loads(data)
            conf = nestedUpdate(conf, meta.get('config', {}))
    return conf
def meta(self):
    ret = self.req(
        "meta", StringIO(json.dumps(dict(v=SYNC_VER))),
        badAuthRaises=False)
    if not ret:
        # invalid auth
        return
    return json.loads(ret)
def begin(self):
    self.postVars = dict(
        k=self.hkey,
        v="ankidesktop,%s,%s" % (anki.version, platDesc()))
    ret = self._dataOnly(
        json.loads(self.req("begin", StringIO(json.dumps(dict())))))
    self.skey = ret['sk']
    return ret
def readFile(self, fname, jsn=True):
    moduleDir, _ = os.path.split(__file__)
    path = os.path.join(moduleDir, fname)
    if os.path.exists(path):
        with open(path, 'r', encoding='utf-8') as f:
            data = f.read()
        if jsn:
            return json.loads(data)
        return data
def load(self):
    fp = Storage(self.word).getPath() + self.word + '.mtp'
    if os.path.exists(fp):
        arr_data_all = []
        with open(fp, 'r') as f:
            arr_data_all = json.load(f)
        self.stars = json.loads(arr_data_all[0])
        arr_data = json.loads(arr_data_all[1])
        for st in arr_data:
            e = EntityEx()
            e.loads(st)
            self.data.append(e)
        return True
    else:
        return False
def myBridge(self, str, _old=None):
    if str.startswith("autocomplete"):
        (type, jsonText) = str.split(":", 1)
        result = json.loads(jsonText)
        text = self.mungeHTML(result['text'])
        # bail out if the user hasn't actually changed the field
        previous = "%d:%s" % (self.currentField, text)
        if self.prevAutocomplete == previous:
            return
        self.prevAutocomplete = previous
        if text == "" or len(text) > 500 or self.note is None:
            self.web.eval("$('.autocomplete').remove();")
            return
        field = self.note.model()['flds'][self.currentField]
        if field['name'] in noAutocompleteFields:
            field['no_autocomplete'] = True
        if 'no_autocomplete' in field.keys() and field['no_autocomplete']:
            return
        # find a value from the same model and field whose
        # prefix is what the user typed so far
        query = "'note:%s' '%s:%s*'" % (
            self.note.model()['name'], field['name'], text)
        col = self.note.col
        res = col.findCards(query, order=True)
        if len(res) == 0:
            self.web.eval("$('.autocomplete').remove();")
            return
        # pull out the full value
        value = col.getCard(res[0]).note().fields[self.currentField]
        escaped = json.dumps(value)
        self.web.eval("""
            $('.autocomplete').remove();
            if (currentField) {
                $('<div class="autocomplete">' + %s + '</div>').click(function () {
                    currentField.focus();
                    currentField.innerHTML = %s;
                    saveField("key");
                }).insertAfter(currentField)
            }
            """ % (escaped, escaped))
    else:
        _old(self, str)
def load(self):
    (self.crt, self.mod, self.scm, self.dty,
     self._usn, self.ls, self.conf,
     models, decks, dconf, tags) = self.db.first("""
select crt, mod, scm, dty, usn, ls,
conf, models, decks, dconf, tags from col""")
    self.conf = json.loads(self.conf)
    self.models.load(models)
    self.decks.load(decks, dconf)
    self.tags.load(tags)
def hostKey(self, user, pw):
    "Returns hkey or none if user/pw incorrect."
    self.postVars = dict()
    ret = self.req(
        "hostKey", StringIO(json.dumps(dict(u=user, p=pw))),
        badAuthRaises=False)
    if not ret:
        # invalid auth
        return
    self.hkey = json.loads(ret)["key"]
    return self.hkey
def meta(self):
    ret = self.req(
        "meta", StringIO(json.dumps(dict(
            v=SYNC_VER,
            cv="ankidesktop,%s,%s" % (anki.version, platDesc())))),
        badAuthRaises=False)
    if not ret:
        # invalid auth
        return
    return json.loads(ret)
def bridge(self, str):
    if not self.note or not runHook:
        # shutdown
        return
    # focus lost or key/button pressed?
    if str.startswith("blur") or str.startswith("key"):
        (type, txt) = str.split(":", 1)
        txt = self.mungeHTML(txt)
        # misbehaving apps may include a null byte in the text
        txt = txt.replace("\x00", "")
        # reverse the url quoting we added to get images to display
        txt = unicode(urllib2.unquote(txt.encode("utf8")), "utf8", "replace")
        # make sure a trailing <br /> is removed
        txt = re.sub("(<br />)*$", "", txt)
        self.note.fields[self.currentField] = txt
        if not self.addMode:
            self.note.flush()
            self.mw.requireReset()
        if type == "blur":
            self.disableButtons()
            # run any filters
            if runFilter(
                    "editFocusLost", False, self.note, self.currentField):
                # something updated the note; schedule reload
                def onUpdate():
                    self.stealFocus = True
                    self.loadNote()
                    self.stealFocus = False
                    self.checkValid()
                self.mw.progress.timer(100, onUpdate, False)
            else:
                self.checkValid()
        else:
            runHook("editTimer", self.note)
            self.checkValid()
    # focused into field?
    elif str.startswith("focus"):
        (type, num) = str.split(":", 1)
        self.enableButtons()
        self.currentField = int(num)
        runHook("editFocusGained", self.note, self.currentField)
    # state buttons changed?
    elif str.startswith("state"):
        (cmd, txt) = str.split(":", 1)
        r = json.loads(txt)
        self._buttons['text_bold'].setChecked(r['bold'])
        self._buttons['text_italic'].setChecked(r['italic'])
        self._buttons['text_under'].setChecked(r['under'])
        self._buttons['text_super'].setChecked(r['super'])
        self._buttons['text_sub'].setChecked(r['sub'])
    elif str.startswith("dupes"):
        self.showDupes()
    else:
        print str
def myBridge(self, str, _old=None):
    if str.startswith("autocomplete"):
        (type, jsonText) = str.split(":", 1)
        result = json.loads(jsonText)
        text = self.mungeHTML(result["text"])
        # bail out if the user hasn't actually changed the field
        previous = "%d:%s" % (self.currentField, text)
        if self.prevAutocomplete == previous:
            return
        self.prevAutocomplete = previous
        if text == "" or len(text) > 500 or self.note is None:
            self.web.eval("$('.autocomplete').remove();")
            return
        field = self.note.model()["flds"][self.currentField]
        if field["name"] in noAutocompleteFields:
            field["no_autocomplete"] = True
        if "no_autocomplete" in field.keys() and field["no_autocomplete"]:
            return
        # find a value from the same model and field whose
        # prefix is what the user typed so far
        query = "'note:%s' '%s:%s*'" % (
            self.note.model()["name"], field["name"], text)
        col = self.note.col
        res = col.findCards(query, order=True)
        if len(res) == 0:
            self.web.eval("$('.autocomplete').remove();")
            return
        # pull out the full value
        value = col.getCard(res[0]).note().fields[self.currentField]
        escaped = json.dumps(value)
        self.web.eval(
            """
            $('.autocomplete').remove();
            if (currentField) {
                $('<div class="autocomplete">' + %s + '</div>').click(function () {
                    currentField.focus();
                    currentField.innerHTML = %s;
                    saveField("key");
                }).insertAfter(currentField)
            }
            """
            % (escaped, escaped)
        )
    else:
        _old(self, str)
def hostKey(self, user, pw):
    "Returns hkey or none if user/pw incorrect."
    ret = self.req(
        "hostKey", StringIO(json.dumps(dict(u=user, p=pw))),
        badAuthRaises=False, hkey=False)
    if not ret:
        # invalid auth
        return
    self.hkey = json.loads(ret)['key']
    return self.hkey
def hostKey(self, user, pw):
    "Returns hkey or none if user/pw incorrect."
    self.postVars = dict()
    ret = self.req(
        "hostKey", io.BytesIO(json.dumps(dict(u=user, p=pw)).encode("utf8")),
        badAuthRaises=False)
    if not ret:
        # invalid auth
        return
    self.hkey = json.loads(ret.decode("utf8"))['key']
    return self.hkey
def meta(self):
    self.postVars = dict(
        k=self.hkey,
        s=self.skey,
    )
    ret = self.req(
        "meta", io.BytesIO(json.dumps(dict(
            v=SYNC_VER,
            cv="ankidesktop,%s,%s" % (anki.version, platDesc()))).encode("utf8")),
        badAuthRaises=False)
    if not ret:
        # invalid auth
        return
    return json.loads(ret.decode("utf8"))
def load(self, decks, dconf):
    """Assign the decks and dconf of this object from the two parameters.

    Also ensures that the number of cards per day is at most 999999,
    capping any value above that limit.

    Keyword arguments:
    decks -- JSON dict mapping each id (as a string) to its deck
    dconf -- JSON dict mapping each id (as a string) to its configuration (options)
    """
    self.decks = json.loads(decks)
    self.dconf = json.loads(dconf)
    # set limits to within bounds
    found = False
    for c in list(self.dconf.values()):
        for t in ('rev', 'new'):
            pd = 'perDay'
            if c[t][pd] > 999999:
                c[t][pd] = 999999
                self.save(c)
                found = True
    if not found:
        self.changed = False
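# Hedged usage sketch (not from the original source): it shows, on a plain
# dict, the same per-day clamping that load() above applies to every dconf
# entry. The helper name and the sample values are invented; the
# {'rev'/'new': {'perDay': ...}} shape mirrors what the code expects.
def _clamp_per_day(conf, limit=999999):
    """Return True if any 'rev'/'new' perDay value had to be capped."""
    changed = False
    for t in ('rev', 'new'):
        if conf[t]['perDay'] > limit:
            conf[t]['perDay'] = limit
            changed = True
    return changed

# example_conf = {'rev': {'perDay': 5000000}, 'new': {'perDay': 20}}
# _clamp_per_day(example_conf)  # -> True; example_conf['rev']['perDay'] == 999999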
def load_preferences_from_disk():
    """
    Load the current preferences from disk. If no preferences file is
    found, or if it is corrupted, return the default preferences.
    """
    prefs = None
    try:
        with codecs.open(PrefHelper.get_preference_path(),
                         encoding="utf8") as f:
            prefs = json.loads(f.read())
    except:
        prefs = PrefHelper.default_conf
    else:
        default_conf = prefs
    return prefs
def bridge(self, str):
    if not self.note or not runHook:
        # shutdown
        return
    # focus lost or key/button pressed?
    if str.startswith("blur") or str.startswith("key"):
        (type, txt) = str.split(":", 1)
        txt = self.mungeHTML(txt)
        # misbehaving apps may include a null byte in the text
        txt = txt.replace("\x00", "")
        # reverse the url quoting we added to get images to display
        txt = unicode(urllib2.unquote(txt.encode("utf8")), "utf8", "replace")
        self.note.fields[self.currentField] = txt
        if type == "blur":
            self.disableButtons()
            # run any filters
            if runFilter(
                    "editFocusLost", False, self.note, self.currentField):
                # something updated the note; schedule reload
                def onUpdate():
                    self.loadNote()
                    self.checkValid()
            else:
                self.checkValid()
        else:
            runHook("editTimer", self.note)
            self.checkValid()
    # focused into field?
    elif str.startswith("focus"):
        (type, num) = str.split(":", 1)
        self.enableButtons()
        self.currentField = int(num)
    # state buttons changed?
    elif str.startswith("state"):
        (cmd, txt) = str.split(":", 1)
        r = json.loads(txt)
        self._buttons['text_bold'].setChecked(r['bold'])
        self._buttons['text_italic'].setChecked(r['italic'])
        self._buttons['text_under'].setChecked(r['under'])
        self._buttons['text_super'].setChecked(r['super'])
        self._buttons['text_sub'].setChecked(r['sub'])
    elif str.startswith("dupes"):
        self.showDupes()
    # save current selection
    elif str.startswith("selection"):
        (type, start, end) = str.split(":", 2)
        self.currentSelection = (int(start), int(end))
    else:
        print str
def _getModTimes(self, client, chunk):
    try:
        url = aqt.appShared + "updates/" + ",".join(chunk)
        crawler = urllib2.build_opener()
        crawler.addheaders = [('User-agent', USER_AGENT)]
        c = crawler.open(url)
        data = c.read()
        return json.loads(data)
    except ValueError:
        utils.showInfo("Not a valid url")
        return
    except urllib2.HTTPError as error:
        showWarning('The remote server has returned an error:'
                    ' HTTP Error {} ({})'.format(error.code, error.reason))
        return
def linkHandler(self, url, _old):
    """Anki 2.0: Extend link handler with browser links
    Anki 2.1: Also acts as the JS <-> Py bridge"""
    if url.startswith("dctBrws"):
        (cmd, arg) = url.split(":", 1)
        if not arg:
            return
        browseToNid(arg)
    elif anki21 and url.startswith("dctLookup"):
        (cmd, payload) = url.split(":", 1)
        term, ignore_nid = json.loads(payload)
        term = term.strip()
        return getContentFor(term, ignore_nid)
    else:
        return _old(self, url)
def run(self):
    # extract the deck from the zip file
    z = zipfile.ZipFile(self.file)
    col = z.read("collection.anki2")
    colpath = tmpfile(suffix=".anki2")
    open(colpath, "wb").write(col)
    # pass it to the anki2 importer
    self.file = colpath
    Anki2Importer.run(self)
    # import media
    media = json.loads(z.read("media"))
    for c, file in media.items():
        path = os.path.join(self.col.media.dir(), file)
        if not os.path.exists(path):
            open(path, "wb").write(z.read(c))
def accept(self):
    txt = self.form.editor.toPlainText()
    try:
        new_conf = json.loads(txt)
    except Exception as e:
        showInfo(_("Invalid configuration: ") + repr(e))
        return
    if new_conf != self.conf:
        self.mgr.writeConfig(self.addon, new_conf)
        # does the add-on define an action to be fired?
        act = self.mgr.configUpdatedAction(self.addon)
        if act:
            act(new_conf)
    super(ConfigEditor, self).accept()
def accept(self):
    txt = self.form.text.toPlainText()
    try:
        new_conf = json.loads(txt)
    except ValueError as e:
        showInfo("Invalid configuration, restoring previous config: " + repr(e))
        return
    if not isinstance(new_conf, dict):
        showInfo("Invalid configuration, restoring previous config: "
                 "top level object must be a map")
        return
    self.mgr["local"] = new_conf
    self.mgr.save(storage_name="local")
    super(ConfigEditor, self).accept()
def addonConfigDefaults(self, dir):
    path = os.path.join(self.addonsFolder(dir), "config.json")
    try:
        with open(path, encoding="utf8") as f:
            t = f.read()
            try:
                return json.loads(t)
            except Exception as e:
                print("There is a JSON error in the default config of add-on "
                      "{dir}:".format(dir=dir), file=sys.stderr)
                print(str(e), file=sys.stderr)
                print("\n\n===================\n\n"
                      "Copy and save the previous config to be sure that it is "
                      "not overwritten by accident. The previous config was "
                      "{t}".format(t=t), file=sys.stderr)
                return dict()
    except:
        return None
def load(self):
    (self.crt, self.mod, self.scm,
     self.dty,  # no longer used
     self._usn, self.ls, self.conf,
     models, decks, dconf, tags) = self.db.first("""
select crt, mod, scm, dty, usn, ls,
conf, models, decks, dconf, tags from col""")
    self.conf = json.loads(self.conf)
    # self.models.load(models)
    self.decks.load(decks, dconf)
def syncAdd(self, zipData):
    "Extract zip data; true if finished."
    f = StringIO(zipData)
    z = zipfile.ZipFile(f, "r")
    finished = False
    meta = None
    media = []
    sizecnt = 0
    # get meta info first
    assert z.getinfo("_meta").file_size < 100000
    meta = json.loads(z.read("_meta"))
    nextUsn = int(z.read("_usn"))
    # then loop through all files
    for i in z.infolist():
        # check for zip bombs
        sizecnt += i.file_size
        assert sizecnt < 100*1024*1024
        if i.filename == "_meta" or i.filename == "_usn":
            # ignore previously-retrieved meta
            continue
        elif i.filename == "_finished":
            # last zip in set
            finished = True
        else:
            data = z.read(i)
            csum = checksum(data)
            name = meta[i.filename]
            # can we store the file on this system?
            if self.illegal(name):
                continue
            # save file
            open(name, "wb").write(data)
            # update db
            media.append((name, csum, self._mtime(name)))
            # remove entries from local log
            self.db.execute("delete from log where fname = ?", name)
    # update media db and note new starting usn
    if media:
        self.db.executemany(
            "insert or replace into media values (?,?,?)", media)
    self.setUsn(nextUsn)  # commits
    # if we have finished adding, we need to record the new folder mtime
    # so that we don't trigger a needless scan
    if finished:
        self.syncMod()
    return finished
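# Hedged sketch (not part of the original code): it builds the kind of zip
# payload that the syncAdd()/addFilesFromZip() variants in this collection
# unpack -- numbered members, a "_meta" JSON map from member name to real
# filename, a "_usn" marker and, for the last chunk in a set, "_finished".
# The helper name, file names and contents are invented for illustration.
import io
import json
import zipfile

def build_media_zip(files, next_usn, finished=True):
    """files: dict of real filename -> bytes content."""
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as z:
        meta = {}
        for n, (fname, data) in enumerate(files.items()):
            member = str(n)
            z.writestr(member, data)
            meta[member] = fname
        z.writestr("_meta", json.dumps(meta))
        z.writestr("_usn", str(next_usn))
        if finished:
            z.writestr("_finished", "")
    return buf.getvalue()

# zip_data = build_media_zip({"card.jpg": b"..."}, next_usn=42)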
def addonMeta(self, dir):
    path = self._addonMetaPath(dir)
    try:
        with open(path, encoding="utf8") as f:
            t = f.read()
            try:
                return json.loads(t)
            except Exception as e:
                print("There is a JSON error in the current config of add-on "
                      "{dir}:".format(dir=dir), file=sys.stderr)
                print(str(e), file=sys.stderr)
                print("\n\n===================\n\n"
                      "Copy and save the previous config to be sure that it is "
                      "not overwritten by accident. The previous config was "
                      "{t}".format(t=t), file=sys.stderr)
    except:
        pass
    return dict()
def syncAdd(self, zipData):
    "Extract zip data; true if finished."
    f = StringIO(zipData)
    z = zipfile.ZipFile(f, "r")
    finished = False
    meta = None
    media = []
    # get meta info first
    meta = json.loads(z.read("_meta"))
    nextUsn = int(z.read("_usn"))
    # then loop through all files
    for i in z.infolist():
        if i.filename == "_meta" or i.filename == "_usn":
            # ignore previously-retrieved meta
            continue
        elif i.filename == "_finished":
            # last zip in set
            finished = True
        else:
            data = z.read(i)
            csum = checksum(data)
            name = meta[i.filename]
            if not isinstance(name, unicode):
                name = unicode(name, "utf8")
            # normalize name for platform
            if isMac:
                name = unicodedata.normalize("NFD", name)
            else:
                name = unicodedata.normalize("NFC", name)
            # save file
            open(name, "wb").write(data)
            # update db
            media.append((name, csum, self._mtime(name)))
            # remove entries from local log
            self.db.execute("delete from log where fname = ?", name)
    # update media db and note new starting usn
    if media:
        self.db.executemany(
            "insert or replace into media values (?,?,?)", media)
    self.setUsn(nextUsn)  # commits
    # if we have finished adding, we need to record the new folder mtime
    # so that we don't trigger a needless scan
    if finished:
        self.syncMod()
    return finished
def load_preferences_from_disk():
    """
    Load the current preferences from disk. If no preferences file is
    found, or if it is corrupted, return the default preferences.
    """
    prefs = None
    try:
        with codecs.open(PrefHelper.get_preference_path(),
                         encoding="utf8") as f:
            encoded_prefs = f.read(const.MAX_BYTES_PREFS)
            decoded_prefs = base64.b64decode(encoded_prefs)
            prefs = json.loads(decoded_prefs)
    except:
        prefs = PrefHelper.get_default_preferences()
    else:
        prefs = PrefHelper.normalize_user_prefs(
            PrefHelper.get_default_preferences(), prefs)
    return prefs
def get_current_preferences():
    prefs = None
    try:
        with codecs.open(PrefHelper.get_preference_path(),
                         encoding="utf8") as f:
            encoded_prefs = f.read(const.MAX_BYTES_PREFS)
            decoded_prefs = base64.b64decode(encoded_prefs)
            prefs = json.loads(decoded_prefs)
    except:
        # file does not exist or is corrupted: fall back to default
        with codecs.open(PrefHelper.get_preference_path(), "w",
                         encoding="utf8") as f:
            prefs = PrefHelper.get_default_preferences()
            json.dump(prefs, f)
    else:
        prefs = PrefHelper.normalize_user_prefs(
            PrefHelper.get_default_preferences(), prefs)
        PrefHelper.save_prefs(prefs)
    return prefs
def run(self):
    # extract the deck from the zip file
    self.zip = z = zipfile.ZipFile(self.file)
    col = z.read("collection.anki2")
    colpath = tmpfile(suffix=".anki2")
    open(colpath, "wb").write(col)
    self.file = colpath
    # we need the media dict in advance, and we'll need a map of fname ->
    # number to use during the import
    self.nameToNum = {}
    for k, v in json.loads(z.read("media")).items():
        self.nameToNum[v] = k
    # run anki2 importer
    Anki2Importer.run(self)
    # import static media
    for file, c in self.nameToNum.items():
        if not file.startswith("_"):
            continue
        path = os.path.join(self.col.media.dir(), file)
        if not os.path.exists(path):
            open(path, "wb").write(z.read(c))
def write(self, word):
    content = []
    fullname = Storage(word).getPath() + word + ".bin.txt"
    try:
        with open(fullname, 'r') as f:
            content = f.readlines()
    except IOError:
        pass
    # url = u'http://translate.google.com//translate_a/ex?sl=en&tl=ru&q=' + word
    url = u'http://translate.google.com//translate_a/single?client=t&sl=en&tl=ru&dt=ex&q=' + word
    user_agent = 'Mozilla/5.0'
    request = urllib2.Request(url)
    request.add_header('User-agent', user_agent)
    response = urllib2.urlopen(request)
    if response.code == 200:
        s = response.read()
        if len(s) < 20:
            return
        s = s.replace("[,,,,,,,,,,,,,[", "")
        s = s.replace("]]]]", "]]")
        s = s.replace(",,", ",\"\",")
        s = s.replace(",,", ",\"\",")
        # utils.showInfo(s)
        # utils.showInfo(s[2500:])
        j = json.loads(s)
        l = 0
        try:
            l = len(j)
        except IndexError:
            pass
        if l > 0:
            with open(fullname, "a") as f1:
                for s in j:
                    st = stripHTML(removeNonAscii(s[0])) + "\n"
                    try:
                        if not st in content:
                            f1.write(st)
                    except:
                        utils.showInfo(word)
                        break
def run(self):
    if not self.config['updates']:
        return
    d = self.stats
    d['proto'] = 2
    d = urllib.urlencode(d)
    try:
        f = urllib2.urlopen(baseUrl + "getQtVersion", d)
        resp = f.read()
        if not resp:
            return
        resp = json.loads(resp)
    except:
        # behind proxy, corrupt message, etc
        return
    if resp['msg']:
        self.emit(SIGNAL("newMsg"), resp)
    if resp['latestVersion'] > aqt.appVersion:
        self.emit(SIGNAL("newVerAvail"), resp)
    diff = resp['currentTime'] - time.time()
    if abs(diff) > 300:
        self.emit(SIGNAL("clockIsOff"), diff)
def run(self):
    if not self.config['updates']:
        return
    d = self._data()
    d['proto'] = 1
    d = urllib.urlencode(d)
    try:
        f = urllib2.urlopen(aqt.appUpdate, d)
        resp = f.read()
        if not resp:
            return
        resp = json.loads(resp)
    except:
        # behind proxy, corrupt message, etc
        return
    if resp['msg']:
        self.emit(SIGNAL("newMsg"), resp)
    if resp['ver']:
        self.emit(SIGNAL("newVerAvail"), resp['ver'])
    diff = resp['time'] - time.time()
    if abs(diff) > 300:
        self.emit(SIGNAL("clockIsOff"))
def decompress_and_json_load(data):
    """
    Decode a base64-encoded string and return a string that is valid JSON.
    """
    if not data:
        return u""
    assert isinstance(data, unicode), "Input `data` is not Unicode"
    try:
        b64data = base64.b64decode(data)
    except (TypeError, UnicodeEncodeError) as e:
        # `data` is not a valid base64-encoded string
        print e  # TODO: should be logged
        return "corrupted"
    try:
        ret = json.loads(b64data)
        return ret
    except ValueError as e:
        print e  # TODO: should be logged
        return "corrupted"
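# Hedged round-trip sketch (not from the original source): a hypothetical
# encoder that pairs with decompress_and_json_load() above -- serialise the
# object with json.dumps, then base64-encode it so the decoder can reverse
# the steps. The helper name and the sample data are invented.
import base64
import json

def json_dump_and_compress(obj):
    """Return a base64-encoded JSON representation of `obj`."""
    return base64.b64encode(json.dumps(obj).encode("utf8")).decode("ascii")

# encoded = json_dump_and_compress({"deck": "Default", "new_per_day": 20})
# decompress_and_json_load(encoded)  # -> the original dict again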
def run(self):
    if not self.config['updates']:
        return
    d = self._data()
    d['proto'] = 1
    d = urllib.parse.urlencode(d).encode("utf8")
    try:
        f = urllib.request.urlopen(aqt.appUpdate, d)
        resp = f.read()
        if not resp:
            print("update check load failed")
            return
        resp = json.loads(resp.decode("utf8"))
    except:
        # behind proxy, corrupt message, etc
        print("update check failed")
        return
    if resp['msg']:
        self.newMsg.emit(resp)
    if resp['ver']:
        self.newVerAvail.emit(resp['ver'])
    diff = resp['time'] - time.time()
    if abs(diff) > 300:
        self.clockIsOff.emit(diff)
def load(self, json_):
    "Load registry from JSON."
    self.changed = False
    self.models = json.loads(json_)
def load(self, decks, dconf):
    self.decks = json.loads(decks)
    self.dconf = json.loads(dconf)
    self.changed = False