def load_database(self, database_name):
    """Make 'database_name' the active database and return it.

    In server-only mode, first ask the local web server (best effort) to
    release its hold on the database. In both modes the database file is
    created if it does not exist yet, otherwise loaded.
    """

    def _new_or_load(name):
        # Create a brand-new database if the file does not exist yet,
        # otherwise load the existing one.
        if not os.path.exists(expand_path(name, self.config().data_dir)):
            self.database().new(name)
        else:
            self.database().load(name)

    if self.server_only:
        # First see if web server needs to release database. This is a
        # best-effort call: if the web server is not reachable we just
        # move on.
        try:
            con = httplib.HTTPConnection("127.0.0.1",
                self.config()["web_server_port"])
            con.request("GET", "/release_database")
            response = con.getresponse()
        except Exception:
            # Was a bare 'except:': keep the best-effort semantics, but
            # no longer swallow SystemExit/KeyboardInterrupt.
            pass
        _new_or_load(database_name)
    else:
        self.previous_database = self.config()["last_database"]
        # Only switch databases if a different one was requested.
        if self.previous_database != database_name:
            _new_or_load(database_name)
    return self.database()
def file_open(self):
    """Ask the user for a database file to open, offering to restore when
    a backup file is chosen, and back up / unload the current database.

    The stopwatch is paused for the duration of the dialog so review
    timings are not skewed.
    """
    self.stopwatch().pause()
    db = self.database()
    basedir = self.config().basedir
    old_path = expand_path(self.config()["path"], basedir)
    filename = self.main_widget().open_file_dialog(path=old_path,
        filter=_("Mnemosyne databases") + " (*%s)" % db.suffix)
    if not filename:
        # User cancelled the dialog.
        self.stopwatch().unpause()
        return
    if filename.startswith(os.path.join(basedir, "backups")):
        result = self.main_widget().question_box(\
            _("Do you want to restore from this backup?"),
            _("Yes"), _("No"), "")
        if result == 0:  # Yes
            # Throw away the current (unsaved) state and overwrite the
            # active database file with the chosen backup.
            db.abandon()
            db_path = expand_path(self.config()["path"], basedir)
            import shutil
            shutil.copy(filename, db_path)
            db.load(db_path)
            self.review_controller().reset()
            self.update_title()
            self.stopwatch().unpause()
            return
    try:
        # Log, back up and unload the current database before switching.
        self.log().saved_database()
        db.backup()
        db.unload()
    except RuntimeError, error:
        self.main_widget().error_box(unicode(error))
        self.stopwatch().unpause()
        return
def _preprocess_media(self, fact_data): mediadir = self.database().mediadir() # os.path.normpath does not convert Windows separators to Unix # separators, so we need to make sure we internally store Unix paths. for key in fact_data: for match in re_src.finditer(fact_data[key]): fact_data[key] = fact_data[key].replace(match.group(), match.group().replace("\\", "/")) # Convert sound tags to audio tags. for key in fact_data: for match in re_sound.finditer(fact_data[key]): fact_data[key] = fact_data[key].replace(match.group(), match.group().replace("sound", "audio")) # Copy files to media directory, creating subdirectories as we go. for key in fact_data: for match in re_src.finditer(fact_data[key]): filename = match.group(1) if not os.path.isabs(filename): subdir = os.path.dirname(filename) subdirs = [] while subdir: subdirs.insert(0, os.path.join(mediadir, subdir)) subdir = os.path.dirname(subdir) for subdir in subdirs: if not os.path.exists(subdir): os.mkdir(subdir) source = expand_path(filename, self.importdir) dest = expand_path(filename, mediadir) if not os.path.exists(source): self.main_widget().information_box(\ _("Missing media file") + " %s" % source) fact_data[key] = fact_data[key].replace(match.group(), "src_missing=\"%s\"" % match.group(1)) else: shutil.copy(source, dest)
def do_import(self, filename, extra_tag_names=None):
    """Merge the database in 'filename' into the current ('receiving')
    database, including learning data, logs and per-card-type settings.

    The imported database is round-tripped through a temporary *.cards
    export. The statement order (loads, log toggling, config dir swaps)
    is significant throughout.
    """
    db = self.database()
    if filename.endswith("config.db"):
        self.main_widget().show_error(\
            _("The configuration database is not used to store cards."))
        return
    data_dir = self.config().data_dir
    receiving_database_filename = \
        expand_path(self.config()["last_database"], data_dir)
    db.dump_to_science_log()
    # Heuristic to check if we haven't imported this database before.
    current_tag_ids = set([tag.id for tag in db.tags()])
    db.load(filename)
    tag_ids_to_import = set([tag.id for tag in db.tags()])
    if len(tag_ids_to_import.intersection(current_tag_ids)) >= 2:
        answer = self.main_widget().show_question(\
            _("It looks like you've imported this database before. Importing it twice will generate duplicate log entries, which will skew your statistics. Do you want to continue?"),
            _("Abort"), _("Continue"), "")
        if answer == 0:
            # Abort: reload the receiving database we started from.
            db.load(receiving_database_filename)
            return
    # Export to temporary *.cards file.
    cards_format = Mnemosyne2Cards(self.component_manager)
    tmp_cards_filename = os.path.join(data_dir, "TMP.cards")
    cards_format.do_export(tmp_cards_filename, used_for_merging_dbs=True)
    old_deactivated_card_type_fact_view_ids = \
        db.current_criterion().deactivated_card_type_fact_view_ids
    user_card_types = [card_type for card_type in db.card_types_in_use() \
        if db.is_user_card_type(card_type)]
    # Get config info to be merged. Logging is switched off while the
    # foreign config is loaded so this does not pollute our logs.
    old_config_dir = self.config().config_dir
    self.config().config_dir = os.path.dirname(filename)
    self.log().active = False
    self.config().load()
    old_config = self.config().copy()
    self.config().config_dir = old_config_dir
    self.config().load()
    self.log().active = True
    # Import the *.cards file into the receiving database.
    db.load(receiving_database_filename)
    log_index_before_import = db.current_log_index()
    db.importing_with_learning_data = True
    cards_format.do_import(\
        tmp_cards_filename, extra_tag_names, show_metadata=False)
    db.importing_with_learning_data = False
    db.merge_logs_from_other_database(filename, log_index_before_import)
    os.remove(tmp_cards_filename)
    # Preserve the deactivated views from the imported database.
    db.current_criterion().deactivated_card_type_fact_view_ids.update(\
        old_deactivated_card_type_fact_view_ids)
    db.set_current_criterion(db.current_criterion())
    # Merge the per-card-type display settings from the foreign config.
    for property_name in ["background_colour", "font", "font_colour",
        "alignment", "hide_pronunciation_field"]:
        self.log().edited_setting(property_name)
        for card_type in user_card_types:
            if card_type.id in old_config[property_name]:
                self.config()[property_name][card_type.id] = \
                    old_config[property_name][card_type.id]
    db.skip_science_log()
def load_database(self, filename):
    """Load 'filename' (or the last used database), creating it if absent.

    On failure the user is warned about USB drive letter changes and
    repeatedly offered a file dialog, but only a bounded number of times.
    """
    if not filename:
        filename = self.config()["last_database"]
    path = expand_path(filename, self.config().data_dir)
    try:
        if not os.path.exists(path):
            try:
                self.database().new(path)
            except Exception as e:
                from mnemosyne.libmnemosyne.translator import _
                # Chain the original exception ('from e') so the real
                # cause stays in the traceback instead of being
                # silently discarded, as the old code did.
                raise RuntimeError(\
                    _("Previous drive letter no longer available.")) from e
        else:
            self.database().load(path)
        self.controller().update_title()
    except RuntimeError as e:
        self.main_widget().show_error(str(e))
        from mnemosyne.libmnemosyne.translator import _
        self.main_widget().show_information(\
            _("If you are using a USB key, refer to the instructions on the website so as not to be affected by drive letter changes."))
        # Try to open a new database, but not indefinitely, otherwise this
        # could turn a crash into a much nastier one on Android.
        success = False
        counter = 0
        while not success and counter <= 5:
            counter += 1
            try:
                self.database().abandon()
                self.controller().show_open_file_dialog()
                success = True
            except RuntimeError as e:
                self.main_widget().show_error(str(e))
def expand_tag(self, tag, text):
    """Rewrite every 'tag="..."' attribute value in 'text' into a
    file:// URL rooted in the media directory, and expand the
    'db_media:///' code word to the absolute media path.
    """
    # Add "=" to make sure we don't match "Application Data".
    i = text.lower().find(tag + "=")
    while i != -1:
        start = text.find("\"", i)
        end = text.find("\"", start + 1)
        if start == -1 or end == -1:
            break
        # Bail out if there is more than just 'tag=' between the match
        # and the opening quote — then it was not the attribute we want.
        if len(text[i:start].replace(" ", "")) > len(tag) + 1:
            break
        old_path = text[start+1:end]
        if not old_path.startswith("http:"):
            new_path = expand_path(old_path, self.database().media_dir())
            if sys.platform == "win32":
                # Turn 'C:\x' into '/C:/x' so the URL is well-formed.
                new_path = "/" + new_path.replace("\\", "/")
            # '#' would be read as a URL fragment marker.
            new_path = new_path.replace("#", "%23")
            text = text[:start+1] + "file://" + new_path + text[end:]
            if sys.platform == "win32" and tag == "data":
                # presumably the consumer of 'data' attributes on Windows
                # wants plain paths, not URLs — TODO confirm.
                text = text.replace("file:///", "")
        # Since text is always longer now, we can start searching
        # from the previous end tag.
        i = text.lower().find(tag, end + 1)
    # Replace code word 'db_media:///' by the absolute path for use e.g.
    # in javascript.
    if "db_media:///" in text:
        text = text.replace("db_media:///",
            self.database().media_dir().replace("\\", "/") + "/")
        # NOTE(review): prefixing '/' here appears to fix Windows drive
        # letter paths ('C:/...' -> '/C:/...') — confirm placement.
        if not text.startswith("/"):
            text = "/" + text
    return text
def save(self, path=None):
    """Pickle the database to 'path' (default: the configured path).

    Returns -1 when the database previously failed to load (to avoid
    clobbering it with an empty one); raises RuntimeError on I/O errors.
    """
    # Don't erase a database which failed to load.
    if self.load_failed == True:
        return -1
    if not path:
        path = self.config()["path"]
    path = expand_path(path, self.config().basedir)
    # Update version.
    self.global_variables["version"] = self.version
    # Work around a sip bug: don't store card types, but their ids.
    for f in self.facts:
        f.card_type = f.card_type.id
    try:
        # Write to a backup file first, as shutting down Windows can
        # interrupt the dump command and corrupt the database.
        outfile = file(path + "~", 'wb')
        db = [self.tags, self.facts, self.cards, self.global_variables]
        cPickle.dump(db, outfile)
        outfile.close()
        shutil.move(path + "~", path)  # Should be atomic.
    except:
        raise RuntimeError, _("Unable to save file.") \
            + "\n" + traceback_string()
    self.config()["path"] = contract_path(path, self.config().basedir)
    # Work around sip bug again: restore real card type objects.
    for f in self.facts:
        f.card_type = self.card_type_by_id(f.card_type)
def load_database(self, filename):
    """Load 'filename' (or the last used database), creating it first if
    the file does not exist.

    On failure, warn about USB drive letter changes and keep prompting
    the user with a file dialog until a database loads successfully.
    """
    if not filename:
        filename = self.config()["last_database"]
    path = expand_path(filename, self.config().data_dir)
    try:
        if not os.path.exists(path):
            try:
                self.database().new(path)
            except:
                from mnemosyne.libmnemosyne.translator import _
                raise RuntimeError(_("Previous drive letter no longer available."))
        else:
            self.database().load(path)
        self.controller().update_title()
    except RuntimeError, e:
        from mnemosyne.libmnemosyne.translator import _
        self.main_widget().show_error(unicode(e))
        self.main_widget().show_information(\
            _("If you are using a USB key, refer to the instructions on the website so as not to be affected by drive letter changes."))
        # Keep offering the open-file dialog until a load succeeds.
        success = False
        while not success:
            try:
                self.database().abandon()
                self.controller().show_open_file_dialog()
                success = True
            except RuntimeError, e:
                self.main_widget().show_error(unicode(e))
def load(self, path):
    """Load the pickled (pre-2.x) database at 'path'.

    Raises LoadError when the file is missing and InvalidFormatError when
    unpickling fails; sets self.load_failed accordingly. Uses the
    module-level config()/log()/component_manager accessors.
    """
    path = expand_path(path, config().basedir)
    if self.is_loaded():
        unload_database()
    if not os.path.exists(path):
        self.load_failed = True
        raise LoadError
    try:
        infile = file(path, 'rb')
        db = cPickle.load(infile)
        # Fixed positional layout of the pickled list.
        self.start_date = db[0]
        self.categories = db[1]
        self.facts = db[2]
        self.fact_views = db[3]
        self.cards = db[4]
        infile.close()
        self.load_failed = False
    except:
        self.load_failed = True
        raise InvalidFormatError(stack_trace=True)
    # Work around a sip bug: don't store card types, but their ids.
    for f in self.facts:
        f.card_type = card_type_by_id(f.card_type)
    # TODO: This was to remove database inconsistencies. Still needed?
    #for c in self.categories:
    #    self.remove_category_if_unused(c)
    config()["path"] = contract_path(path, config().basedir)
    log().loaded_database()
    # Run any registered post-load hooks.
    for f in component_manager.get_all("function_hook", "after_load"):
        f.run()
def download_tmp_audio_file(self, card_type, foreign_text):
    """Returns a temporary filename with the audio.

    Synthesises 'foreign_text' via Google Cloud Text-to-Speech (SSML,
    female voice, MP3) in the card type's configured (sub)language and
    writes it to a fixed temp file inside the media directory.
    """
    # Prefer a card-type specific sublanguage; fall back to the main one.
    language_id = self.config().card_type_property(\
        "sublanguage_id", card_type)
    if not language_id:
        language_id = self.config().card_type_property(\
            "language_id", card_type)
    # " ج " (Arabic plural marker) separates singular and plural forms:
    # read both with a short pause in between.
    if " ج " in foreign_text:
        singular, plural = foreign_text.split(" ج ")
        foreign_text = \
            f"""<speak>{singular}<break time="0.3s"/>{plural}</speak>"""
    # NOTE(review): if the text contains both " ج " and "<br>", this
    # wraps the SSML in a second <speak> element — confirm intended.
    if "<br>" in foreign_text:
        foreign_text = "<speak>" + \
            foreign_text.replace("<br>", """<break time="0.3s"/>""") +\
            "</speak>"
    client = texttospeech.TextToSpeechClient()
    synthesis_input = texttospeech.SynthesisInput(ssml=foreign_text)
    voice = texttospeech.VoiceSelectionParams(
        language_code=language_id,
        ssml_gender=texttospeech.SsmlVoiceGender.FEMALE)
    audio_config = texttospeech.AudioConfig(
        audio_encoding=texttospeech.AudioEncoding.MP3)
    response = client.synthesize_speech(request={
        "input": synthesis_input, "voice": voice,
        "audio_config": audio_config})
    # Overwrite a fixed temporary file inside the media directory.
    filename = expand_path("__GTTS__TMP__.mp3",
        self.database().media_dir())
    with open(filename, 'wb') as mp3_file:
        mp3_file.write(response.audio_content)
    return filename
def accept(self):
    """Validate the chosen audio filename, copy the generated file into
    the media directory (creating subdirectories and remembering the
    per-card-type subdirectory) and build the <audio> tag to insert."""
    # Internally we always work with Unix separators.
    filename = self.filename_box.text().replace("\\", "/")
    if not filename:
        return QtWidgets.QDialog.accept(self)
    if os.path.isabs(filename):
        # Absolute paths must live inside the media directory.
        if not filename.startswith(\
            self.database().media_dir().replace("\\", "/")):
            self.main_widget().show_error(\
                _("Please select a filename inside the media directory."))
            self.set_default_filename()
            return
        else:
            filename = contract_path(filename, self.database().media_dir())
    # By now, filename is relative to the media dir.
    # Save subdirectory for this card type.
    local_dir = os.path.dirname(filename)
    if local_dir:
        self.config()["tts_dir_for_card_type_id"]\
            [self.card_type.id] = local_dir
        full_local_dir = expand_path(local_dir,
            self.database().media_dir())
        if not os.path.exists(full_local_dir):
            os.makedirs(full_local_dir)
    shutil.copyfile(self.tmp_filename,
        os.path.join(self.database().media_dir(), filename))
    self.text_to_insert = "<audio src=\"" + filename + "\">"
    QtWidgets.QDialog.accept(self)
def load_database(self, filename): if not filename: filename = self.config()["last_database"] path = expand_path(filename, self.config().data_dir) try: if not os.path.exists(path): try: self.database().new(path) except: from mnemosyne.libmnemosyne.translator import _ raise RuntimeError( _("Previous drive letter no longer available.")) else: self.database().load(path) self.controller().update_title() except RuntimeError, e: from mnemosyne.libmnemosyne.translator import _ self.main_widget().show_error(unicode(e)) self.main_widget().show_information(\ _("If you are using a USB key, refer to the instructions on the website so as not to be affected by drive letter changes.")) success = False while not success: try: self.database().abandon() self.controller().show_open_file_dialog() success = True except RuntimeError, e: self.main_widget().show_error(unicode(e))
def new(self, path):
    """Create an empty SQLite database at 'path', seeding the schema,
    the default activity criterion and the media directory."""
    if self.is_loaded():
        self.unload()
    self._path = expand_path(path, self.config().basedir)
    # Start from a clean slate: remove any stale file at the target.
    if os.path.exists(self._path):
        os.remove(self._path)
    self.load_failed = False
    # Create tables and seed the bookkeeping rows.
    self.con.executescript(SCHEMA)
    self.con.execute(
        "insert into global_variables(key, value) values(?,?)",
        ("version", self.version))
    self.con.execute(
        """insert into partnerships(partner, _last_log_id) values(?,?)""",
        ("log.txt", 0))
    self.con.commit()
    self.config()["path"] = contract_path(self._path,
        self.config().basedir)
    # Create default criterion.
    from mnemosyne.libmnemosyne.activity_criteria.default_criterion import \
        DefaultCriterion
    criterion = DefaultCriterion(self.component_manager)
    self._current_criterion = criterion
    self.add_activity_criterion(criterion)
    # Create media directory, with its 'latex' subdirectory.
    media_directory = self.mediadir()
    if not os.path.exists(media_directory):
        os.mkdir(media_directory)
        os.mkdir(os.path.join(media_directory, "latex"))
def show_save_file_as_dialog(self): self.stopwatch().pause() if self.config()["single_database_help_shown"] == False: self.main_widget().show_information(_(self.single_database_help)) self.config()["single_database_help_shown"] = True self.flush_sync_server() suffix = self.database().suffix old_path = expand_path(self.config()["last_database"], self.config().data_dir) old_media_dir = self.database().media_dir() filename = self.main_widget().get_filename_to_save(path=old_path, filter=_("Mnemosyne databases") + " (*%s)" % suffix) if not filename: self.stopwatch().unpause() return if filename.endswith("config.db"): self.main_widget().show_information(\ _("The configuration database cannot be used to store cards.")) self.stopwatch().unpause() return if not filename.endswith(suffix): filename += suffix try: self.database().save(filename) new_media_dir = self.database().media_dir() if old_media_dir == new_media_dir: return if os.path.exists(new_media_dir): shutil.rmtree(new_media_dir) shutil.copytree(old_media_dir, new_media_dir) self.log().saved_database() except RuntimeError, error: self.main_widget().show_error(unicode(error.message)) self.stopwatch().unpause() return
def show_insert_img_dialog(self, filter):
    """Show a file dialog filtered on the supported filetypes, get a
    filename, massage it, and return it to the widget to be inserted.

    There is more media file logic inside the database code too, as the
    user could also just type in the html tags as opposed to passing
    through the file selector here. The reason we don't do all the
    operations in the database code, is that we want to display a nice
    short relative path back in the edit field.

    """
    from mnemosyne.libmnemosyne.utils import copy_file_to_dir
    data_dir = self.config().data_dir
    media_dir = self.database().media_dir()
    start_path = expand_path(self.config()["import_img_dir"], data_dir)
    filter = _("Image files") + " " + filter
    filename = self.main_widget().get_filename_to_open(\
        start_path, filter, _("Insert image"))
    # Guard clause: dialog cancelled.
    if not filename:
        return ""
    # Remember the directory for next time, relative to the data dir.
    self.config()["import_img_dir"] = contract_path(\
        os.path.dirname(filename), data_dir)
    # Copy into the media dir and hand back a short relative path.
    copied = copy_file_to_dir(filename, media_dir)
    return contract_path(copied, media_dir)
def expand_tag(self, tag, text):
    """Rewrite every 'tag="..."' attribute value in 'text' into an
    absolute path/URL rooted in the media directory, and expand the
    'db_media:///' code word to the absolute media path."""
    # Add "=" to make sure we don't match "Application Data".
    i = text.lower().find(tag + "=")
    while i != -1:
        start = text.find("\"", i)
        end = text.find("\"", start + 1)
        if start == -1 or end == -1:
            break
        # Bail out if there is more than just 'tag=' between the match
        # and the opening quote — then it was not the attribute we want.
        if len(text[i:start].replace(" ", "")) > len(tag) + 1:
            break
        old_path = text[start+1:end]
        if not old_path.startswith("http:"):
            new_path = expand_path(old_path, self.database().media_dir())
            if sys.platform == "win32":
                # Turn 'C:\x' into '/C:/x' so the URL is well-formed.
                new_path = "/" + new_path.replace("\\", "/")
            # '#' would be read as a URL fragment marker.
            new_path = new_path.replace("#", "%23")
            # 'data' attributes on Windows get a plain path, no scheme.
            if sys.platform != "win32" or tag != "data":
                new_path = "file://" + new_path
            text = text[:start+1] + new_path + text[end:]
            end = start + len(new_path)
        # Since text is always longer now, we can start searching
        # from the previous end tag.
        i = text.lower().find(tag, end + 1)
    # Replace code word 'db_media:///' by the absolute path for use e.g.
    # in javascript.
    if "db_media:///" in text:
        text = text.replace("db_media:///",
            self.database().media_dir().replace("\\", "/") + "/")
    return text
def preprocess_media(self, fact_data, tag_names):
    """Normalise media references in 'fact_data' (in place) and copy the
    referenced files into the media directory; tag cards with missing
    media as MISSING_MEDIA (appended to 'tag_names').
    """
    missing_media = False
    media_dir = self.database().media_dir()
    # os.path.normpath does not convert Windows separators to Unix
    # separators, so we need to make sure we internally store Unix paths.
    for fact_key in fact_data:
        for match in re_src.finditer(fact_data[fact_key]):
            fact_data[fact_key] = \
                fact_data[fact_key].replace(match.group(),
                match.group().replace("\\", "/"))
    # Convert sound tags to audio tags.
    for fact_key in fact_data:
        for match in re_sound.finditer(fact_data[fact_key]):
            fact_data[fact_key] = fact_data[fact_key].replace(
                match.group(),
                match.group().replace("<sound src", "<audio src"))
    # Copy files to media directory, creating subdirectories as we go.
    # For missing media, we change the tag to scr_missing, which makes it
    # easier for the user to identify the problem if there is more than 1
    # media file missing for a card.
    for fact_key in fact_data:
        for match in re_src.finditer(fact_data[fact_key]):
            filename = match.group(1)
            # File is missing if found neither as given, nor relative to
            # the import dir, nor relative to the media dir.
            if not os.path.exists(filename) \
                and not os.path.exists(\
                    expand_path(filename, self.import_dir)) \
                and not os.path.exists(\
                    expand_path(filename, self.database().media_dir())):
                fact_data[fact_key] = \
                    fact_data[fact_key].replace(match.group(),
                    "src_missing=\"%s\"" % filename)
                missing_media = True
                continue
            # Only copy relative paths not yet present in the media dir.
            if not os.path.isabs(filename) and not os.path.exists(\
                expand_path(filename, self.database().media_dir())):
                source = expand_path(filename, self.import_dir)
                dest = expand_path(filename, media_dir)
                directory = os.path.dirname(dest)
                if not os.path.exists(directory):
                    os.makedirs(directory)
                copy(source, dest)
    if missing_media:
        tag_names.append(_("MISSING_MEDIA"))
        if not self.warned_about_missing_media:
            self.main_widget().show_information(\
                _("Warning: media files were missing. These cards have been tagged as MISSING_MEDIA. You must also change 'src_missing' to 'src' in the text of these cards."))
            self.warned_about_missing_media = True
def _process_media(self, fact):
    """Copy the media files to the media directory and edit the media
    table. We don't keep track of which facts use which media and delete
    a media file if it's no longer in use. The reason for this is that
    some people use the media directory as their only location to store
    their media files, and also use these files for other purposes.

    Note that not all 'added_media_file' events originated here, they
    are also generated by the latex subsystem, or by checking for files
    which were modified outside of Mnemosyne.

    """
    for match in re_src.finditer("".join(fact.data.values())):
        filename = match.group(2)
        if filename.startswith("http:"):
            continue
        # Advisory warnings only: processing continues.
        if len(filename) > 200:
            self.main_widget().show_information(
                _("Media filename rather long. This could cause problems using this file on a different OS."))
        if "#" in filename:
            self.main_widget().show_information(
                _("Filename contains '#', which could cause problems on some operating systems."))
        if not path_exists(filename) and \
            not path_exists(expand_path(filename, self.media_dir())):
            # Rewrite the tag to src_missing so the user can spot it.
            self.main_widget().show_error(
                _("Missing media file!") + "\n\n" + filename)
            for fact_key, value in fact.data.iteritems():
                fact.data[fact_key] = fact.data[fact_key].replace(
                    match.group(), 'src_missing="%s"' % match.group(2))
            continue
        # If needed, copy file to the media dir. Normally this happens when
        # the user clicks 'Add image' e.g., but he could have typed in the
        # full path directly.
        if os.path.isabs(filename):
            filename = copy_file_to_dir(filename, self.media_dir())
        else:
            # We always store Unix paths internally.
            filename = filename.replace("\\", "/")
        for fact_key, value in fact.data.iteritems():
            fact.data[fact_key] = value.replace(match.group(2), filename)
            self.con.execute("""update data_for_fact set value=? where _fact_id=? 
and key=?""", (fact.data[fact_key], fact._id, fact_key))
        # Register the file in the media table if not known yet.
        if self.con.execute("select count() from media where filename=?",
            (filename,)).fetchone()[0] == 0:
            self.con.execute(
                """insert into media(filename, _hash) values(?,?)""",
                (filename, self._media_hash(filename)))
            # When we are applying log entries during sync or import, the
            # side effects of e.g. ADDED_FACT events should not generate
            # additional ADDED_MEDIA_FILE events at the remote partner, so
            # we disable the logging of these side effects in that case.
            if not self.syncing and not self.importing:
                self.log().added_media_file(filename)
def new(self, path):
    """Create a new (empty) pickle database at 'path' by saving one,
    and record its path relative to the base directory."""
    if self.is_loaded():
        self.unload()
    base = self.config().basedir
    # Normalise once: absolute for disk access, relative for storage.
    absolute_path = expand_path(path, base)
    relative_path = contract_path(absolute_path, base)
    self.load_failed = False
    self.save(relative_path)
    self.config()["path"] = relative_path
    self.log().new_database()
def preprocess_media(self, fact_data, tag_names):
    """Normalise media references in 'fact_data' (in place) and copy the
    referenced files into the media directory; cards with missing media
    get a MISSING_MEDIA tag appended to 'tag_names'.
    """
    missing_media = False
    media_dir = self.database().media_dir()
    # os.path.normpath does not convert Windows separators to Unix
    # separators, so we need to make sure we internally store Unix paths.
    for fact_key in fact_data:
        for match in re_src.finditer(fact_data[fact_key]):
            fact_data[fact_key] = \
                fact_data[fact_key].replace(match.group(),
                match.group().replace("\\", "/"))
    # Convert sound tags to audio tags.
    for fact_key in fact_data:
        for match in re_sound.finditer(fact_data[fact_key]):
            fact_data[fact_key] = fact_data[fact_key].replace(
                match.group(),
                match.group().replace("<sound src", "<audio src"))
    # Copy files to media directory, creating subdirectories as we go.
    # For missing media, we change the tag to scr_missing, which makes it
    # easier for the user to identify the problem if there is more than 1
    # media file missing for a card.
    for fact_key in fact_data:
        for match in re_src.finditer(fact_data[fact_key]):
            filename = match.group(1)
            # Missing if found neither as given, nor relative to the
            # import dir, nor relative to the media dir.
            if not os.path.exists(filename) \
                and not os.path.exists(\
                    expand_path(filename, self.import_dir)) \
                and not os.path.exists(\
                    expand_path(filename, self.database().media_dir())):
                fact_data[fact_key] = \
                    fact_data[fact_key].replace(match.group(),
                    "src_missing=\"%s\"" % filename)
                missing_media = True
                continue
            # Only copy relative paths not yet present in the media dir.
            if not os.path.isabs(filename) and not os.path.exists(\
                expand_path(filename, self.database().media_dir())):
                source = expand_path(filename, self.import_dir)
                dest = expand_path(filename, media_dir)
                directory = os.path.dirname(dest)
                if not os.path.exists(directory):
                    os.makedirs(directory)
                copy(source, dest)
    if missing_media:
        tag_names.append(_("MISSING_MEDIA"))
        if not self.warned_about_missing_media:
            self.main_widget().show_information(\
                _("Warning: media files were missing. These cards have been tagged as MISSING_MEDIA. You must also change 'src_missing' to 'src' in the text of these cards."))
            self.warned_about_missing_media = True
def all_media_filenames(self):
    """Determine all media files, for use in the initial full sync."""
    media_dir = self.media_dir()
    result = set()
    rows = self.con.execute(\
        """select object_id from log where event_type=? or
        event_type=?""",
        (EventTypes.ADDED_MEDIA_FILE, EventTypes.EDITED_MEDIA_FILE))
    for row in rows:
        filename = row[0]
        try:
            present = os.path.exists(expand_path(filename, media_dir))
        except UnicodeError:
            # Workaround Android encoding issue.
            present = os.path.exists(\
                expand_path(filename, media_dir).encode("utf-8"))
        if present:
            result.add(filename)
    return result
def show_open_file_dialog(self):
    """Ask the user which database to open and switch to it, handling
    the special cases (*.cards files, config.db, backups) and restoring
    the previous database if the new one fails to load.

    The stopwatch is paused while the dialog is up and unpaused on
    every exit path.
    """
    self.stopwatch().pause()
    self.flush_sync_server()
    db = self.database()
    data_dir = self.config().data_dir
    old_path = expand_path(self.config()["last_database"], data_dir)
    filename = self.main_widget().get_filename_to_open(
        path=old_path,
        filter=_("Mnemosyne databases") + " (*%s)" % db.suffix)
    if not filename:
        self.stopwatch().unpause()
        return
    if filename.endswith(".cards"):
        self.stopwatch().unpause()
        self.main_widget().show_information(\
            _("'*.cards' files are not separate databases, but need to be imported in your current database through 'File - Import'."))
        return
    if filename.endswith("config.db"):
        self.stopwatch().unpause()
        self.main_widget().show_information(\
            _("The configuration database is not used to store cards."))
        return
    # A file picked from the backups directory means a restore.
    if os.path.normpath(filename).startswith(\
        os.path.normpath(os.path.join(data_dir, "backups"))):
        result = self.main_widget().show_question(\
            _("Do you want to replace your current database with one restored from this backup?\nNote that this will result in conflicts during the next sync, which need to be resolved by a full sync."),
            _("Yes"), _("No"), "")
        if result == 0:  # Yes.
            # Note that we don't save the current database first in this
            # case, as the user wants to throw it away. This mainly
            # prohibits dumping to the science log.
            db.restore(filename)
            self.reset_study_mode()
            self.update_title()
        self.stopwatch().unpause()
        return
    # Save, back up and unload the current database before switching.
    if self.database().is_loaded():
        try:
            self.log().saved_database()
            db.backup()
            db.unload()
        except RuntimeError as error:
            self.main_widget().show_error(str(error))
            self.stopwatch().unpause()
            return
    try:
        db.load(filename)
        self.log().loaded_database()
        self.log().future_schedule()
    except Exception as error:
        # Fall back to the previously loaded database.
        self.main_widget().show_error(str(error))
        db.abandon()
        db.load(old_path)
        self.stopwatch().unpause()
        return
    self.reset_study_mode()
    self.update_title()
    self.stopwatch().unpause()
def do_import(self, filename, extra_tag_names=None):
    """Merge the database in 'filename' into the current ('receiving')
    database, including learning data, logs and per-card-type settings.

    The imported database is round-tripped through a temporary *.cards
    export. The statement order (loads, log toggling, config dir swaps)
    is significant throughout.
    """
    db = self.database()
    if filename.endswith("config.db"):
        self.main_widget().show_error(\
            _("The configuration database is not used to store cards."))
        return
    data_dir = self.config().data_dir
    receiving_database_filename = \
        expand_path(self.config()["last_database"], data_dir)
    db.dump_to_science_log()
    # Heuristic to check if we haven't imported this database before.
    current_tag_ids = set([tag.id for tag in db.tags()])
    db.load(filename)
    tag_ids_to_import = set([tag.id for tag in db.tags()])
    if len(tag_ids_to_import.intersection(current_tag_ids)) >= 2:
        answer = self.main_widget().show_question(\
            _("It looks like you've imported this database before. Importing it twice will generate duplicate log entries, which will skew your statistics. Do you want to continue?"),
            _("Abort"), _("Continue"), "")
        if answer == 0:
            # Abort: reload the receiving database we started from.
            db.load(receiving_database_filename)
            return
    # Export to temporary *.cards file.
    cards_format = Mnemosyne2Cards(self.component_manager)
    tmp_cards_filename = os.path.join(data_dir, "TMP.cards")
    cards_format.do_export(tmp_cards_filename, used_for_merging_dbs=True)
    old_deactivated_card_type_fact_view_ids = \
        db.current_criterion().deactivated_card_type_fact_view_ids
    user_card_types = [card_type for card_type in db.card_types_in_use() \
        if db.is_user_card_type(card_type)]
    # Get config info to be merged. Logging is switched off while the
    # foreign config is loaded so this does not pollute our logs.
    old_config_dir = self.config().config_dir
    self.config().config_dir = os.path.dirname(filename)
    self.log().active = False
    self.config().load()
    old_config = self.config().copy()
    self.config().config_dir = old_config_dir
    self.config().load()
    self.log().active = True
    # Import the *.cards file into the receiving database.
    db.load(receiving_database_filename)
    log_index_before_import = db.current_log_index()
    db.importing_with_learning_data = True
    cards_format.do_import(\
        tmp_cards_filename, extra_tag_names, show_metadata=False)
    db.importing_with_learning_data = False
    db.merge_logs_from_other_database(filename, log_index_before_import)
    os.remove(tmp_cards_filename)
    # Preserve the deactivated views from the imported database.
    db.current_criterion().deactivated_card_type_fact_view_ids.update(\
        old_deactivated_card_type_fact_view_ids)
    db.set_current_criterion(db.current_criterion())
    # Merge the per-card-type display settings from the foreign config.
    for property_name in ["background_colour", "font", "font_colour",
        "alignment", "hide_pronunciation_field"]:
        self.log().edited_setting(property_name)
        for card_type in user_card_types:
            if card_type.id in old_config[property_name]:
                self.config()[property_name][card_type.id] = \
                    old_config[property_name][card_type.id]
    db.skip_science_log()
def all_media_filenames(self):
    """Return the logged media filenames that still exist on disk,
    relative to the media directory."""
    rows = self.con.execute(\
        """select object_id from log where event_type=? or
        event_type=?""",
        (EventTypes.ADDED_MEDIA, EventTypes.UPDATED_MEDIA))
    return [row[0] for row in rows
            if os.path.exists(expand_path(row[0], self.mediadir()))]
def insert_sound(self):
    """Let the user pick a sound file, insert the corresponding <sound>
    tag into the edit field and remember the directory for next time."""
    path = expand_path(config()["import_sound_dir"])
    fname = unicode(QFileDialog.getOpenFileName(self, _("Insert sound"),
        path, _("Sound files") + \
        " (*.wav *.mp3 *.ogg *.WAV *.MP3 *.OGG)"))
    if fname:
        # Store the path in its short, contracted form.
        self.insertPlainText("<sound src=\""+contract_path(fname)+"\">")
        config()["import_sound_dir"] = \
            contract_path(os.path.dirname(fname))
def _process_media(self, fact):
    """Copy the media files to the media directory and edit the media
    table. We don't keep track of which facts use which media and delete
    a media file if it's no longer in use. The reason for this is that
    some people use the media directory as their only location to store
    their media files, and also use these files for other purposes.

    Note that not all 'added_media_file' events originated here, they
    are also generated by the latex subsystem, or by checking for files
    which were modified outside of Mnemosyne.

    """
    for match in re_src.finditer("".join(fact.data.values())):
        filename = match.group(2)
        if filename.startswith("http:"):
            continue
        # Advisory warnings only: processing continues.
        if len(filename) > 200:
            self.main_widget().show_information(\
                _("Media filename rather long. This could cause problems using this file on a different OS."))
        if "#" in filename:
            self.main_widget().show_information(\
                _("Filename contains '#', which could cause problems on some operating systems."))
        if not os.path.exists(filename) and \
            not os.path.exists(expand_path(filename, self.media_dir())):
            # Rewrite the tag to src_missing so the user can spot it.
            self.main_widget().show_error(
                _("Missing media file!") + "\n\n" + filename)
            for fact_key, value in fact.data.iteritems():
                fact.data[fact_key] = \
                    fact.data[fact_key].replace(match.group(),
                    "src_missing=\"%s\"" % match.group(2))
            continue
        # If needed, copy file to the media dir. Normally this happens when
        # the user clicks 'Add image' e.g., but he could have typed in the
        # full path directly.
        if os.path.isabs(filename):
            filename = copy_file_to_dir(filename, self.media_dir())
        else:
            # We always store Unix paths internally.
            filename = filename.replace("\\", "/")
        for fact_key, value in fact.data.iteritems():
            fact.data[fact_key] = value.replace(match.group(2), filename)
            self.con.execute("""update data_for_fact set value=? where _fact_id=? 
and key=?""", (fact.data[fact_key], fact._id, fact_key))
        # Register the file in the media table if not known yet.
        if self.con.execute("select count() from media where filename=?",
            (filename, )).fetchone()[0] == 0:
            self.con.execute(
                """insert into media(filename, _hash) values(?,?)""",
                (filename, self._media_hash(filename)))
            # When we are applying log entries during sync or import, the
            # side effects of e.g. ADDED_FACT events should not generate
            # additional ADDED_MEDIA_FILE events at the remote partner, so
            # we disable the logging of these side effects in that case.
            if not self.syncing and not self.importing:
                self.log().added_media_file(filename)
def test_save_as(self): fact_data = {"f": "question", "b": "answer"} card_type = self.card_type_with_id("1") self.controller().create_new_cards(fact_data, card_type, grade=-1, tag_names=["default"]) new_name = self.config()["last_database"] + ".bak" assert self.database().save(self.config()["last_database"] + ".bak") != -1 assert self.config()["last_database"] == new_name assert new_name != expand_path(new_name, self.config().data_dir)
def insert_img(self): path = expand_path(config()["import_img_dir"]) fname = unicode(QFileDialog.getOpenFileName(self, _("Insert image"), path, _("Image files") + \ " (*.png *.gif *.jpg *.bmp *.jpeg" + \ " *.PNG *.GIF *.jpg *.BMP *.JPEG)")) if fname: self.insertPlainText("<img src=\""+contract_path(fname)+"\">") config()["import_img_dir"] = \ contract_path(os.path.dirname(fname))
def load(self, path): if self.is_loaded(): self.unload() self._path = expand_path(path, self.config().basedir) # checking database format and convert to current format if needed. fixer = DBFixer(self, self.component_manager) fixer.fix() del fixer # Check database version. try: sql_res = self.con.execute("""select value from global_variables where key=?""", ("version", )).fetchone() self.load_failed = False except sqlite3.OperationalError: self.main_widget().error_box( _("Another copy of Mnemosyne is still running.") + "\n" + _("Continuing is impossible and will lead to data loss!")) sys.exit() except: self.load_failed = True raise RuntimeError, _("Unable to load file.") if sql_res["value"] != self.version: self.load_failed = True raise RuntimeError, \ _("Unable to load file: database version mismatch.") # Instantiate card types stored in this database. for cursor in self.con.execute("select id from card_types"): id = cursor[0] card_type = self.get_card_type(id, id_is_internal=-1) self.component_manager.register(card_type) # Identify missing plugins for card types and their parents. plugin_needed = set() active_ids = set(card_type.id for card_type in self.card_types()) for cursor in self.con.execute("""select distinct card_type_id from facts"""): id = cursor[0] while "::" in id: # Move up one level of the hierarchy. id, child_name = id.rsplit("::", 1) if id not in active_ids: plugin_needed.add(id) if id not in active_ids: plugin_needed.add(id) for card_type_id in plugin_needed: try: self._activate_plugin_for_card_type(card_type_id) except RuntimeError, exception: self._connection.close() self._connection = None self.load_failed = True raise exception
def default_filename(self, card_type, foreign_text): if foreign_text.count(" ") <= 1: filename = foreign_text.replace("?", "").replace("/", "")\ .replace("\\", "") + ".mp3" else: filename = datetime.datetime.today().strftime("%Y%m%d.mp3") local_dir = self.config()["tts_dir_for_card_type_id"]\ .get(card_type.id, "") filename = os.path.join(local_dir, filename) full_path = expand_path(filename, self.database().media_dir()) full_path = make_filename_unique(full_path) filename = contract_path(full_path, self.database().media_dir()) return filename
def run(self, text, fact): i = text.lower().find("src") while i != -1: start = text.find('"', i) end = text.find('"', start + 1) if end == -1: break old_path = text[start + 1 : end] text = text[: start + 1] + "file:\\\\" + expand_path(old_path) + text[end:] # Since text is always longer now, we can start searching # from the previous end tag. i = text.lower().find("src", end + 1) return text
def insert_video(self, filter): from mnemosyne.libmnemosyne.utils import copy_file_to_dir basedir, mediadir = self.config().basedir, self.database().mediadir() path = expand_path(self.config()["import_video_dir"], basedir) filter = _("Video files") + " " + filter filename = self.main_widget().open_file_dialog(path, filter, _("Insert video")) if not filename: return "" else: self.config()["import_video_dir"] = contract_path(os.path.dirname(filename), basedir) filename = copy_file_to_dir(filename, mediadir) return filename
def check_for_edited_media_files(self): # Regular media files. new_hashes = {} for sql_res in self.con.execute("select filename, _hash from media"): filename, hash = sql_res[0], sql_res[1] if not path_exists(expand_path(filename, self.media_dir())): continue new_hash = self._media_hash(filename) if hash != new_hash: new_hashes[filename] = new_hash for filename, new_hash in new_hashes.iteritems(): self.con.execute("update media set _hash=? where filename=?", (new_hash, filename)) self.log().edited_media_file(filename)
def download_tmp_audio_file(self, card_type, foreign_text): """Returns a temporary filename with the audio.""" language_id = self.config().card_type_property(\ "sublanguage_id", card_type) if not language_id: language_id = self.config().card_type_property(\ "language_id", card_type) tts = gTTS(foreign_text, language_id) filename = expand_path("__GTTS__TMP__.mp3", self.database().media_dir()) tts.save(filename) return filename
def run(self, text): i = text.lower().find("src") while i != -1: start = text.find("\"", i) end = text.find("\"", start+1) if start == -1 or end == -1: break old_path = text[start+1:end] text = text[:start+1] + "file:\\\\" + expand_path(old_path, self.database().mediadir()) + text[end:] # Since text is always longer now, we can start searching # from the previous end tag. i = text.lower().find("src", end+1) return text
def media_filenames_to_sync_for(self, partner): # Note that Mnemosyne does not delete media files on its own, so # DELETED_MEDIA log entries are irrelevant/ignored. # We do have to make sure we don't return any files that have been # deleted, though. _id = self.last_log_index_synced_for(partner) filenames = [] for filename in [cursor[0] for cursor in self.con.execute(\ """select object_id from log where _id>? and (event_type=? or event_type=?)""", (_id, EventTypes.ADDED_MEDIA, EventTypes.UPDATED_MEDIA))]: if os.path.exists(expand_path(filename, self.mediadir())): filenames.append(filename) return filenames
def check_for_edited_media_files(self): # Regular media files. new_hashes = {} for sql_res in self.con.execute("select filename, _hash from media"): filename, hash = sql_res[0], sql_res[1] if not os.path.exists(expand_path(filename, self.media_dir())): continue new_hash = self._media_hash(filename) if hash != new_hash: new_hashes[filename] = new_hash for filename, new_hash in new_hashes.iteritems(): self.con.execute("update media set _hash=? where filename=?", (new_hash, filename)) self.log().edited_media_file(filename)
def show_insert_flash_dialog(self, filter): from mnemosyne.libmnemosyne.utils import copy_file_to_dir data_dir, media_dir = self.config().data_dir, self.database().media_dir() path = expand_path(self.config()["import_flash_dir"], data_dir) filter = _("Flash files") + " " + filter filename = self.main_widget().get_filename_to_open(\ path, filter, _("Insert Flash")) if not filename: return "" else: self.config()["import_flash_dir"] = contract_path(\ os.path.dirname(filename), data_dir) filename = copy_file_to_dir(filename, media_dir) return filename
def file_open(self): stopwatch.pause() old_path = expand_path(config()["path"]) out = self.widget.open_file_dialog(path=old_path, filter=_("Mnemosyne databases (*.mem)")) if not out: stopwatch.unpause() return try: database().unload() except MnemosyneError, e: self.widget.error_box(e) stopwatch.unpause() return
def load_database(self, database_name): if self.server_only: # First see if web server needs to release database. try: con = httplib.HTTPConnection("127.0.0.1", self.config()["web_server_port"]) con.request("GET", "/release_database") response = con.getresponse() except: pass if not os.path.exists(expand_path(database_name, self.config().data_dir)): self.database().new(database_name) else: self.database().load(database_name) else: self.previous_database = self.config()["last_database"] if self.previous_database != database_name: if not os.path.exists(expand_path(database_name, self.config().data_dir)): self.database().new(database_name) else: self.database().load(database_name) return self.database()
def add_media(self, log_entry): """ADDED_MEDIA events get created in several places: database._process_media, database.check_for_updated_media_files, latex, ... . In order to make sure that all of these are treated in the same way, we generate an ADDED_MEDIA event here, and prevent _process_media from generating this event through self.syncing = True. """ filename = log_entry["fname"] if os.path.exists(expand_path(filename, self.mediadir())): self.con.execute("""insert or replace into media(filename, _hash) values(?,?)""", (filename, self._media_hash(filename))) self.log().added_media(filename)
def media_filenames_to_sync_for(self, partner): """Determine which media files need to be sent across during the sync. Obviously, this only includes existing media files, not deleted ones. """ _id = self.last_log_index_synced_for(partner) filenames = set() for filename in [cursor[0] for cursor in self.con.execute(\ """select object_id from log where _id>? and (event_type=? or event_type=?)""", (_id, EventTypes.ADDED_MEDIA_FILE, EventTypes.EDITED_MEDIA_FILE))]: if os.path.exists(\ normalise_path(expand_path(filename, self.media_dir()))): filenames.add(filename) return filenames
def add_media_file(self, log_entry): """ADDED_MEDIA_FILE events get created in several places: database._process_media, database.check_for_edited_media_files, latex, ... . In order to make sure that all of these are treated in the same way, we generate an ADDED_MEDIA_FILE event here, and prevent _process_media from generating this event through self.syncing = True. """ filename = log_entry["fname"] if os.path.exists(expand_path(filename, self.media_dir())): self.con.execute("""insert or replace into media(filename, _hash) values(?,?)""", (filename, self._media_hash(filename))) self.log().added_media_file(filename)
def load(self, fname): """Load database from file.""" # Unload opened database if exists self.unload() self.path = expand_path(fname, config().basedir) try: res = self.conn.execute("select value from meta where key=?", ("start_date", )).fetchone() self.load_failed = False self.set_start_date(StartDate(datetime.strptime(res["value"], "%Y-%m-%d %H:%M:%S"))) self.load_failed = False except sqlite.OperationalError: self.load_failed = True
def save(self, path=None): # Don't erase a database which failed to load. if self.load_failed == True: return -1 # Update format. self.con.execute("update global_variables set value=? where key=?", (self.version, "version" )) # Save database and copy it to different location if needed. self.con.commit() if not path: return dest_path = expand_path(path, self.config().basedir) if dest_path != self._path: shutil.copy(self._path, dest_path) self._path = dest_path self.config()["path"] = contract_path(path, self.config().basedir)
def file_save_as(self): stopwatch.pause() old_path = expand_path(config()["path"]) out = self.widget.save_file_dialog(path=old_path, filter=_("Mnemosyne databases (*.mem)")) if not out: stopwatch.unpause() return if not out.endswith(".mem"): out += ".mem" try: database().save(out) except MnemosyneError, e: self.widget.error_box(e) stopwatch.unpause() return
def download_tmp_audio_file(self, card_type, foreign_text): """Returns a temporary filename with the audio.""" language_id = self.config().card_type_property(\ "sublanguage_id", card_type) if not language_id: language_id = self.config().card_type_property(\ "language_id", card_type) foreign_text = urllib.parse.quote(foreign_text.encode("utf-8")) headers = {'User-Agent': 'Mozilla/5.0'} url = "https://code.responsivevoice.org/getvoice.php?t=%s&tl=%s" \ % (foreign_text, language_id) req = urllib.request.Request(url=url, headers=headers) with urllib.request.urlopen(req) as response: data = response.read() filename = expand_path("__GTTS__TMP__.mp3", self.database().media_dir()) with open(filename, 'wb') as mp3_file: mp3_file.write(data) return filename
def delete_unused_media_files(self, unused_files): """Delete media files which are no longer in use. 'unused_files' should be a subset of 'self.unused_media_files', because here we no longer check if these media files are used or not. """ for filename in unused_files: os.remove(expand_path(filename, self.media_dir())) self.log().deleted_media_file(filename) # Purge empty dirs. for root, dirnames, filenames in \ os.walk(self.media_dir(), topdown=False): contracted_root = contract_path(root, self.media_dir()) if not contracted_root or contracted_root.startswith("_"): continue if len(filenames) == 0 and len(dirnames) == 0: os.rmdir(root) # Other media files, e.g. latex. for f in self.component_manager.all("hook", "delete_unused_media_files"): f.run() remove_empty_dirs_in(self.media_dir())
def download_tmp_audio_file(self, card_type, foreign_text): """Returns a temporary filename with the audio.""" language_id = self.config().card_type_property(\ "sublanguage_id", card_type) if not language_id: language_id = self.config().card_type_property(\ "language_id", card_type) client = texttospeech.TextToSpeechClient() synthesis_input = texttospeech.types.SynthesisInput(ssml=foreign_text) voice = texttospeech.types.VoiceSelectionParams( language_code=language_id, ssml_gender=texttospeech.enums.SsmlVoiceGender.FEMALE) audio_config = texttospeech.types.AudioConfig( audio_encoding=texttospeech.enums.AudioEncoding.MP3) response = client.synthesize_speech(synthesis_input, voice, audio_config) filename = expand_path("__GTTS__TMP__.mp3", self.database().media_dir()) with open(filename, 'wb') as mp3_file: mp3_file.write(response.audio_content) return filename
class Upgrade1(Component): """Upgrade from 1.x to 2.x.""" def run(self): # pragma: no cover join = os.path.join # Only do this upgrade once. if not self.database().is_empty(): return # Determine old data_dir. home = os.path.expanduser("~") if sys.platform == "darwin": # This is where backup_old_dir put the old data dir. old_data_dir = join(unicode(home), "Library", "Mnemosyne_1") else: try: home = home.decode(locale.getdefaultlocale()[1]) except: pass old_data_dir = join(home, ".mnemosyne") # We split off the rest to a separate function for testability. if os.path.exists(old_data_dir): self.upgrade_from_old_data_dir(old_data_dir) def backup_old_dir(self): # pragma: no cover join = os.path.join # We only do this on OSX, since on the other platforms, we use a # different directory anyway. if sys.platform == "darwin": home = os.path.expanduser("~") old_data_dir = join(unicode(home), "Library", "Mnemosyne") backup_dir = join(unicode(home), "Library", "Mnemosyne_1") # Work around os.path.exists seeming to give wrong results on # OSX 10.6 (but not 10.7). if os.path.exists(join(old_data_dir, "default.db")): # Data was already backed up. return if os.path.exists(old_data_dir): if not os.path.exists(backup_dir): old_files = sorted(os.listdir(old_data_dir)) shutil.move(old_data_dir, backup_dir) new_files = sorted(os.listdir(backup_dir)) assert old_files == new_files self.main_widget().show_information(\ _("Your old 1.x files are now stored here:\n\n" + backup_dir)) else: self.main_widget().show_error(\ _("Tried to backup your old 1.x files to %s, but that directory already exists.") \ % (backup_dir,)) sys.exit() def upgrade_from_old_data_dir(self, old_data_dir): join = os.path.join try: old_data_dir = unicode(old_data_dir) except: old_data_dir = unicode(old_data_dir, "mbcs") # Warn people that this directory is no longer used. file(join(old_data_dir, "DIRECTORY_NO_LONGER_USED_BY_MNEMOSYNE2"), "w").close() # Read old configuration. 
old_config = {} config_file = file(join(old_data_dir, "config"), "rb") for key, value in cPickle.load(config_file).iteritems(): old_config[key] = value # Migrate configuration settings. if "user_id" in old_config: self.config()["user_id"] = old_config["user_id"] if "upload_logs" in old_config: self.config()["upload_science_logs"] = old_config["upload_logs"] if "non_latin_font_size_increase" in old_config: self.config()["non_latin_font_size_increase"] \ = old_config["non_latin_font_size_increase"] for card_type in self.card_types(): if "QA_font" in old_config: self.config().set_card_type_property("font", old_config["QA_font"], card_type) if "left_align" in old_config and old_config["left_align"]: for card_type in self.card_types(): self.config().set_card_type_property("alignment", "left", card_type) # Migrate latex settings. setting_for_file = { "dvipng": "dvipng", "preamble": "latex_preamble", "postamble": "latex_postamble" } for filename, setting in setting_for_file.iteritems(): full_filename = join(old_data_dir, "latex", filename) self.config()[setting] = "" if os.path.exists(full_filename): for line in file(full_filename): self.config()[setting] += line # Copy over everything that does not interfere with Mnemosyne 2. new_data_dir = self.config().data_dir new_media_dir = self.database().media_dir() shutil.rmtree(join(new_data_dir, "history")) names = [name for name in os.listdir(old_data_dir) if name not in ["backups", "config", "config.py", "config.pyc", "DIRECTORY_NO_LONGER_USED_BY_MNEMOSYNE2", "error_log.txt", "latex", "plugins", "log.txt", "history"] \ and not name.endswith(".mem") and not name is None] self.main_widget().set_progress_text(_("Copying files from 1.x...")) # By copying over the history folder and log.txt, we also completely # preserve the state of all the files that need to uploaded to the # science server. 
self.main_widget().set_progress_range(len(names) + 2) if os.path.exists(join(old_data_dir, "history")): shutil.copytree(join(old_data_dir, "history"), join(new_data_dir, "history")) self.main_widget().increase_progress(1) shutil.copyfile(join(old_data_dir, "log.txt"), join(new_data_dir, "log.txt")) self.main_widget().increase_progress(1) # We copy all the other files to the media directory. In this way, # if there are media files that are not explicitly referenced in the # cards, it will be easier for the user to fix his path errors after # the upgrade. for name in names: if os.path.isdir(join(old_data_dir, name)): try: shutil.copytree(join(old_data_dir, name), join(new_media_dir, name)) except OSError, e: # https://bugs.launchpad.net/mnemosyne-proj/+bug/1210435 import errno if e.errno != errno.EEXIST: raise e self.main_widget().show_information(\ "Skipping copying of %s because it already exists.") \ % (name, ) else: shutil.copyfile(join(old_data_dir, name), join(new_media_dir, name)) self.main_widget().increase_progress(1) # Upgrade database. old_database = expand_path("default.mem", old_data_dir) for format in self.component_manager.all("file_format"): if format.__class__.__name__ == "Mnemosyne1Mem": format.do_import(old_database) self.review_controller().reset() # Give info to the user. info = _("Upgrade from Mnemosyne 1.x complete!") + "\n\n" info += _("Mnemosyne 2.x now stores its data here:") + "\n\n" info += self.config().data_dir + "\n" if self.config().config_dir != \ self.config().data_dir: # pragma: no cover # Only happens on Linux, outside of the test suite. info += self.config().config_dir self.main_widget().show_information(info)
def upgrade_from_old_data_dir(self, old_data_dir):
    """Migrate settings, latex config, media files and the database
    itself from a Mnemosyne 1.x data directory into the 2.x layout."""
    join = os.path.join
    # Warn people that this directory is no longer used.
    open(join(old_data_dir,
        "DIRECTORY_NO_LONGER_USED_BY_MNEMOSYNE2"), "w").close()
    # Read old configuration.
    old_config = {}
    config_file = open(join(old_data_dir, "config"), "rb")
    import pickle
    for key, value in pickle.load(config_file).items():
        old_config[key] = value
    # Migrate configuration settings.
    if "user_id" in old_config:
        self.config()["user_id"] = old_config["user_id"]
    if "upload_logs" in old_config:
        self.config()["upload_science_logs"] = old_config["upload_logs"]
    if "non_latin_font_size_increase" in old_config:
        self.config()["non_latin_font_size_increase"] \
            = old_config["non_latin_font_size_increase"]
    for card_type in self.card_types():
        if "QA_font" in old_config:
            self.config().set_card_type_property("font",
                old_config["QA_font"], card_type)
    if "left_align" in old_config and old_config["left_align"]:
        for card_type in self.card_types():
            self.config().set_card_type_property("alignment",
                "left", card_type)
    # Migrate latex settings.
    setting_for_file = {"dvipng": "dvipng",
        "preamble": "latex_preamble", "postamble": "latex_postamble"}
    for filename, setting in setting_for_file.items():
        full_filename = join(old_data_dir, "latex", filename)
        self.config()[setting] = ""
        if os.path.exists(full_filename):
            for line in open(full_filename):
                self.config()[setting] += line
    # Copy over everything that does not interfere with Mnemosyne 2.
    new_data_dir = self.config().data_dir
    new_media_dir = self.database().media_dir()
    import shutil
    shutil.rmtree(join(new_data_dir, "history"))
    names = [name for name in os.listdir(old_data_dir) if name not in
        ["backups", "config", "config.py", "config.pyc",
        "DIRECTORY_NO_LONGER_USED_BY_MNEMOSYNE2", "error_log.txt",
        "latex", "plugins", "log.txt", "history"] \
        and not name.endswith(".mem") and not name is None]
    self.main_widget().set_progress_text(_("Copying files from 1.x..."))
    # By copying over the history folder and log.txt, we also completely
    # preserve the state of all the files that need to uploaded to the
    # science server.
    self.main_widget().set_progress_range(len(names) + 2)
    if os.path.exists(join(old_data_dir, "history")):
        shutil.copytree(join(old_data_dir, "history"),
            join(new_data_dir, "history"))
    self.main_widget().increase_progress(1)
    shutil.copyfile(join(old_data_dir, "log.txt"),
        join(new_data_dir, "log.txt"))
    self.main_widget().increase_progress(1)
    # We copy all the other files to the media directory. In this way,
    # if there are media files that are not explicitly referenced in the
    # cards, it will be easier for the user to fix his path errors after
    # the upgrade.
    for name in names:
        if os.path.isdir(join(old_data_dir, name)):
            try:
                shutil.copytree(join(old_data_dir, name),
                    join(new_media_dir, name))
            except OSError as e:
                # https://bugs.launchpad.net/mnemosyne-proj/+bug/1210435
                import errno
                if e.errno != errno.EEXIST:
                    raise e
                # BUGFIX: '%' used to be applied to the *return value* of
                # show_information (a TypeError at runtime) instead of to
                # the message string.
                self.main_widget().show_information(
                    "Skipping copying of %s because it already exists." \
                    % (name, ))
        else:
            shutil.copyfile(join(old_data_dir, name),
                join(new_media_dir, name))
        self.main_widget().increase_progress(1)
    # Upgrade database.
    old_database = expand_path("default.mem", old_data_dir)
    for format in self.component_manager.all("file_format"):
        if format.__class__.__name__ == "Mnemosyne1Mem":
            format.do_import(old_database)
    self.controller().reset_study_mode()
    # Give info to the user.
    info = _("Upgrade from Mnemosyne 1.x complete!") + "\n\n"
    info += _("Mnemosyne 2.x now stores its data here:") + "\n\n"
    info += self.config().data_dir + "\n"
    if self.config().config_dir != \
        self.config().data_dir:  # pragma: no cover
        # Only happens on Linux, outside of the test suite.
        info += self.config().config_dir
    self.main_widget().show_information(info)