class MyServer(Server):

    """Throw-away sync server used by the sync test suite.

    Builds a fresh Mnemosyne instance in "dot_sync_server", seeds it with
    20 cards, and serves it until the client disconnects.
    """

    program_name = "Mnemosyne"
    program_version = "test"
    capabilities = "TODO"

    def __init__(self):
        # Start from a clean slate: wipe any leftover server data dir.
        data_dir = os.path.abspath("dot_sync_server")
        shutil.rmtree(data_dir, ignore_errors=True)
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
                                   interested_in_old_reps=True)
        components = self.mnemosyne.components
        components.insert(0, ("mnemosyne.libmnemosyne.gui_translator",
                              "GetTextGuiTranslator"))
        components.append(("test_sync", "Widget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = [
            ("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(data_dir, automatic_upgrades=False)
        self.mnemosyne.config().change_user_id("user_id")
        self.mnemosyne.review_controller().reset()
        # Force the plain (non-binary) transfer code path for this test.
        self.supports_binary_transfer = lambda x: False
        # Seed the database with 20 ungraded cards in the default tag.
        card_type = self.mnemosyne.card_type_with_id("1")
        for index in range(20):
            card_data = {"f": "question %d" % (index, ), "b": "answer"}
            self.mnemosyne.controller().create_new_cards(
                card_data, card_type, grade=-1, tag_names=["default"])[0]
        self.mnemosyne.database().save()
        self.mnemosyne.database().release_connection()

    def authorise(self, login, password):
        # Single hard-coded test account.
        return (login, password) == ("user", "pass")

    def load_database(self, database_name):
        self.mnemosyne.database().load(database_name)
        return self.mnemosyne.database()

    def unload_database(self, database):
        self.mnemosyne.database().release_connection()
        # Crude but effective: stopping the wsgi server forces a server
        # restart with a fresh database for the next sync, instead of
        # sending old history back and forth.
        self.wsgi_server.stop()

    def run(self):
        Server.__init__(self, "client_machine_id", 8186,
                        self.mnemosyne.main_widget())
        self.serve_until_stopped()
class MyServer(Server):

    """Throw-away sync server fixture.

    Creates a clean Mnemosyne instance under "dot_sync_server", fills it
    with 20 cards, and serves it until the client is done.
    """

    program_name = "Mnemosyne"
    program_version = "test"
    capabilities = "TODO"

    def __init__(self):
        # Make sure we do not reuse state from a previous run.
        data_dir = os.path.abspath("dot_sync_server")
        shutil.rmtree(data_dir, ignore_errors=True)
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
                                   interested_in_old_reps=True)
        components = self.mnemosyne.components
        components.insert(0, ("mnemosyne.libmnemosyne.translator",
                              "GetTextTranslator"))
        components.append(("test_sync", "Widget"))
        components.append(("mnemosyne_test", "TestReviewWidget"))
        self.mnemosyne.initialise(data_dir, automatic_upgrades=False)
        self.mnemosyne.config().change_user_id("user_id")
        self.mnemosyne.review_controller().reset()
        # Force the plain (non-binary) transfer code path for this test.
        self.supports_binary_transfer = lambda x: False
        # Seed the database with 20 ungraded cards in the default tag.
        card_type = self.mnemosyne.card_type_with_id("1")
        for index in range(20):
            card_data = {"f": "question %d" % (index,), "b": "answer"}
            self.mnemosyne.controller().create_new_cards(
                card_data, card_type, grade=-1, tag_names=["default"])[0]
        self.mnemosyne.database().save()
        self.mnemosyne.database().release_connection()

    def authorise(self, login, password):
        # Single hard-coded test account.
        return (login, password) == ("user", "pass")

    def load_database(self, database_name):
        self.mnemosyne.database().load(database_name)
        return self.mnemosyne.database()

    def unload_database(self, database):
        self.mnemosyne.database().release_connection()
        # Crude but effective: stopping the wsgi server forces a server
        # restart with a fresh database for the next sync, instead of
        # sending old history back and forth.
        self.wsgi_server.stop()

    def run(self):
        Server.__init__(self, "client_machine_id", 8186,
                        self.mnemosyne.main_widget())
        self.serve_until_stopped()
class MyServer(Server):

    """Sync server fixture that serves a freshly seeded database from
    "sync_from_here" and shuts itself down after a single sync
    (stop_after_sync is True)."""

    program_name = "Mnemosyne"
    program_version = "test"
    capabilities = "TODO"
    # Makes serve_forever() return after one completed sync.
    stop_after_sync = True

    def __init__(self):
        # Portability fix: the original shelled out with
        # os.system("rm -rf sync_from_here"), which silently does nothing on
        # Windows and ignores failures.  shutil.rmtree with ignore_errors=True
        # is portable and matches the other sync-server fixtures.
        # Imported locally so this class stays self-contained.
        import shutil
        shutil.rmtree("sync_from_here", ignore_errors=True)
        self.mnemosyne = Mnemosyne()
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translator", "GetTextTranslator"))
        self.mnemosyne.components.append(("test_sync", "Widget"))
        self.mnemosyne.components.append(
            ("mnemosyne.libmnemosyne.ui_components.dialogs",
             "ProgressDialog"))
        self.mnemosyne.components.append(
            ("mnemosyne.libmnemosyne.ui_components.review_widget",
             "ReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("sync_from_here"))
        self.mnemosyne.config().change_user_id("user_id")
        self.mnemosyne.review_controller().reset()
        # Add 20 ungraded cards to the database.
        card_type = self.mnemosyne.card_type_by_id("1")
        for i in range(20):
            fact_data = {"q": "question %d" % (i,), "a": "answer"}
            self.mnemosyne.controller().create_new_cards(fact_data, card_type,
                grade=-1, tag_names=["default"])[0]
        self.mnemosyne.database().save()

    def authorise(self, login, password):
        # Single hard-coded test account.
        return login == "user" and password == "pass"

    def open_database(self, database_name):
        return self.mnemosyne.database()

    def run(self):
        Server.__init__(self, "client_machine_id", "192.168.2.54", 8186,
                        self.mnemosyne.main_widget())
        # Because stop_after_sync is True, serve_forever will actually stop
        # after one sync.
        self.serve_forever()
        self.mnemosyne.finalise()
class TestLogging(MnemosyneTest):

    """Exercises the science-log machinery: the rows written to the sqlite
    'log' table, _id stability across vacuum, recovery of the user id from
    archived log files, upload-index bookkeeping, and log archiving."""

    def restart(self):
        """Bring up a fresh Mnemosyne instance on the existing "dot_test"
        data directory, simulating a program restart (the database files are
        kept)."""
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(
            0, ("mnemosyne.libmnemosyne.translators.gettext_translator",
                "GetTextTranslator"))
        self.mnemosyne.components.append(
            ("test_logging", "MyMainWidget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \
            [("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)

    def test_logging(self):
        """Run a create/review/restart/delete session and verify the rows in
        the 'log' table one _id at a time."""
        # Session 1: create one card, then grade it 0, 1 and 4.
        card_type = self.card_type_with_id("1")
        fact_data = {"f": "1", "b": "b"}
        card = self.controller().create_new_cards(fact_data, card_type,
            grade=-1, tag_names=["default"])[0]
        card_id_1 = card.id
        self.review_controller().show_new_question()
        self.review_controller().grade_answer(0)
        self.review_controller().show_new_question()
        self.review_controller().grade_answer(1)
        self.review_controller().grade_answer(4)
        self.mnemosyne.finalise()
        # Session 2: restart, add a second card and delete it again.
        self.restart()
        card_type = self.card_type_with_id("1")
        fact_data = {"f": "2", "b": "b"}
        card = self.controller().create_new_cards(fact_data, card_type,
            grade=-1, tag_names=["default"])[0]
        self.review_controller().show_new_question()
        self.controller().delete_current_card()
        self.log().dump_to_science_log()
        # Start-of-session events.  NOTE(review): sql_res[1] is the
        # event_type column (asserted below); sql_res[3] is presumably the
        # machine/object id -- confirm against the log table schema.
        sql_res = self.database().con.execute(
            "select * from log where _id=1").fetchone()
        assert sql_res[1] == EventTypes.STARTED_PROGRAM
        sql_res = self.database().con.execute(
            "select * from log where _id=2").fetchone()
        assert sql_res[1] == EventTypes.STARTED_SCHEDULER
        sql_res = self.database().con.execute(
            "select * from log where _id=3").fetchone()
        assert sql_res[1] == EventTypes.LOADED_DATABASE
        assert sql_res[6] == 0
        assert sql_res[7] == 0
        assert sql_res[8] == 0
        # _ids 4-10 are not checked here; from _id 11 onwards we see the
        # first card being created.
        sql_res = self.database().con.execute(
            "select * from log where _id=11").fetchone()
        assert sql_res[1] == EventTypes.ADDED_TAG
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(
            "select * from log where _id=12").fetchone()
        assert sql_res[1] == EventTypes.EDITED_CRITERION
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(
            "select * from log where _id=13").fetchone()
        assert sql_res[1] == EventTypes.ADDED_FACT
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(
            "select * from log where _id=14").fetchone()
        assert sql_res[1] == EventTypes.ADDED_CARD
        assert sql_res[3] is not None
        # Three repetition events for the grades 0, 1 and 4 given above.
        sql_res = self.database().con.execute(
            "select * from log where _id=15").fetchone()
        assert sql_res[1] == EventTypes.REPETITION
        assert sql_res[6] == 1
        assert sql_res[7] == 0
        assert sql_res[11] == 0
        assert sql_res[12] == 0
        # NOTE(review): sql_res[14] and sql_res[2] look like next_rep and
        # timestamp, so this asserts a zero new interval -- confirm schema.
        assert sql_res[14] - sql_res[2] == 0
        assert sql_res[13] == 0
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(
            "select * from log where _id=16").fetchone()
        assert sql_res[1] == EventTypes.REPETITION
        assert sql_res[6] == 2
        assert sql_res[7] == 0
        assert sql_res[11] == 0
        assert sql_res[12] <= 10  # Depends on CPU load.
        assert sql_res[14] - sql_res[2] == 0
        assert sql_res[13] == 0
        sql_res = self.database().con.execute(
            "select * from log where _id=17").fetchone()
        assert sql_res[1] == EventTypes.REPETITION
        assert sql_res[6] == 3
        assert sql_res[7] == 0
        assert sql_res[11] == 0
        assert sql_res[12] <= 10  # Depends on CPU load.
        # The grade-4 repetition must schedule the card into the future.
        new_interval = sql_res[14] - sql_res[2]
        assert new_interval > 0
        assert sql_res[13] == 0
        # End-of-session-1 events.
        sql_res = self.database().con.execute(
            "select * from log where _id=18").fetchone()
        assert sql_res[1] == EventTypes.SAVED_DATABASE
        assert sql_res[6] == 0
        assert sql_res[7] == 0
        assert sql_res[8] == 1
        sql_res = self.database().con.execute(
            "select * from log where _id=19").fetchone()
        assert sql_res[1] == EventTypes.STOPPED_PROGRAM
        # Session 2 starts here.
        sql_res = self.database().con.execute(
            "select * from log where _id=20").fetchone()
        assert sql_res[1] == EventTypes.STARTED_PROGRAM
        sql_res = self.database().con.execute(
            "select * from log where _id=21").fetchone()
        assert sql_res[1] == EventTypes.STARTED_SCHEDULER
        sql_res = self.database().con.execute(
            "select * from log where _id=22").fetchone()
        assert sql_res[1] == EventTypes.LOADED_DATABASE
        assert sql_res[6] == 0
        assert sql_res[7] == 0
        assert sql_res[8] == 1
        # Second card: creation followed by deletion.
        sql_res = self.database().con.execute(
            "select * from log where _id=30").fetchone()
        assert sql_res[1] == EventTypes.ADDED_FACT
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(
            "select * from log where _id=31").fetchone()
        assert sql_res[1] == EventTypes.ADDED_CARD
        sql_res = self.database().con.execute(
            "select * from log where _id=32").fetchone()
        assert sql_res[1] == EventTypes.DELETED_CARD
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(
            "select * from log where _id=33").fetchone()
        assert sql_res[1] == EventTypes.DELETED_FACT
        assert sql_res[3] is not None
        # Finally dump to the plain-text science log and check that the
        # grade-4 repetition of the first card made it there with the
        # expected new interval.
        self.config()["upload_science_logs"] = True
        self.database().dump_to_science_log()
        logfile = os.path.join(os.path.abspath("dot_test"), "log.txt")
        found = False
        for line in open(logfile):
            if "R " + card_id_1 + " 4" in line:
                found = True
                assert str(new_interval) + " 0 | 0.0" in line
        assert found == True

    def test_unique_index(self):
        """Log _ids must keep increasing across a vacuum: after deleting all
        but the last log entry and vacuuming, new entries may not reuse old
        _ids."""
        fact_data = {"f": "question", "b": "answer"}
        card_type_2 = self.card_type_with_id("2")
        card_1, card_2 = self.controller().create_new_cards(fact_data,
            card_type_2, grade=-1, tag_names=["default"])
        log_index = self.database().con.execute(
            """select _id from log order by _id desc limit 1""").fetchone()[0]
        # Note: we need to keep the last log entry intact, otherwise indexes
        # start again at 1 and mess up the sync.
        self.database().con.execute("""delete from log where _id <?""",
            (log_index, ))
        self.database().save()
        self.database().con.execute("""vacuum""")
        fact_data = {"f": "question2", "b": "answer2"}
        card_type_2 = self.card_type_with_id("1")
        card_1 = self.controller().create_new_cards(fact_data, card_type_2,
            grade=-1, tag_names=["default"])
        # The smallest remaining _id is still the entry we preserved.
        assert self.database().con.execute(
            """select _id from log order by _id limit 1""").fetchone()[0] \
            == log_index

    def test_recover_user_id(self):
        """With config.db gone, the user id is recovered from an archived
        history file named <user_id>_<index>.bz2."""
        assert self.config()["user_id"] is not None
        MnemosyneTest.teardown(self)
        # Fake an uploaded history file for user "userid" and delete the
        # config database that normally stores the user id.
        open(os.path.join(os.getcwd(), "dot_test", "history",
            "userid_001.bz2"), "w")
        os.remove(os.path.join(os.getcwd(), "dot_test", "config.db"))
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(
            0, ("mnemosyne.libmnemosyne.translators.gettext_translator",
                "GetTextTranslator"))
        self.mnemosyne.components.append(
            ("test_logging", "MyMainWidget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \
            [("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()
        assert self.config()["user_id"] == "userid"

    def test_recover_user_id_2(self):
        """Same as test_recover_user_id, but with a machine id embedded in
        the history file name (<user_id>_<machine>_<index>.bz2)."""
        assert self.config()["user_id"] is not None
        MnemosyneTest.teardown(self)
        open(os.path.join(os.getcwd(), "dot_test", "history",
            "userid_machine_001.bz2"), "w")
        os.remove(os.path.join(os.getcwd(), "dot_test", "config.db"))
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(
            0, ("mnemosyne.libmnemosyne.translators.gettext_translator",
                "GetTextTranslator"))
        self.mnemosyne.components.append(
            ("test_logging", "MyMainWidget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \
            [("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()
        assert self.config()["user_id"] == "userid"

    def test_log_index_of_last_upload_1(self):
        # No history files at all: nothing was uploaded yet.
        assert self.log().log_index_of_last_upload() == 0

    def test_log_index_of_last_upload_2(self):
        # Both an old-style (user_<index>) and a this-machine
        # (user_<machine_id>_<index>) file exist; the machine-specific
        # index wins.
        machine_id = self.config().machine_id()
        for filename in ["user_001.bz2", "user_%s_2.bz2" % machine_id]:
            open(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 2

    def test_log_index_of_last_upload_3(self):
        # A single old-style file: its index is used.
        machine_id = self.config().machine_id()
        for filename in ["user_001.bz2"]:
            open(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 1

    def test_log_index_of_last_upload_4(self):
        machine_id = self.config().machine_id()
        for filename in ["user_005.bz2"]:
            open(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 5

    def test_log_index_of_last_upload_5(self):
        # A history file from a different machine does not count.
        machine_id = self.config().machine_id()
        for filename in ["user_othermachine_005.bz2"]:
            open(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 0

    def test_log_index_of_last_upload_6(self):
        # This machine's file beats another machine's higher index.
        machine_id = self.config().machine_id()
        for filename in ["user_othermachine_005.bz2",
                         "user_%s_2.bz2" % machine_id]:
            open(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 2

    def test_log_index_of_last_upload_7(self):
        machine_id = self.config().machine_id()
        for filename in ["user_001.bz2", "user_othermachine_005.bz2",
                         "user_%s_2.bz2" % machine_id]:
            open(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 2

    def test_log_upload(self):
        """Run three sessions with upload_science_logs=True and
        max_log_size_before_upload=1 (presumably so every session's log
        exceeds the upload threshold -- confirm)."""
        machine_id_file = os.path.join(self.mnemosyne.config().config_dir,
            "machine.id")
        f = open(machine_id_file, "w")
        print("TESTMACHINE", file=f)
        f.close()
        self.config().change_user_id("UPLOADTEST")
        self.config()["max_log_size_before_upload"] = 1
        MnemosyneTest.teardown(self)
        # Session 1.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(
            0, ("mnemosyne.libmnemosyne.translators.gettext_translator",
                "GetTextTranslator"))
        self.mnemosyne.components.append(
            ("test_logging", "MyMainWidget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \
            [("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()
        MnemosyneTest.teardown(self)
        # Session 2.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(
            0, ("mnemosyne.libmnemosyne.translators.gettext_translator",
                "GetTextTranslator"))
        self.mnemosyne.components.append(
            ("test_logging", "MyMainWidget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \
            [("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()
        MnemosyneTest.teardown(self)
        # Session 3.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(
            0, ("mnemosyne.libmnemosyne.translators.gettext_translator",
                "GetTextTranslator"))
        self.mnemosyne.components.append(
            ("test_logging", "MyMainWidget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \
            [("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()

    def test_log_upload_bad_server(self):
        """Like test_log_upload, but with science_server pointing at a host
        that does not exist; the sessions must still run through."""
        # Most reliable way of setting this variable is through config.py,
        # otherwise it will stay alive in a dangling imported userconfig.
        config_py_file = os.path.join(self.mnemosyne.config().config_dir,
            "config.py")
        f = open(config_py_file, "w")
        print("science_server = \"noserver:80\"", file=f)
        f.close()
        machine_id_file = os.path.join(self.mnemosyne.config().config_dir,
            "machine.id")
        f = open(machine_id_file, "w")
        print("TESTMACHINE", file=f)
        f.close()
        self.config().change_user_id("UPLOADTEST")
        self.config()["max_log_size_before_upload"] = 1
        MnemosyneTest.teardown(self)
        # Session 1.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(
            0, ("mnemosyne.libmnemosyne.translators.gettext_translator",
                "GetTextTranslator"))
        self.mnemosyne.components.append(
            ("test_logging", "MyMainWidget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \
            [("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()
        MnemosyneTest.teardown(self)
        # Session 2.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(
            0, ("mnemosyne.libmnemosyne.translators.gettext_translator",
                "GetTextTranslator"))
        self.mnemosyne.components.append(
            ("test_logging", "MyMainWidget"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \
            [("mnemosyne_test", "TestReviewWidget")]
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()

    def mem_importer(self):
        """Return the Mnemosyne1Mem file-format component, used to import
        old *.mem databases."""
        for format in self.mnemosyne.component_manager.all("file_format"):
            if format.__class__.__name__ == "Mnemosyne1Mem":
                return format

    def test_archive_old_logs(self):
        """archive_old_logs moves the older part of the log table into a
        separate sqlite database under dot_test/archive."""
        # Import old history.
        filename = os.path.join(os.getcwd(), "tests", "files", "basedir_bz2",
            "default.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(
            "select count() from log").fetchone()[0] == 23
        assert not os.path.exists(os.path.join("dot_test", "archive"))
        # Archive.
        self.database().archive_old_logs()
        # 23 entries minus 11 archived leaves 12 in the main database.
        assert self.database().con.execute(
            "select count() from log").fetchone()[0] == 12
        archive_name = os.listdir(
            os.path.join(os.getcwd(), "dot_test", "archive"))[0]
        archive_path = os.path.join(os.getcwd(), "dot_test", "archive",
            archive_name)
        import sqlite3
        arch_con = sqlite3.connect(archive_path)
        assert arch_con.execute("select count() from log").fetchone()[0] == 11
class TestMemImport(MnemosyneTest): def setup(self): self.initialise_data_dir() self.mnemosyne = Mnemosyne(upload_science_logs=False, interested_in_old_reps=True, asynchronous_database=True) self.mnemosyne.components.insert(0, ("mnemosyne.libmnemosyne.translators.gettext_translator", "GetTextTranslator")) self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = \ [("mnemosyne_test", "TestReviewWidget")] self.mnemosyne.components.append(\ ("test_mem_import", "Widget")) self.mnemosyne.components.append(\ ("test_mem_import", "MyImportDialog")) self.mnemosyne.initialise(os.path.abspath("dot_test"), automatic_upgrades=False) self.review_controller().reset() def mem_importer(self): for format in self.mnemosyne.component_manager.all("file_format"): if format.__class__.__name__ == "Mnemosyne1Mem": return format @mock.patch("mnemosyne.libmnemosyne.file_formats.mnemosyne1_mem.open", mock.Mock(side_effect=[FileNotFoundError, IndexError("Mocked Error")])) def test_exceptions(self): filename = os.path.join(os.getcwd(), "tests", "files", "nothere.mem") self.mem_importer().do_import(filename) assert last_error.startswith("Unable to open") self.mem_importer().do_import("name_does_not_matter") assert last_error.strip().endswith("IndexError: Mocked Error") def test_card_type_1(self): filename = os.path.join(os.getcwd(), "tests", "files", "1sided.mem") self.mem_importer().do_import(filename) self.review_controller().reset() assert self.database().card_count() == 1 card = self.review_controller().card assert card.grade == 2 assert card.easiness == 2.5 assert card.acq_reps == 1 assert card.ret_reps == 0 assert card.lapses == 0 assert card.acq_reps_since_lapse == 1 assert card.ret_reps_since_lapse == 0 assert [tag.name for tag in card.tags] == ["__UNTAGGED__"] assert card.last_rep == 1247529600 assert card.next_rep == 1247616000 assert card.id == "9cff728f" def test_card_type_1_unseen(self): filename = os.path.join(os.getcwd(), "tests", "files", "1sided_unseen.mem") 
self.mem_importer().do_import(filename) self.review_controller().reset() assert self.database().card_count() == 1 card = self.review_controller().card assert card.grade == -1 assert card.easiness == 2.5 assert card.acq_reps == 0 assert card.ret_reps == 0 assert card.lapses == 0 assert card.acq_reps_since_lapse == 0 assert card.ret_reps_since_lapse == 0 assert card.last_rep == -1 assert card.next_rep == -1 assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_CARD, )).fetchone()[0] == 1 def test_card_type_1_edited(self): filename = os.path.join(os.getcwd(), "tests", "files", "1sided.mem") self.mem_importer().do_import(filename) self.review_controller().reset() assert self.database().card_count() == 1 card = self.review_controller().card assert card.id == "9cff728f" assert "question" in card.question() filename = os.path.join(os.getcwd(), "tests", "files", "1sided.mem") self.mem_importer().do_import(filename) assert last_error.startswith("These cards seem to have been imported before") def test_card_type_2(self): filename = os.path.join(os.getcwd(), "tests", "files", "2sided.mem") self.mem_importer().do_import(filename) self.review_controller().reset() assert self.database().card_count() == 2 card_1 = self.review_controller().card assert "question" in card_1.question() assert "answer" in card_1.answer() cards = self.database().cards_from_fact(card_1.fact) if cards[0] == card_1: card_2 = cards[1] else: card_2 = cards[0] assert "question" in card_2.answer() assert "answer" in card_2.question() assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_CARD, )).fetchone()[0] == 2 def test_card_type_3(self): filename = os.path.join(os.getcwd(), "tests", "files", "3sided.mem") self.mem_importer().do_import(filename) self.review_controller().reset() assert self.database().card_count() == 2 card_1 = self.review_controller().card assert card_1.fact.data == {"f": "f", "p_1": "p", "m_1": 
"t"} assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_CARD, )).fetchone()[0] == 2 def test_card_type_3_corrupt(self): filename = os.path.join(os.getcwd(), "tests", "files", "3sided_corrupt.mem") self.mem_importer().do_import(filename) self.review_controller().reset() assert self.database().card_count() == 2 card_1 = self.review_controller().card assert card_1.fact.data == {"f": "f", "m_1": "t"} assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_CARD, )).fetchone()[0] == 2 def test_card_type_3_missing(self): filename = os.path.join(os.getcwd(), "tests", "files", "3sided_missing.mem") self.mem_importer().do_import(filename) self.review_controller().reset() assert self.database().card_count() == 1 card_1 = self.review_controller().card assert card_1.fact.data == {"f": "t", "b": "f\np"} assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_CARD, )).fetchone()[0] == 1 def test_media(self): os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs")) os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs", "figs")) figures = [\ os.path.join(os.getcwd(), "tests", "files", "a.png"), os.path.join(os.getcwd(), "tests", "files", "figs", "a.png"), os.path.join(os.getcwd(), "tests", "files", "figs", "figs", "a.png")] for filename in figures: open(filename, "w") filename = os.path.join(os.getcwd(), "tests", "files", "media.mem") self.mem_importer().do_import(filename) assert os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "a.png")) assert os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "figs", "a.png")) assert os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "figs", "a.png")) assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 3 def test_media_missing(self): 
os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs")) os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs", "figs")) figures = [\ os.path.join(os.getcwd(), "tests", "files", "a.png"), os.path.join(os.getcwd(), "tests", "files", "figs", "a.png")] for filename in figures: open(filename, "w") filename = os.path.join(os.getcwd(), "tests", "files", "media.mem") self.mem_importer().do_import(filename) assert os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "a.png")) assert os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "figs", "a.png")) assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 2 def test_media_missing_2(self): filename = os.path.join(os.getcwd(), "tests", "files", "media.mem") self.mem_importer().do_import(filename) assert not os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "a.png")) assert not os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "figs", "a.png")) assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 0 def test_media_slashes(self): os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs")) os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs", "figs")) figures = [\ os.path.join(os.getcwd(), "tests", "files", "a.png"), os.path.join(os.getcwd(), "tests", "files", "figs", "a.png"), os.path.join(os.getcwd(), "tests", "files", "figs", "figs", "a.png")] for filename in figures: open(filename, "w") filename = os.path.join(os.getcwd(), "tests", "files", "media_slashes.mem") self.mem_importer().do_import(filename) assert os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "a.png")) assert os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "figs", "a.png")) assert os.path.exists(os.path.join(\ 
os.path.abspath("dot_test"), "default.db_media", "figs", "a.png")) assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 3 def test_media_quotes(self): filename = os.path.join(os.getcwd(), "tests", "files", "basedir_media", "default.mem") self.mem_importer().do_import(filename) assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 1 def test_sound(self): os.mkdir(os.path.join(\ os.getcwd(), "tests", "files", "soundfiles")) soundname = os.path.join(os.path.join(\ os.getcwd(), "tests", "files", "soundfiles", "a.ogg")) open(soundname, "w") filename = os.path.join(os.getcwd(), "tests", "files", "sound.mem") self.mem_importer().do_import(filename) assert os.path.exists(os.path.join(\ os.path.abspath("dot_test"), "default.db_media", "soundfiles", "a.ogg")) assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 1 self.review_controller().reset() card = self.review_controller().card assert card.fact["f"] == """<audio src="soundfiles/a.ogg">""" def test_map(self): filename = os.path.join(os.getcwd(), "tests", "files", "map.mem") self.mem_importer().do_import(filename) self.review_controller().reset() assert self.database().card_count() == 2 card = self.review_controller().card assert card.fact["loc"] == "<b>Drenthe</b>" assert card.fact["marked"] == \ """<img src_missing="maps/Netherlands-Provinces/Drenthe.png">""" assert card.fact["blank"] == \ """<img src_missing="maps/Netherlands-Provinces/Netherlands-Provinces.png">""" def test_dups(self): filename = os.path.join(os.getcwd(), "tests", "files", "dups.mem") self.mem_importer().do_import(filename) self.review_controller().reset() assert self.review_controller().card.fact["loc"] == \ """<b>Freistaat Th\xfcringen (Free State of Thuringia)</b>""" assert 
self.review_controller().card.tag_string() == "Germany: States, MISSING_MEDIA" def test_logs_new_1(self): self.database().update_card_after_log_import = (lambda x, y, z: 0) self.database().before_1x_log_import() filename = os.path.join(os.getcwd(), "tests", "files", "new_1.txt") ScienceLogParser(self.database()).parse(filename) assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.ADDED_CARD, )).fetchone()[0] == 1 assert self.database().con.execute(\ "select count() from log where event_type=?", (EventTypes.REPETITION, )).fetchone()[0] == 10 assert self.database().con.execute(\ "select acq_reps from log where event_type=? and object_id='9525224f'", (EventTypes.REPETITION, )).fetchone()[0] == 1 assert self.database().con.execute(\ "select acq_reps_since_lapse from log where event_type=? and object_id='9525224f'", (EventTypes.REPETITION, )).fetchone()[0] == 1 assert self.database().con.execute(\ """select scheduled_interval from log where event_type=? and object_id='9525224f' order by _id desc limit 1""", (EventTypes.REPETITION, )).fetchone()[0] == (6)*60*60*24 assert self.database().con.execute(\ """select actual_interval from log where event_type=? and object_id='9525224f' order by _id desc limit 1""", (EventTypes.REPETITION, )).fetchone()[0] == 0 # This is an artificial log. timestamp = self.database().con.execute(\ """select timestamp from log where event_type=? and object_id='9525224f' order by _id desc limit 1""", (EventTypes.REPETITION, )).fetchone()[0] next_rep = self.database().con.execute(\ """select next_rep from log where event_type=? and object_id='9525224f' order by _id desc limit 1""", (EventTypes.REPETITION, )).fetchone()[0] assert next_rep - timestamp == (14-3)*60*60*24 assert self.database().con.execute(\ "select count() from log").fetchone()[0] == 25 assert self.database().con.execute(\ "select acq_reps from log where event_type=? 
            order by _id desc limit 1""", (EventTypes.LOADED_DATABASE, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? order by _id desc limit 1",
            (EventTypes.LOADED_DATABASE, )).fetchone()[0] == 7
        assert self.database().con.execute(\
            "select lapses from log where event_type=? order by _id desc limit 1",
            (EventTypes.LOADED_DATABASE, )).fetchone()[0] == 336
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? order by _id desc limit 1",
            (EventTypes.SAVED_DATABASE, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? order by _id desc limit 1",
            (EventTypes.SAVED_DATABASE, )).fetchone()[0] == 12
        assert self.database().con.execute(\
            "select lapses from log where event_type=? order by _id desc limit 1",
            (EventTypes.SAVED_DATABASE, )).fetchone()[0] == 341

    def test_logs_new_2(self):
        # New-style 2.x log with a single repetition: the ADDED_CARD event and
        # the repetition's (acq_reps, acq_reps_since_lapse) counters must both
        # be imported as 1 for card '8da62cfb'.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_2.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='8da62cfb'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='8da62cfb'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_new_3(self):
        # Four repetitions of the same card: counters must be right both for
        # the first repetition and for the most recent one (order by _id desc).
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_3.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 4
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='5106b621'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='5106b621'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps from log where event_type=? and object_id='5106b621' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps_since_lapse from log where event_type=? and object_id='5106b621' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_new_4(self):
        # Two repetitions: the second one must bump ret_reps from 0 to 1.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_4.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='b7601e0c'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? and object_id='b7601e0c'",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='b7601e0c'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps from log where event_type=? and object_id='b7601e0c' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select ret_reps from log where event_type=? and object_id='b7601e0c' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps_since_lapse from log where event_type=? and object_id='b7601e0c' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_new_5(self):
        # 2.x log using full card ids (fact uuid + card-type/view suffix);
        # also checks that the scheduler name is logged as an event.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_5.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1'",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            """select ret_reps from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            """select acq_reps_since_lapse from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            """select object_id from log where event_type=?""",
            (EventTypes.STARTED_SCHEDULER, )).fetchone()[0] == "SM2 Mnemosyne"

    def test_logs_new_6(self):
        # Full row-level check of two repetition events: columns are checked
        # positionally (grade, easiness, reps counters, scheduled/actual
        # intervals, thinking time, next_rep offset).
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_6.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        sql_res = self.database().con.execute(\
            "select * from log where event_type=? and object_id='4c53e29a-f9e9-498b-8beb-d3a494f61bca.1.1'",
            (EventTypes.REPETITION, )).fetchone()
        assert sql_res[4] == 5
        assert sql_res[5] == 2.5
        assert sql_res[6] == 1
        assert sql_res[7] == 0
        assert sql_res[8] == 0
        assert sql_res[9] == 1
        assert sql_res[10] == 0
        assert sql_res[11] == 0
        assert sql_res[12] == 0
        # Next rep is scheduled 4 days (345600 s) after this repetition.
        assert sql_res[14] - sql_res[2] == 345600
        assert sql_res[13] == 0
        sql_res = self.database().con.execute(\
            """select * from log where event_type=? and object_id='4c53e29a-f9e9-498b-8beb-d3a494f61bca.1.1' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()
        assert sql_res[4] == 2
        assert sql_res[5] == 2.5
        assert sql_res[6] == 1
        assert sql_res[7] == 1
        assert sql_res[8] == 0
        assert sql_res[9] == 1
        assert sql_res[10] == 1
        assert sql_res[11] == 302986
        assert sql_res[12] == 10
        assert sql_res[14] - sql_res[2] == 475774
        assert sql_res[13] == 1

    def test_logs_imported_1(self):
        # Log for a card that was imported (as opposed to created locally).
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "imported_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 3
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='f5d9bbe7'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? and object_id='f5d9bbe7'",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='f5d9bbe7'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps from log where event_type=? and object_id='f5d9bbe7' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select ret_reps from log where event_type=? and object_id='f5d9bbe7' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            """select acq_reps_since_lapse from log where event_type=? and object_id='f5d9bbe7' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_imported_2(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "imported_2.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='14670f10'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? and object_id='14670f10'",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='14670f10'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_imported_3(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "imported_3.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1

    def test_restored_1(self):
        # Card restored from a backup: full repetition row is checked.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "restored_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        sql_res = self.database().con.execute(\
            "select * from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()
        assert sql_res[4] == 1
        assert sql_res[5] == 2.36
        assert sql_res[6] == 23
        assert sql_res[7] == 8
        assert sql_res[8] == 2
        assert sql_res[9] == 0
        assert sql_res[10] == 0
        assert sql_res[11] == 89 * 24 * 60 * 60
        assert sql_res[12] == 0 # No last rep data.
        assert sql_res[14] - sql_res[2] == 0
        assert sql_res[13] == 5

    def test_restored_2(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "restored_2.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1

    def test_logs_act_interval(self):
        # Actual (as opposed to scheduled) interval must survive the import.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "actinterval_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            """select actual_interval from log where event_type=? and object_id='f1300e5a' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 5

    def test_logs_deleted(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "delete_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.DELETED_CARD, )).fetchone()[0] == 1

    def test_logs_corrupt_1(self): # Wrong data, missing creation event.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "corrupt_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where object_id=?",
            ("4b59b830", )).fetchone()[0] == 3

    def test_logs_corrupt_2(self): # Wrong data, isolated deletion event.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "corrupt_2.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select count() from log where object_id=?",
            ("4b59b830", )).fetchone()[0] == 0

    def test_two_mem_files_sharing_same_logs(self):
        # Two 1.x .mem decks share one log history: importing the second deck
        # must pick up the extra repetitions without duplicating events.
        filename = os.path.join(os.getcwd(), "tests", "files", "basedir_2_mem",
            "deck1.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        filename = os.path.join(os.getcwd(), "tests", "files", "basedir_2_mem",
            "deck2.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 3
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 2
        card = self.database().card("4c8fff73", is_id_internal=False)
        assert self.database().average_thinking_time(card) == 1.5
        assert self.database().total_thinking_time(card) == 3.0
        assert self.database().card_count_for_grade(0, active_only=True) == 2
        tag = self.database().get_or_create_tag_with_name("666")
        assert self.database().card_count_for_grade_and_tag(0, tag,
            active_only=True) == 0
        # Statistics pages must see the imported grades: none for tag '666',
        # two grade-1 cards overall (-1 means 'all tags').
        from mnemosyne.libmnemosyne.statistics_pages.grades import Grades
        page = Grades(component_manager=self.mnemosyne.component_manager)
        page.prepare_statistics(tag._id)
        assert page.y == [0, 0, 0, 0, 0, 0, 0]
        page.prepare_statistics(-1)
        assert page.y == [0, 2, 0, 0, 0, 0, 0]

    def test_bz2(self):
        # 1.x basedir with bz2-compressed logs: card is imported, but its
        # archived repetitions are not replayed into the log table.
        filename = os.path.join(os.getcwd(), "tests", "files", "basedir_bz2",
            "default.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where object_id=?",
            ("82f2ed0d", )).fetchone()[0] == 0

    def test_sch(self):
        self.controller().show_import_file_dialog()
        assert self.database().card_count_scheduled_n_days_ago(0) == 1

    def test_upgrade(self):
        # Full 1.x -> 2.x data-dir upgrade: config values, science-log upload
        # index, marker directory, archived history and log.txt must all match.
        old_data_dir = os.path.join(os.getcwd(), "tests", "files", "basedir_bz2")
        from mnemosyne.libmnemosyne.upgrades.upgrade1 import Upgrade1
        Upgrade1(self.mnemosyne.component_manager).upgrade_from_old_data_dir(old_data_dir)
        assert self.config()["dvipng"].rstrip() == \
            "dvipng -D 300 -T tight tmp.dvi\necho"
        assert "14pt" in self.config()["latex_preamble"]
        assert self.config()["user_id"] == "f3fb13c7"
        assert self.log().log_index_of_last_upload() == 2
        assert os.path.exists(os.path.join(old_data_dir,
            "DIRECTORY_NO_LONGER_USED_BY_MNEMOSYNE2"))
        assert os.path.exists(os.path.join(self.mnemosyne.config().data_dir,
            "history", "a_2.bz2"))
        log = open(os.path.join(self.mnemosyne.config().data_dir, "log.txt"))
        assert log.readline().strip() == \
            "2005-11-01 09:29:08 : Imported item 82f2ed0d 0 0 0 0 0"

    def teardown(self):
        # Remove every artefact the import/upgrade tests may have left behind
        # before delegating to the base-class teardown.
        filename = os.path.join(os.getcwd(), "tests", "files", "basedir_bz2",
            "DIRECTORY_NO_LONGER_USED_BY_MNEMOSYNE2")
        if os.path.exists(filename):
            os.remove(filename)
        filename = os.path.join(os.getcwd(), "tests", "files", "a.png")
        if os.path.exists(filename):
            os.remove(filename)
        filename = os.path.join(os.getcwd(), "tests", "files", "a.ogg")
        if os.path.exists(filename):
            os.remove(filename)
        dirname = os.path.join(os.getcwd(), "tests", "files", "figs")
        if os.path.exists(dirname):
            shutil.rmtree(dirname)
        dirname = os.path.join(os.getcwd(), "tests", "files", "soundfiles")
        if os.path.exists(dirname):
            shutil.rmtree(dirname)
        MnemosyneTest.teardown(self)
class WebServer(Component):

    """WSGI review server (cheroot-based).

    Serves the review widget as HTML on ``self.port``, lazily loading a full
    Mnemosyne instance on the first request and unloading it again on
    '/release_database' or after a timeout, so that a desktop client can take
    over the database in between.
    """

    def __init__(self, port, data_dir, config_dir, filename, **kwds):
        # Pop our private kwarg before Component.__init__ sees it.
        self.client_on_same_machine_as_server = \
            kwds.pop("client_on_same_machine_as_server", False)
        super().__init__(**kwds)
        self.wsgi_server = None
        self.port = port
        self.data_dir = data_dir
        self.config_dir = config_dir
        self.filename = filename
        # When restarting the server, make sure we discard info from the
        # browser resending the form from the previous session.
        self.is_just_started = True
        self.is_mnemosyne_loaded = False
        self.is_shutting_down = False

    def activate(self):
        Component.activate(self)
        # Late import to speed up application startup.
        from cheroot import wsgi
        self.wsgi_server = wsgi.Server(\
            ("0.0.0.0", self.port), self.wsgi_app, server_name="localhost",
            numthreads=1, timeout=5)
        # We need to set the timeout relatively low, otherwise it will take
        # too long for the server to process a 'stop' request.

    def serve_until_stopped(self):
        try:
            self.wsgi_server.start()  # Sets self.wsgi_server.ready
        except KeyboardInterrupt:
            self.wsgi_server.stop()
            self.unload_mnemosyne()

    def stop(self):
        if self.wsgi_server:
            self.wsgi_server.stop()
        self.unload_mnemosyne()

    def load_mnemosyne(self):
        """Create and initialise the headless Mnemosyne instance with the
        web-server widgets wired in for every study mode."""
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.gui_translators.gettext_gui_translator",
             "GetTextGuiTranslator"))
        self.mnemosyne.components.append(\
            ("mnemosyne.libmnemosyne.ui_components.main_widget", "MainWidget"))
        self.mnemosyne.components.append(\
            ("mnemosyne.web_server.web_server_render_chain",
             "WebServerRenderChain"))
        self.mnemosyne.gui_for_component["ScheduledForgottenNew"] = [\
            ("mnemosyne.web_server.review_wdgt", "ReviewWdgt")]
        self.mnemosyne.gui_for_component["NewOnly"] = [\
            ("mnemosyne.web_server.review_wdgt", "ReviewWdgt")]
        self.mnemosyne.gui_for_component["CramAll"] = [\
            ("mnemosyne.web_server.review_wdgt", "ReviewWdgt")]
        self.mnemosyne.gui_for_component["CramRecent"] = [\
            ("mnemosyne.web_server.review_wdgt", "ReviewWdgt")]
        self.mnemosyne.initialise(self.data_dir, config_dir=self.config_dir,
            filename=self.filename, automatic_upgrades=False)
        # Save after every repetition while serving, restoring the user's
        # setting in unload_mnemosyne.
        self.save_after_n_reps = self.mnemosyne.config()["save_after_n_reps"]
        self.mnemosyne.config()["save_after_n_reps"] = 1
        self.mnemosyne.config()["study_mode"] = "ScheduledForgottenNew"
        self.mnemosyne.config()["QA_split"] = "fixed"
        self.mnemosyne.review_widget().set_client_on_same_machine_as_server(\
            self.client_on_same_machine_as_server)
        self.mnemosyne.controller().reset_study_mode()
        self.is_mnemosyne_loaded = True
        self.release_database_after_timeout = \
            ReleaseDatabaseAfterTimeout(self.port)
        self.release_database_after_timeout.start()

    def unload_mnemosyne(self):
        if not self.is_mnemosyne_loaded:
            return
        self.mnemosyne.config()["save_after_n_reps"] = self.save_after_n_reps
        self.mnemosyne.finalise()
        self.is_mnemosyne_loaded = False

    def wsgi_app(self, environ, start_response):
        """WSGI entry point: serves status, the review page, database release
        requests and media files."""
        filename = environ["PATH_INFO"]
        if filename == "/status":
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            return [b"200 OK"]
        # Sometimes, even after the user has clicked 'exit' in the page,
        # a browser sends a request for e.g. an audio file.
        if self.is_shutting_down and filename != "/release_database":
            response_headers = [("Content-type", "text/html")]
            start_response("503 Service Unavailable", response_headers)
            return [b"Server stopped"]
        # Load database if needed.
        if not self.is_mnemosyne_loaded and filename != "/release_database":
            self.load_mnemosyne()
        self.release_database_after_timeout.ping()
        # All our request return to the root page, so if the path is '/',
        # return the html of the review widget.
        if filename == "/":
            # Process clicked buttons in the form.
            form = cgi.FieldStorage(fp=environ["wsgi.input"], environ=environ)
            if "show_answer" in form and not self.is_just_started:
                self.mnemosyne.review_widget().show_answer()
                page = self.mnemosyne.review_widget().to_html()
            elif "grade" in form and not self.is_just_started:
                grade = int(form["grade"].value)
                self.mnemosyne.review_widget().grade_answer(grade)
                page = self.mnemosyne.review_widget().to_html()
            elif "star" in form:
                self.mnemosyne.controller().star_current_card()
                page = self.mnemosyne.review_widget().to_html()
            elif "exit" in form:
                self.unload_mnemosyne()
                page = "Server stopped"
                self.wsgi_server.stop()
                self.stop_server_after_timeout = \
                    StopServerAfterTimeout(self.wsgi_server)
                self.stop_server_after_timeout.start()
                self.is_shutting_down = True
            else:
                page = self.mnemosyne.review_widget().to_html()
            if self.is_just_started:
                self.is_just_started = False
            # Serve the web page. WSGI (PEP 3333) requires bytes in the
            # response body; the 'exit' branch above produces a str (and
            # to_html() may too -- the other branches of this function all
            # return b"..." literals), so encode if needed.
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            body = page if isinstance(page, bytes) else page.encode("utf-8")
            return [body]
        elif filename == "/release_database":
            self.unload_mnemosyne()
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            return [b"200 OK"]
        # We need to serve a media file.
        else:
            # Late import to speed up application startup.
            from webob import Request
            from webob.static import FileApp
            full_path = self.mnemosyne.database().media_dir()
            for word in filename.split("/"):
                full_path = os.path.join(full_path, word)
            request = Request(environ)
            # Weak etag based on mtime/size/path so browsers can cache media.
            if os.path.exists(full_path):
                etag = "%s-%s-%s" % (os.path.getmtime(full_path),
                    os.path.getsize(full_path), hash(full_path))
            else:
                etag = "none"
            app = FileApp(full_path, etag=etag)
            return app(request)(environ, start_response)
    ("mnemosyne.libmnemosyne.statistics_pages.easiness", "Easiness"),
    ("mnemosyne.libmnemosyne.statistics_pages.current_card", "CurrentCard"),
    ("main_wdgt", "MainWdgt")]
# NOTE(review): fragment of a launcher script; the component-list assignment
# above starts before this chunk -- confirm against the full file.
mnemosyne.initialise(data_dir, filename=filename)
# Sync before starting the review server.
if mnemosyne.main_widget().show_question(\
    "Perform sync?", "Yes", "No", "") == 0:
    mnemosyne.controller().sync(sync_server, sync_port, sync_username,
        sync_password)
# Make sure the config gets picked up when starting a new
# Mnemosyne instance in the web server.
mnemosyne.config().save()
# Start review server.
mnemosyne.database().release_connection()
from mnemosyne.web_server.web_server import WebServerThread
web_server_thread = WebServerThread\
    (mnemosyne.component_manager, is_server_local=True)
web_server_thread.daemon = True
web_server_thread.start()
if mnemosyne.main_widget().show_question(\
    "Review server started. Either let Mnemosyne start Chrome, or, if you have problems with sound, start Firefox yourself and go to 127.0.0.1:8513, otherwise click below to start Chrome.",
    "Start Chrome", "Don't start Chrome", "") == 0:
    mnemosyne.main_widget().start_native_browser()
# Block until the user exits the review session in the browser.
web_server_thread.join()
# Sync again after the user has closed the program from the web server.
class MyClient(Client):

    """Benchmark sync client: builds a throwaway data dir with one card and
    200 grade-0 repetitions, then syncs it against the local test server."""

    program_name = "Mnemosyne"
    program_version = "test"
    capabilities = "TODO"

    def __init__(self):
        # Start from a clean slate; the previous benchmark run may have left
        # a data dir behind.
        shutil.rmtree(os.path.abspath("dot_sync_client"), ignore_errors=True)
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True)
        # Explicit component list (no GUI) instead of libmnemosyne's default.
        self.mnemosyne.components = [
            ("mnemosyne.libmnemosyne.translator", "NoTranslation"),
            ("mnemosyne.libmnemosyne.databases.SQLite", "SQLite"),
            ("mnemosyne.libmnemosyne.configuration", "Configuration"),
            ("mnemosyne.libmnemosyne.loggers.database_logger", "DatabaseLogger"),
            ("mnemosyne.libmnemosyne.schedulers.SM2_mnemosyne", "SM2Mnemosyne"),
            ("mnemosyne.libmnemosyne.stopwatch", "Stopwatch"),
            ("mnemosyne.libmnemosyne.card_types.front_to_back", "FrontToBack"),
            ("mnemosyne.libmnemosyne.card_types.both_ways", "BothWays"),
            ("mnemosyne.libmnemosyne.card_types.vocabulary", "Vocabulary"),
            ("mnemosyne.libmnemosyne.renderers.html_css", "HtmlCss"),
            ("mnemosyne.libmnemosyne.filters.escape_to_html", "EscapeToHtml"),
            ("mnemosyne.libmnemosyne.filters.expand_paths", "ExpandPaths"),
            ("mnemosyne.libmnemosyne.filters.latex", "Latex"),
            ("mnemosyne.libmnemosyne.render_chains.default_render_chain",
             "DefaultRenderChain"),
            ("mnemosyne.libmnemosyne.render_chains.plain_text_chain",
             "PlainTextChain"),
            ("mnemosyne.libmnemosyne.controllers.default_controller",
             "DefaultController"),
            ("mnemosyne.libmnemosyne.review_controllers.SM2_controller",
             "SM2Controller"),
            ("mnemosyne.libmnemosyne.card_types.map", "MapPlugin"),
            ("mnemosyne.libmnemosyne.card_types.cloze", "ClozePlugin"),
            ("mnemosyne.libmnemosyne.criteria.default_criterion",
             "DefaultCriterion"),
            ("mnemosyne.libmnemosyne.databases.SQLite_criterion_applier",
             "DefaultCriterionApplier"),
            ("mnemosyne.libmnemosyne.plugins.cramming_plugin", "CrammingPlugin")
            ]
        self.mnemosyne.components.append(("benchmark_sync_client", "Widget"))
        self.mnemosyne.components.append(("benchmark_sync_client",
            "MyReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath(os.path.join(os.getcwd(),
            "dot_sync_client")), automatic_upgrades=False)
        self.mnemosyne.config().change_user_id("user_id")
        # Skip media scanning and backups; they only slow the benchmark down.
        self.check_for_edited_local_media_files = False
        self.do_backup = False
        self.mnemosyne.review_controller().reset()
        # Do 200 reviews.
        card_type = self.mnemosyne.card_type_with_id("1")
        fact_data = {"f": "question", "b": "answer"}
        card = self.mnemosyne.controller().create_new_cards(fact_data,
            card_type, grade=-1, tag_names=["default"])[0]
        self.mnemosyne.database().save()
        self.mnemosyne.review_controller().show_new_question()
        for i in range(200):
            self.mnemosyne.review_controller().show_answer()
            # Always grade 0 so the same card keeps coming back.
            self.mnemosyne.review_controller().grade_answer(0)
        Client.__init__(self, "client_machine_id", self.mnemosyne.database(),
            self.mnemosyne.main_widget())

    def do_sync(self):
        # Sync the 200-repetition history against the benchmark server.
        #self.BUFFER_SIZE = 10*8192
        #self.behind_proxy = True
        self.sync("localhost", 8186, "user", "pass")
        self.mnemosyne.database().save()
class WebServer(Component):

    """Legacy WSGI review server (CherryPy wsgiserver variant).

    NOTE(review): this version predates the cheroot-based one -- it decodes
    PATH_INFO from bytes and returns native-str WSGI bodies, which only works
    under Python 2's WSGI. Confirm the target interpreter before reuse.
    """

    def __init__(self, component_manager, port, data_dir, config_dir,
                 filename, is_server_local=False):
        Component.__init__(self, component_manager)
        self.port = port
        self.data_dir = data_dir
        self.config_dir = config_dir
        self.filename = filename
        self.is_server_local = is_server_local
        # When restarting the server, make sure we discard info from the
        # browser resending the form from the previous session.
        self.is_just_started = True
        self.is_mnemosyne_loaded = False
        self.is_shutting_down = False
        self.wsgi_server = wsgiserver.CherryPyWSGIServer(\
            ("0.0.0.0", port), self.wsgi_app, server_name="localhost",
            numthreads=1, timeout=5)
        # We need to set the timeout relatively low, otherwise it will take
        # too long for the server to process a 'stop' request.

    def serve_until_stopped(self):
        try:
            self.wsgi_server.start()  # Sets self.wsgi_server.ready
        except KeyboardInterrupt:
            self.wsgi_server.stop()
            self.unload_mnemosyne()

    def stop(self):
        self.wsgi_server.stop()
        self.unload_mnemosyne()

    def load_mnemosyne(self):
        # Build a headless Mnemosyne instance rendering through the
        # web-server render chain; restore save_after_n_reps on unload.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True)
        self.mnemosyne.components.insert(0, (
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator")))
        self.mnemosyne.components.append(\
            ("mnemosyne.libmnemosyne.ui_components.main_widget", "MainWidget"))
        self.mnemosyne.components.append(\
            ("mnemosyne.web_server.review_wdgt", "ReviewWdgt"))
        self.mnemosyne.components.append(\
            ("mnemosyne.web_server.web_server_render_chain",
             "WebServerRenderChain"))
        self.mnemosyne.initialise(self.data_dir, config_dir=self.config_dir,
            filename=self.filename, automatic_upgrades=False)
        self.mnemosyne.review_controller().set_render_chain("web_server")
        self.save_after_n_reps = self.mnemosyne.config()["save_after_n_reps"]
        self.mnemosyne.config()["save_after_n_reps"] = 1
        self.mnemosyne.start_review()
        self.mnemosyne.review_widget().set_is_server_local(\
            self.is_server_local)
        self.is_mnemosyne_loaded = True
        self.release_database_after_timeout = \
            ReleaseDatabaseAfterTimeout(self.port)
        self.release_database_after_timeout.start()

    def unload_mnemosyne(self):
        if not self.is_mnemosyne_loaded:
            return
        self.mnemosyne.config()["save_after_n_reps"] = self.save_after_n_reps
        self.mnemosyne.finalise()
        self.is_mnemosyne_loaded = False

    def wsgi_app(self, environ, start_response):
        # WSGI entry point: status page, review page, database release, media.
        filename = environ["PATH_INFO"].decode("utf-8")
        if filename == "/status":
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            return ["200 OK"]
        # Sometimes, even after the user has clicked 'exit' in the page,
        # a browser sends a request for e.g. an audio file.
        if self.is_shutting_down and filename != "/release_database":
            response_headers = [("Content-type", "text/html")]
            start_response("503 Service Unavailable", response_headers)
            return ["Server stopped"]
        # Load database if needed.
        if not self.is_mnemosyne_loaded and filename != "/release_database":
            self.load_mnemosyne()
        self.release_database_after_timeout.ping()
        # All our request return to the root page, so if the path is '/',
        # return the html of the review widget.
        if filename == "/":
            # Process clicked buttons in the form.
            form = cgi.FieldStorage(fp=environ["wsgi.input"], environ=environ)
            if "show_answer" in form and not self.is_just_started:
                self.mnemosyne.review_widget().show_answer()
                page = self.mnemosyne.review_widget().to_html()
            elif "grade" in form and not self.is_just_started:
                grade = int(form["grade"].value)
                self.mnemosyne.review_widget().grade_answer(grade)
                page = self.mnemosyne.review_widget().to_html()
            elif "star" in form:
                self.mnemosyne.controller().star_current_card()
                page = self.mnemosyne.review_widget().to_html()
            elif "exit" in form:
                self.unload_mnemosyne()
                page = "Server stopped"
                self.wsgi_server.stop()
                self.stop_server_after_timeout = \
                    StopServerAfterTimeout(self.wsgi_server)
                self.stop_server_after_timeout.start()
                self.is_shutting_down = True
            else:
                page = self.mnemosyne.review_widget().to_html()
            if self.is_just_started:
                self.is_just_started = False
            # Serve the web page.
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            return [page]
        elif filename == "/release_database":
            self.unload_mnemosyne()
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            return ["200 OK"]
        # We need to serve a media file.
        else:
            full_path = self.mnemosyne.database().media_dir()
            for word in filename.split("/"):
                full_path = os.path.join(full_path, word)
            request = Request(environ)
            # Check if file exists, but work around Android not reporting
            # the correct filesystem encoding.
            # NOTE(review): 'exists' is assigned but never read; only the
            # side effect of triggering the Unicode workaround matters.
            try:
                exists = os.path.exists(full_path)
            except (UnicodeEncodeError, UnicodeDecodeError):
                _ENCODING = sys.getfilesystemencoding() or \
                    locale.getdefaultlocale()[1] or "utf-8"
                full_path = full_path.encode(_ENCODING)
            if os.path.exists(full_path):
                etag = "%s-%s-%s" % (os.path.getmtime(full_path),
                    os.path.getsize(full_path), hash(full_path))
            else:
                etag = "none"
            app = FileApp(full_path, etag=etag)
            return app(request)(environ, start_response)
class TestMemImport(MnemosyneTest):

    """Tests importing Mnemosyne 1.x *.mem databases and 1.x science logs
    into a fresh 2.x database rooted at 'dot_test'.

    Fixtures live under tests/files/.  'last_error' is a module-level
    global, presumably set by the test widget's error handler — verify
    against the surrounding module.  NOTE(review): this module uses
    Python 2 idioms ('file()', u-literals) throughout.
    """

    def setup(self):
        # Fresh Mnemosyne instance with the test widgets registered.
        self.initialise_data_dir()
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(\
            ("mnemosyne_test", "TestReviewWidget"))
        self.mnemosyne.components.append(\
            ("test_mem_import", "Widget"))
        self.mnemosyne.components.append(\
            ("test_mem_import", "MyImportDialog"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.review_controller().reset()

    def mem_importer(self):
        """Return the registered Mnemosyne1Mem file-format component."""
        for file_format in self.mnemosyne.component_manager.all("file_format"):
            if file_format.__class__.__name__ == "Mnemosyne1Mem":
                return file_format

    def test_file_not_found(self):
        filename = os.path.join(os.getcwd(), "tests", "files", "nothere.mem")
        self.mem_importer().do_import(filename)
        assert last_error.startswith("Unable to open")

    def test_card_type_1(self):
        filename = os.path.join(os.getcwd(), "tests", "files", "1sided.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.database().card_count() == 1
        card = self.review_controller().card
        assert card.grade == 2
        assert card.easiness == 2.5
        assert card.acq_reps == 1
        assert card.ret_reps == 0
        assert card.lapses == 0
        assert card.acq_reps_since_lapse == 1
        assert card.ret_reps_since_lapse == 0
        assert [tag.name for tag in card.tags] == ["__UNTAGGED__"]
        assert card.last_rep == 1247529600
        assert card.next_rep == 1247616000
        assert card.id == "9cff728f"

    def test_card_type_1_unseen(self):
        filename = os.path.join(os.getcwd(), "tests", "files",
            "1sided_unseen.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.database().card_count() == 1
        card = self.review_controller().card
        assert card.grade == -1
        assert card.easiness == 2.5
        assert card.acq_reps == 0
        assert card.ret_reps == 0
        assert card.lapses == 0
        assert card.acq_reps_since_lapse == 0
        assert card.ret_reps_since_lapse == 0
        assert card.last_rep == -1
        assert card.next_rep == -1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1

    def test_card_type_1_edited(self):
        filename = os.path.join(os.getcwd(), "tests", "files", "1sided.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.database().card_count() == 1
        card = self.review_controller().card
        assert card.id == "9cff728f"
        assert "question" in card.question()
        # A second import of the same file must be rejected.
        filename = os.path.join(os.getcwd(), "tests", "files", "1sided.mem")
        self.mem_importer().do_import(filename)
        assert last_error.startswith(
            "These cards seem to have been imported before")

    def test_card_type_2(self):
        filename = os.path.join(os.getcwd(), "tests", "files", "2sided.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.database().card_count() == 2
        card_1 = self.review_controller().card
        assert "question" in card_1.question()
        assert "answer" in card_1.answer()
        # The sibling card of a two-sided fact has question/answer swapped.
        cards = self.database().cards_from_fact(card_1.fact)
        if cards[0] == card_1:
            card_2 = cards[1]
        else:
            card_2 = cards[0]
        assert "question" in card_2.answer()
        assert "answer" in card_2.question()
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 2

    def test_card_type_3(self):
        filename = os.path.join(os.getcwd(), "tests", "files", "3sided.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.database().card_count() == 2
        card_1 = self.review_controller().card
        assert card_1.fact.data == {"f": "f", "p_1": "p", "m_1": "t"}
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 2

    def test_card_type_3_corrupt(self):
        filename = os.path.join(os.getcwd(), "tests", "files",
            "3sided_corrupt.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.database().card_count() == 2
        card_1 = self.review_controller().card
        assert card_1.fact.data == {"f": "f", "m_1": "t"}
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 2

    def test_card_type_3_missing(self):
        filename = os.path.join(os.getcwd(), "tests", "files",
            "3sided_missing.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.database().card_count() == 1
        card_1 = self.review_controller().card
        assert card_1.fact.data == {"f": "t", "b": "f\np"}
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1

    def test_media(self):
        # Create three figures, one of them in a nested figs/figs dir.
        os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs"))
        os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs", "figs"))
        figures = [\
            os.path.join(os.getcwd(), "tests", "files", "a.png"),
            os.path.join(os.getcwd(), "tests", "files", "figs", "a.png"),
            os.path.join(os.getcwd(), "tests", "files", "figs", "figs",
                         "a.png")]
        for filename in figures:
            file(filename, "w")
        filename = os.path.join(os.getcwd(), "tests", "files", "media.mem")
        self.mem_importer().do_import(filename)
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "a.png"))
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "figs",
            "a.png"))
        # Fixed: this previously re-checked "figs/a.png"; the third
        # imported figure lives in the nested "figs/figs" directory.
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "figs",
            "figs", "a.png"))
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 3

    def test_media_missing(self):
        # Only two of the three referenced figures exist on disk.
        os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs"))
        os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs", "figs"))
        figures = [\
            os.path.join(os.getcwd(), "tests", "files", "a.png"),
            os.path.join(os.getcwd(), "tests", "files", "figs", "a.png")]
        for filename in figures:
            file(filename, "w")
        filename = os.path.join(os.getcwd(), "tests", "files", "media.mem")
        self.mem_importer().do_import(filename)
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "a.png"))
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "figs",
            "a.png"))
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 2

    def test_media_missing_2(self):
        # No media files at all: nothing gets copied or logged.
        filename = os.path.join(os.getcwd(), "tests", "files", "media.mem")
        self.mem_importer().do_import(filename)
        assert not os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "a.png"))
        assert not os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "figs",
            "a.png"))
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 0

    def test_media_slashes(self):
        os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs"))
        os.mkdir(os.path.join(os.getcwd(), "tests", "files", "figs", "figs"))
        figures = [\
            os.path.join(os.getcwd(), "tests", "files", "a.png"),
            os.path.join(os.getcwd(), "tests", "files", "figs", "a.png"),
            os.path.join(os.getcwd(), "tests", "files", "figs", "figs",
                         "a.png")]
        for filename in figures:
            file(filename, "w")
        filename = os.path.join(os.getcwd(), "tests", "files",
            "media_slashes.mem")
        self.mem_importer().do_import(filename)
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "a.png"))
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "figs",
            "a.png"))
        # Fixed: same copy-paste slip as in test_media — check the nested
        # "figs/figs" copy, matching the 3-file import count below.
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "figs",
            "figs", "a.png"))
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 3

    def test_media_quotes(self):
        filename = os.path.join(os.getcwd(), "tests", "files",
            "basedir_media", "default.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 1

    def test_sound(self):
        os.mkdir(os.path.join(\
            os.getcwd(), "tests", "files", "soundfiles"))
        soundname = os.path.join(os.path.join(\
            os.getcwd(), "tests", "files", "soundfiles", "a.ogg"))
        file(soundname, "w")
        filename = os.path.join(os.getcwd(), "tests", "files", "sound.mem")
        self.mem_importer().do_import(filename)
        assert os.path.exists(os.path.join(\
            os.path.abspath("dot_test"), "default.db_media", "soundfiles",
            "a.ogg"))
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_MEDIA_FILE, )).fetchone()[0] == 1
        self.review_controller().reset()
        card = self.review_controller().card
        assert card.fact["f"] == """<audio src="soundfiles/a.ogg">"""

    def test_map(self):
        filename = os.path.join(os.getcwd(), "tests", "files", "map.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.database().card_count() == 2
        card = self.review_controller().card
        assert card.fact["loc"] == "<b>Drenthe</b>"
        # Missing map images are tagged with src_missing.
        assert card.fact["marked"] == \
            """<img src_missing="maps/Netherlands-Provinces/Drenthe.png">"""
        assert card.fact["blank"] == \
            """<img src_missing="maps/Netherlands-Provinces/Netherlands-Provinces.png">"""

    def test_dups(self):
        filename = os.path.join(os.getcwd(), "tests", "files", "dups.mem")
        self.mem_importer().do_import(filename)
        self.review_controller().reset()
        assert self.review_controller().card.fact["loc"] == \
            u"""<b>Freistaat Th\xfcringen (Free State of Thuringia)</b>"""
        assert self.review_controller().card.tag_string() == \
            "Germany: States, MISSING_MEDIA"

    # --- 1.x science-log imports.  The log table columns asserted below
    # by index are (presumably, verify against the SQLite schema):
    # 2=timestamp, 4=grade, 5=easiness, 6=acq_reps, 7=ret_reps, 8=lapses,
    # 9/10=reps since lapse, 11=scheduled_interval, 12=actual_interval,
    # 13=thinking_time, 14=next_rep.

    def test_logs_new_1(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 10
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='9525224f'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='9525224f'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select scheduled_interval from log where event_type=? and object_id='9525224f' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == (6)*60*60*24
        assert self.database().con.execute(\
            """select actual_interval from log where event_type=? and object_id='9525224f' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 0 # This is an artificial log.
        timestamp = self.database().con.execute(\
            """select timestamp from log where event_type=? and object_id='9525224f' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0]
        next_rep = self.database().con.execute(\
            """select next_rep from log where event_type=? and object_id='9525224f' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0]
        assert next_rep - timestamp == (14-3)*60*60*24
        assert self.database().con.execute(\
            "select count() from log").fetchone()[0] == 25
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? order by _id desc limit 1",
            (EventTypes.LOADED_DATABASE, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? order by _id desc limit 1",
            (EventTypes.LOADED_DATABASE, )).fetchone()[0] == 7
        assert self.database().con.execute(\
            "select lapses from log where event_type=? order by _id desc limit 1",
            (EventTypes.LOADED_DATABASE, )).fetchone()[0] == 336
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? order by _id desc limit 1",
            (EventTypes.SAVED_DATABASE, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? order by _id desc limit 1",
            (EventTypes.SAVED_DATABASE, )).fetchone()[0] == 12
        assert self.database().con.execute(\
            "select lapses from log where event_type=? order by _id desc limit 1",
            (EventTypes.SAVED_DATABASE, )).fetchone()[0] == 341

    def test_logs_new_2(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_2.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='8da62cfb'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='8da62cfb'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_new_3(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_3.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 4
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='5106b621'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='5106b621'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps from log where event_type=? and object_id='5106b621' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps_since_lapse from log where event_type=? and object_id='5106b621' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_new_4(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_4.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='b7601e0c'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? and object_id='b7601e0c'",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='b7601e0c'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps from log where event_type=? and object_id='b7601e0c' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select ret_reps from log where event_type=? and object_id='b7601e0c' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps_since_lapse from log where event_type=? and object_id='b7601e0c' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_new_5(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_5.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1'",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            """select ret_reps from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            """select acq_reps_since_lapse from log where event_type=? and object_id='9c8ce28e-1a4b-4148-8287-b8a7790d86d0.1.1' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            """select object_id from log where event_type=?""",
            (EventTypes.STARTED_SCHEDULER, )).fetchone()[0] == "SM2 Mnemosyne"

    def test_logs_new_6(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files", "new_6.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        sql_res = self.database().con.execute(\
            "select * from log where event_type=? and object_id='4c53e29a-f9e9-498b-8beb-d3a494f61bca.1.1'",
            (EventTypes.REPETITION, )).fetchone()
        assert sql_res[4] == 5
        assert sql_res[5] == 2.5
        assert sql_res[6] == 1
        assert sql_res[7] == 0
        assert sql_res[8] == 0
        assert sql_res[9] == 1
        assert sql_res[10] == 0
        assert sql_res[11] == 0
        assert sql_res[12] == 0
        assert sql_res[14] - sql_res[2] == 345600
        assert sql_res[13] == 0
        sql_res = self.database().con.execute(\
            """select * from log where event_type=? and object_id='4c53e29a-f9e9-498b-8beb-d3a494f61bca.1.1' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()
        assert sql_res[4] == 2
        assert sql_res[5] == 2.5
        assert sql_res[6] == 1
        assert sql_res[7] == 1
        assert sql_res[8] == 0
        assert sql_res[9] == 1
        assert sql_res[10] == 1
        assert sql_res[11] == 302986
        assert sql_res[12] == 10
        assert sql_res[14] - sql_res[2] == 475774
        assert sql_res[13] == 1

    def test_logs_imported_1(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "imported_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 3
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='f5d9bbe7'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? and object_id='f5d9bbe7'",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='f5d9bbe7'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select acq_reps from log where event_type=? and object_id='f5d9bbe7' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            """select ret_reps from log where event_type=? and object_id='f5d9bbe7' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 2
        assert self.database().con.execute(\
            """select acq_reps_since_lapse from log where event_type=? and object_id='f5d9bbe7' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_imported_2(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "imported_2.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select acq_reps from log where event_type=? and object_id='14670f10'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select ret_reps from log where event_type=? and object_id='14670f10'",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select acq_reps_since_lapse from log where event_type=? and object_id='14670f10'",
            (EventTypes.REPETITION, )).fetchone()[0] == 1

    def test_logs_imported_3(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "imported_3.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1

    def test_restored_1(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "restored_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        sql_res = self.database().con.execute(\
            "select * from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()
        assert sql_res[4] == 1
        assert sql_res[5] == 2.36
        assert sql_res[6] == 23
        assert sql_res[7] == 8
        assert sql_res[8] == 2
        assert sql_res[9] == 0
        assert sql_res[10] == 0
        assert sql_res[11] == 89 * 24 * 60 * 60
        assert sql_res[12] == 0
        # No last rep data.
        assert sql_res[14] - sql_res[2] == 0
        assert sql_res[13] == 5

    def test_restored_2(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "restored_2.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1

    def test_logs_act_interval(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "actinterval_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            """select actual_interval from log where event_type=? and object_id='f1300e5a' order by _id desc limit 1""",
            (EventTypes.REPETITION, )).fetchone()[0] == 5

    def test_logs_deleted(self):
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "delete_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.DELETED_CARD, )).fetchone()[0] == 1

    def test_logs_corrupt_1(self):
        # Wrong data, missing creation event.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "corrupt_1.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where object_id=?",
            ("4b59b830", )).fetchone()[0] == 3

    def test_logs_corrupt_2(self):
        # Wrong data, isolated deletion event.
        self.database().update_card_after_log_import = (lambda x, y, z: 0)
        self.database().before_1x_log_import()
        filename = os.path.join(os.getcwd(), "tests", "files",
            "corrupt_2.txt")
        ScienceLogParser(self.database()).parse(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select count() from log where object_id=?",
            ("4b59b830", )).fetchone()[0] == 0

    def test_two_mem_files_sharing_same_logs(self):
        filename = os.path.join(os.getcwd(), "tests", "files",
            "basedir_2_mem", "deck1.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 1
        filename = os.path.join(os.getcwd(), "tests", "files",
            "basedir_2_mem", "deck2.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 3
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 2
        card = self.database().card("4c8fff73", is_id_internal=False)
        assert self.database().average_thinking_time(card) == 1.5
        assert self.database().total_thinking_time(card) == 3.0
        assert self.database().card_count_for_grade(0, active_only=True) == 2
        tag = self.database().get_or_create_tag_with_name("666")
        assert self.database().card_count_for_grade_and_tag(0, tag,
            active_only=True) == 0
        from mnemosyne.libmnemosyne.statistics_pages.grades import Grades
        page = Grades(self.mnemosyne.component_manager)
        page.prepare_statistics(tag._id)
        assert page.y == [0, 0, 0, 0, 0, 0, 0]
        page.prepare_statistics(-1)
        assert page.y == [0, 2, 0, 0, 0, 0, 0]

    def test_bz2(self):
        filename = os.path.join(os.getcwd(), "tests", "files",
            "basedir_bz2", "default.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.REPETITION, )).fetchone()[0] == 0
        assert self.database().con.execute(\
            "select count() from log where event_type=?",
            (EventTypes.ADDED_CARD, )).fetchone()[0] == 1
        assert self.database().con.execute(\
            "select count() from log where object_id=?",
            ("82f2ed0d", )).fetchone()[0] == 0

    def test_sch(self):
        self.controller().show_import_file_dialog()
        assert self.database().card_count_scheduled_n_days_ago(0) == 1

    def test_upgrade(self):
        old_data_dir = os.path.join(os.getcwd(), "tests", "files",
            "basedir_bz2")
        from mnemosyne.libmnemosyne.upgrades.upgrade1 import Upgrade1
        Upgrade1(self.mnemosyne.component_manager).\
            upgrade_from_old_data_dir(old_data_dir)
        assert self.config()["dvipng"].rstrip() == \
            "dvipng -D 300 -T tight tmp.dvi\necho"
        assert "14pt" in self.config()["latex_preamble"]
        assert self.config()["user_id"] == "f3fb13c7"
        assert self.log().log_index_of_last_upload() == 2
        assert os.path.exists(os.path.join(old_data_dir,
            "DIRECTORY_NO_LONGER_USED_BY_MNEMOSYNE2"))
        assert os.path.exists(os.path.join(self.mnemosyne.config().data_dir,
            "history", "a_2.bz2"))
        log = file(os.path.join(self.mnemosyne.config().data_dir, "log.txt"))
        assert log.readline().strip() == \
            "2005-11-01 09:29:08 : Imported item 82f2ed0d 0 0 0 0 0"

    def teardown(self):
        # Remove every fixture artefact the tests above may have created.
        filename = os.path.join(os.getcwd(), "tests", "files",
            "basedir_bz2", "DIRECTORY_NO_LONGER_USED_BY_MNEMOSYNE2")
        if os.path.exists(filename):
            os.remove(filename)
        filename = os.path.join(os.getcwd(), "tests", "files", "a.png")
        if os.path.exists(filename):
            os.remove(filename)
        filename = os.path.join(os.getcwd(), "tests", "files", "a.ogg")
        if os.path.exists(filename):
            os.remove(filename)
        dirname = os.path.join(os.getcwd(), "tests", "files", "figs")
        if os.path.exists(dirname):
            shutil.rmtree(dirname)
        dirname = os.path.join(os.getcwd(), "tests", "files", "soundfiles")
        if os.path.exists(dirname):
            shutil.rmtree(dirname)
        MnemosyneTest.teardown(self)
class WebServer(Component):

    """Single-threaded CherryPy WSGI server that serves the Mnemosyne
    review widget as a web page.

    The embedded Mnemosyne instance is loaded lazily on the first request
    and unloaded on exit, on /release_database, or after an inactivity
    timeout, so that other processes (e.g. sync) can access the database.
    """

    def __init__(self, component_manager, port, data_dir, config_dir,
                 filename, is_server_local=False):
        Component.__init__(self, component_manager)
        self.port = port
        self.data_dir = data_dir
        self.config_dir = config_dir
        self.filename = filename
        self.is_server_local = is_server_local
        # When restarting the server, make sure we discard info from the
        # browser resending the form from the previous session.
        self.is_just_started = True
        self.is_mnemosyne_loaded = False
        self.is_shutting_down = False
        self.wsgi_server = wsgiserver.CherryPyWSGIServer(\
            ("0.0.0.0", port), self.wsgi_app, server_name="localhost",
            numthreads=1, timeout=5)
        # We need to set the timeout relatively low, otherwise it will take
        # too long for the server to process a 'stop' request.

    def serve_until_stopped(self):
        # Blocks until the server is stopped (from wsgi_app or Ctrl-C).
        try:
            self.wsgi_server.start() # Sets self.wsgi_server.ready
        except KeyboardInterrupt:
            self.wsgi_server.stop()
            self.unload_mnemosyne()

    def stop(self):
        self.wsgi_server.stop()
        self.unload_mnemosyne()

    def load_mnemosyne(self):
        # Build a headless Mnemosyne instance rendering through the
        # web_server render chain.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True)
        self.mnemosyne.components.insert(0,
            (("mnemosyne.libmnemosyne.translators.gettext_translator",
              "GetTextTranslator")))
        self.mnemosyne.components.append(\
            ("mnemosyne.libmnemosyne.ui_components.main_widget",
             "MainWidget"))
        self.mnemosyne.components.append(\
            ("mnemosyne.web_server.review_wdgt", "ReviewWdgt"))
        self.mnemosyne.components.append(\
            ("mnemosyne.web_server.web_server_render_chain",
             "WebServerRenderChain"))
        self.mnemosyne.initialise(self.data_dir, config_dir=self.config_dir,
            filename=self.filename, automatic_upgrades=False)
        self.mnemosyne.review_controller().set_render_chain("web_server")
        # Save after every rep while serving; the user's own setting is
        # restored in unload_mnemosyne().
        self.save_after_n_reps = self.mnemosyne.config()["save_after_n_reps"]
        self.mnemosyne.config()["save_after_n_reps"] = 1
        self.mnemosyne.start_review()
        self.mnemosyne.review_widget().set_is_server_local(\
            self.is_server_local)
        self.is_mnemosyne_loaded = True
        # Watchdog that releases the database after a period of inactivity.
        self.release_database_after_timeout = \
            ReleaseDatabaseAfterTimeout(self.port)
        self.release_database_after_timeout.start()

    def unload_mnemosyne(self):
        # Finalise the embedded instance, restoring 'save_after_n_reps'
        # first.  No-op when nothing is loaded.
        if not self.is_mnemosyne_loaded:
            return
        self.mnemosyne.config()["save_after_n_reps"] = self.save_after_n_reps
        self.mnemosyne.finalise()
        self.is_mnemosyne_loaded = False

    def wsgi_app(self, environ, start_response):
        # WSGI entry point.  Routes:
        #   /status            -> liveness probe, always "200 OK"
        #   /                  -> review page; form fields drive the review
        #   /release_database  -> unload the database (e.g. before a sync)
        #   anything else      -> served as a media file from the media dir
        # NOTE(review): .decode() on PATH_INFO implies Python 2 (str ->
        # unicode); under Python 3 / PEP 3333 PATH_INFO is already str.
        filename = environ["PATH_INFO"].decode("utf-8")
        if filename == "/status":
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            return ["200 OK"]
        # Sometimes, even after the user has clicked 'exit' in the page,
        # a browser sends a request for e.g. an audio file.
        if self.is_shutting_down and filename != "/release_database":
            response_headers = [("Content-type", "text/html")]
            start_response("503 Service Unavailable", response_headers)
            return ["Server stopped"]
        # Load database if needed.
        if not self.is_mnemosyne_loaded and filename != "/release_database":
            self.load_mnemosyne()
        # Every request counts as activity for the release watchdog.
        self.release_database_after_timeout.ping()
        # All our request return to the root page, so if the path is '/',
        # return the html of the review widget.
        if filename == "/":
            # Process clicked buttons in the form.
            form = cgi.FieldStorage(fp=environ["wsgi.input"],
                environ=environ)
            # is_just_started guards against the browser re-posting the
            # form from a previous server session on restart.
            if "show_answer" in form and not self.is_just_started:
                self.mnemosyne.review_widget().show_answer()
                page = self.mnemosyne.review_widget().to_html()
            elif "grade" in form and not self.is_just_started:
                grade = int(form["grade"].value)
                self.mnemosyne.review_widget().grade_answer(grade)
                page = self.mnemosyne.review_widget().to_html()
            elif "star" in form:
                self.mnemosyne.controller().star_current_card()
                page = self.mnemosyne.review_widget().to_html()
            elif "exit" in form:
                self.unload_mnemosyne()
                page = "Server stopped"
                self.wsgi_server.stop()
                # Stop the server from a helper thread after a delay, so
                # this request can still be answered.
                self.stop_server_after_timeout = \
                    StopServerAfterTimeout(self.wsgi_server)
                self.stop_server_after_timeout.start()
                self.is_shutting_down = True
            else:
                page = self.mnemosyne.review_widget().to_html()
            if self.is_just_started:
                self.is_just_started = False
            # Serve the web page.
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            return [page]
        elif filename == "/release_database":
            self.unload_mnemosyne()
            response_headers = [("Content-type", "text/html")]
            start_response("200 OK", response_headers)
            return ["200 OK"]
        # We need to serve a media file.
        else:
            full_path = self.mnemosyne.database().media_dir()
            for word in filename.split("/"):
                full_path = os.path.join(full_path, word)
            request = Request(environ)
            # Check if file exists, but work around Android not reporting
            # the correct filesystem encoding.
            try:
                # Probe only: 'exists' is deliberately unused; we are
                # fishing for Unicode errors from the path.
                exists = os.path.exists(full_path)
            except (UnicodeEncodeError, UnicodeDecodeError):
                _ENCODING = sys.getfilesystemencoding() or \
                    locale.getdefaultlocale()[1] or "utf-8"
                full_path = full_path.encode(_ENCODING)
            if os.path.exists(full_path):
                # Cheap ETag from mtime/size/path so browsers can cache.
                etag = "%s-%s-%s" % (os.path.getmtime(full_path),
                    os.path.getsize(full_path), hash(full_path))
            else:
                etag = "none"
            app = FileApp(full_path, etag=etag)
            return app(request)(environ, start_response)
class MyClient(Client):

    """Headless sync-test client: builds a fresh libmnemosyne instance in
    'dot_sync_client', creates one card, performs 200 failed reviews, and
    then syncs against the local test server.
    """

    program_name = "Mnemosyne"
    program_version = "test"
    capabilities = "TODO"

    def __init__(self):
        # Start from a clean slate: wipe any data dir left by a previous run.
        shutil.rmtree(os.path.abspath("dot_sync_client"), ignore_errors=True)
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True)
        # Explicit component list instead of the default one, so the test
        # runs without any GUI toolkit.  Order matters to the component
        # manager -- do not reorder.
        self.mnemosyne.components = [
            ("mnemosyne.libmnemosyne.translator", "NoTranslation"),
            ("mnemosyne.libmnemosyne.databases.SQLite", "SQLite"),
            ("mnemosyne.libmnemosyne.configuration", "Configuration"),
            ("mnemosyne.libmnemosyne.loggers.database_logger",
             "DatabaseLogger"),
            ("mnemosyne.libmnemosyne.schedulers.SM2_mnemosyne",
             "SM2Mnemosyne"),
            ("mnemosyne.libmnemosyne.stopwatch", "Stopwatch"),
            ("mnemosyne.libmnemosyne.card_types.front_to_back",
             "FrontToBack"),
            ("mnemosyne.libmnemosyne.card_types.both_ways", "BothWays"),
            ("mnemosyne.libmnemosyne.card_types.vocabulary", "Vocabulary"),
            ("mnemosyne.libmnemosyne.renderers.html_css", "HtmlCss"),
            ("mnemosyne.libmnemosyne.filters.escape_to_html", "EscapeToHtml"),
            ("mnemosyne.libmnemosyne.filters.expand_paths", "ExpandPaths"),
            ("mnemosyne.libmnemosyne.filters.latex", "Latex"),
            ("mnemosyne.libmnemosyne.render_chains.default_render_chain",
             "DefaultRenderChain"),
            ("mnemosyne.libmnemosyne.render_chains.plain_text_chain",
             "PlainTextChain"),
            ("mnemosyne.libmnemosyne.controllers.default_controller",
             "DefaultController"),
            ("mnemosyne.libmnemosyne.review_controllers.SM2_controller",
             "SM2Controller"),
            ("mnemosyne.libmnemosyne.card_types.map", "MapPlugin"),
            ("mnemosyne.libmnemosyne.card_types.cloze", "ClozePlugin"),
            ("mnemosyne.libmnemosyne.criteria.default_criterion",
             "DefaultCriterion"),
            ("mnemosyne.libmnemosyne.databases.SQLite_criterion_applier",
             "DefaultCriterionApplier"),
            ("mnemosyne.libmnemosyne.plugins.cramming_plugin",
             "CrammingPlugin")
            ]
        self.mnemosyne.components.append(("benchmark_sync_client", "Widget"))
        self.mnemosyne.components.append(("benchmark_sync_client",
            "MyReviewWidget"))
        # os.getcwdu(): Python 2 unicode cwd.
        self.mnemosyne.initialise(os.path.abspath(os.path.join(os.getcwdu(),
            "dot_sync_client")), automatic_upgrades=False)
        self.mnemosyne.config().change_user_id("user_id")
        # Disable client-side media rescans and backups to keep the sync
        # traffic deterministic for the test.
        self.check_for_edited_local_media_files = False
        self.do_backup = False
        self.mnemosyne.review_controller().reset()
        # Do 200 reviews.
        card_type = self.mnemosyne.card_type_with_id("1")
        fact_data = {"f": "question", "b": "answer"}
        card = self.mnemosyne.controller().create_new_cards(fact_data,
            card_type, grade=-1, tag_names=["default"])[0]
        self.mnemosyne.database().save()
        self.mnemosyne.review_controller().show_new_question()
        # Grade 0 every time so the same card keeps coming back and all 200
        # repetitions land in the log.
        for i in range(200):
            self.mnemosyne.review_controller().show_answer()
            self.mnemosyne.review_controller().grade_answer(0)
        Client.__init__(self, "client_machine_id", self.mnemosyne.database(),
            self.mnemosyne.main_widget())

    def do_sync(self):
        """Sync once against the local test server and persist the result."""
        #self.BUFFER_SIZE = 10*8192
        #self.behind_proxy = True
        self.sync("localhost", 8186, "user", "pass")
        self.mnemosyne.database().save()
"ThreeSided"), ("mnemosyne.libmnemosyne.renderers.html_css_old", "HtmlCssOld"), ("mnemosyne.libmnemosyne.filters.escape_to_html", "EscapeToHtml"), ("mnemosyne.libmnemosyne.filters.latex", "Latex"), ("mnemosyne.libmnemosyne.filters.expand_paths", "ExpandPaths"), ("mnemosyne.libmnemosyne.controllers.default_controller", "DefaultController"), ("mnemosyne.libmnemosyne.review_controllers.SM2_controller", "SM2Controller"), ("mnemosyne.libmnemosyne.card_types.map", "MapPlugin"), ("mnemosyne.libmnemosyne.card_types.cloze", "ClozePlugin"), ("mnemosyne.libmnemosyne.activity_criteria.default_criterion", "DefaultCriterion"), ("mnemosyne.libmnemosyne.databases.SQLite_criterion_applier", "DefaultCriterionApplier"), ("mnemosyne.libmnemosyne.plugins.cramming_plugin", "CrammingPlugin") ] # Run Mnemosyne. mnemosyne.initialise(basedir=basedir) mnemosyne.config()["upload_science_logs"] = False app.mainframe = mnemosyne.main_widget() app.run() mnemosyne.finalise()
class TestLogging(MnemosyneTest):

    """Tests for the database logger: event ordering, log-index bookkeeping,
    user-id recovery from history filenames, science-log upload, and
    archiving of old log entries.

    Note: the asserted '_id' values encode the exact number and order of
    log events produced by the setup calls -- any change to the setup
    sequence invalidates them.
    """

    def restart(self):
        """Simulate a program restart: build and initialise a fresh
        Mnemosyne instance on the existing 'dot_test' data dir."""
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(("test_logging", "MyMainWidget"))
        self.mnemosyne.components.append(("mnemosyne_test",
            "TestReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()

    def test_logging(self):
        """Walk one review session, restart, delete a card, and verify the
        full log-event sequence plus the science-log text dump."""
        # First session: one card, three repetitions (grades 0, 1, 4).
        card_type = self.card_type_with_id("1")
        fact_data = {"f": "1", "b": "b"}
        card = self.controller().create_new_cards(fact_data, card_type,
            grade=-1, tag_names=["default"])[0]
        card_id_1 = card.id
        self.review_controller().show_new_question()
        self.review_controller().grade_answer(0)
        self.review_controller().show_new_question()
        self.review_controller().grade_answer(1)
        self.review_controller().grade_answer(4)
        self.mnemosyne.finalise()
        self.restart()
        # Second session: add another card, then delete it.
        card_type = self.card_type_with_id("1")
        fact_data = {"f": "2", "b": "b"}
        card = self.controller().create_new_cards(fact_data, card_type,
            grade=-1, tag_names=["default"])[0]
        self.review_controller().show_new_question()
        self.controller().delete_current_card()
        self.log().dump_to_science_log()
        # Session 1 bootstrap events.
        sql_res = self.database().con.execute(\
            "select * from log where _id=1").fetchone()
        assert sql_res[1] == EventTypes.STARTED_PROGRAM
        sql_res = self.database().con.execute(\
            "select * from log where _id=2").fetchone()
        assert sql_res[1] == EventTypes.STARTED_SCHEDULER
        sql_res = self.database().con.execute(\
            "select * from log where _id=3").fetchone()
        assert sql_res[1] == EventTypes.LOADED_DATABASE
        assert sql_res[6] == 0
        assert sql_res[7] == 0
        assert sql_res[8] == 0
        # Card creation events (tag, criterion, fact, card).
        sql_res = self.database().con.execute(\
            "select * from log where _id=11").fetchone()
        assert sql_res[1] == EventTypes.ADDED_TAG
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(\
            "select * from log where _id=12").fetchone()
        assert sql_res[1] == EventTypes.EDITED_CRITERION
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(\
            "select * from log where _id=13").fetchone()
        assert sql_res[1] == EventTypes.ADDED_FACT
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(\
            "select * from log where _id=14").fetchone()
        assert sql_res[1] == EventTypes.ADDED_CARD
        assert sql_res[3] is not None
        # Repetition events: columns 6/7 look like grade counters, 12 the
        # thinking time (CPU-load dependent), 14-2 the scheduled interval.
        sql_res = self.database().con.execute(\
            "select * from log where _id=15").fetchone()
        assert sql_res[1] == EventTypes.REPETITION
        assert sql_res[6] == 1
        assert sql_res[7] == 0
        assert sql_res[11] == 0
        assert sql_res[12] == 0
        assert sql_res[14] - sql_res[2] == 0
        assert sql_res[13] == 0
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(\
            "select * from log where _id=16").fetchone()
        assert sql_res[1] == EventTypes.REPETITION
        assert sql_res[6] == 2
        assert sql_res[7] == 0
        assert sql_res[11] == 0
        assert sql_res[12] <= 10  # Depends on CPU load.
        assert sql_res[14] - sql_res[2] == 0
        assert sql_res[13] == 0
        sql_res = self.database().con.execute(\
            "select * from log where _id=17").fetchone()
        assert sql_res[1] == EventTypes.REPETITION
        assert sql_res[6] == 3
        assert sql_res[7] == 0
        assert sql_res[11] == 0
        assert sql_res[12] <= 10  # Depends on CPU load.
        # Grade 4 schedules the card into the future: positive interval.
        new_interval = sql_res[14] - sql_res[2]
        assert new_interval > 0
        assert sql_res[13] == 0
        # Shutdown of session 1, startup of session 2.
        sql_res = self.database().con.execute(\
            "select * from log where _id=18").fetchone()
        assert sql_res[1] == EventTypes.SAVED_DATABASE
        assert sql_res[6] == 0
        assert sql_res[7] == 0
        assert sql_res[8] == 1
        sql_res = self.database().con.execute(\
            "select * from log where _id=19").fetchone()
        assert sql_res[1] == EventTypes.STOPPED_PROGRAM
        sql_res = self.database().con.execute(\
            "select * from log where _id=20").fetchone()
        assert sql_res[1] == EventTypes.STARTED_PROGRAM
        sql_res = self.database().con.execute(\
            "select * from log where _id=21").fetchone()
        assert sql_res[1] == EventTypes.STARTED_SCHEDULER
        sql_res = self.database().con.execute(\
            "select * from log where _id=22").fetchone()
        assert sql_res[1] == EventTypes.LOADED_DATABASE
        assert sql_res[6] == 0
        assert sql_res[7] == 0
        assert sql_res[8] == 1
        # Session 2: second card added, then deleted.
        sql_res = self.database().con.execute(\
            "select * from log where _id=30").fetchone()
        assert sql_res[1] == EventTypes.ADDED_FACT
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(\
            "select * from log where _id=31").fetchone()
        assert sql_res[1] == EventTypes.ADDED_CARD
        sql_res = self.database().con.execute(\
            "select * from log where _id=32").fetchone()
        assert sql_res[1] == EventTypes.DELETED_CARD
        assert sql_res[3] is not None
        sql_res = self.database().con.execute(\
            "select * from log where _id=33").fetchone()
        assert sql_res[1] == EventTypes.DELETED_FACT
        assert sql_res[3] is not None
        # The science log text dump must contain the grade-4 repetition of
        # the first card with the computed interval.
        self.config()["upload_science_logs"] = True
        self.database().dump_to_science_log()
        logfile = os.path.join(os.path.abspath("dot_test"), "log.txt")
        found = False
        for line in file(logfile):  # Python 2 'file' builtin.
            if "R " + card_id_1 + " 4" in line:
                found = True
                assert str(new_interval) + " 0 | 0.0" in line
        assert found == True

    def test_unique_index(self):
        """Deleting all but the last log row must not reset the _id
        sequence (which would corrupt sync bookkeeping)."""
        fact_data = {"f": "question", "b": "answer"}
        card_type_2 = self.card_type_with_id("2")
        card_1, card_2 = self.controller().create_new_cards(fact_data,
            card_type_2, grade=-1, tag_names=["default"])
        log_index = self.database().con.execute(\
            """select _id from log order by _id desc limit 1""").fetchone()[0]
        # Note: we need to keep the last log entry intact, otherwise indexes
        # start again at 1 and mess up the sync.
        self.database().con.execute("""delete from log where _id <?""",
            (log_index,))
        self.database().con.execute("""vacuum""")
        fact_data = {"f": "question2", "b": "answer2"}
        card_type_2 = self.card_type_with_id("1")
        card_1 = self.controller().create_new_cards(fact_data, card_type_2,
            grade=-1, tag_names=["default"])
        assert self.database().con.execute(\
            """select _id from log order by _id limit 1""").fetchone()[0] \
            == log_index

    def test_recover_user_id(self):
        """With config.db gone, the user id is recovered from a history
        filename of the form '<userid>_<index>.bz2'."""
        assert self.config()["user_id"] is not None
        MnemosyneTest.teardown(self)
        # Create an empty history file whose name encodes the user id.
        file(os.path.join(os.getcwd(), "dot_test", "history",
            "userid_001.bz2"), "w")
        os.remove(os.path.join(os.getcwd(), "dot_test", "config.db"))
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(("test_logging", "MyMainWidget"))
        self.mnemosyne.components.append(("mnemosyne_test",
            "TestReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        assert self.config()["user_id"] == "userid"

    def test_recover_user_id_2(self):
        """Same recovery, but from the '<userid>_<machine>_<index>.bz2'
        filename variant."""
        assert self.config()["user_id"] is not None
        MnemosyneTest.teardown(self)
        file(os.path.join(os.getcwd(), "dot_test", "history",
            "userid_machine_001.bz2"), "w")
        os.remove(os.path.join(os.getcwd(), "dot_test", "config.db"))
        self.mnemosyne = Mnemosyne(upload_science_logs=False,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(("test_logging", "MyMainWidget"))
        self.mnemosyne.components.append(("mnemosyne_test",
            "TestReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        assert self.config()["user_id"] == "userid"

    # The following tests pin down which history filename determines the
    # index of the last uploaded log chunk: only files for *this* machine
    # (or without a machine id) count, and the highest index wins.

    def test_log_index_of_last_upload_1(self):
        # No history files at all: index is 0.
        assert self.log().log_index_of_last_upload() == 0

    def test_log_index_of_last_upload_2(self):
        machine_id = self.config().machine_id()
        for filename in ["user_001.bz2", "user_%s_2.bz2" % machine_id]:
            file(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 2

    def test_log_index_of_last_upload_3(self):
        machine_id = self.config().machine_id()
        for filename in ["user_001.bz2"]:
            file(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 1

    def test_log_index_of_last_upload_4(self):
        machine_id = self.config().machine_id()
        for filename in ["user_005.bz2"]:
            file(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 5

    def test_log_index_of_last_upload_5(self):
        # A file for a different machine must be ignored.
        machine_id = self.config().machine_id()
        for filename in ["user_othermachine_005.bz2"]:
            file(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 0

    def test_log_index_of_last_upload_6(self):
        machine_id = self.config().machine_id()
        for filename in ["user_othermachine_005.bz2",
                         "user_%s_2.bz2" % machine_id]:
            file(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 2

    def test_log_index_of_last_upload_7(self):
        machine_id = self.config().machine_id()
        for filename in ["user_001.bz2", "user_othermachine_005.bz2",
                         "user_%s_2.bz2" % machine_id]:
            file(os.path.join(os.getcwd(), "dot_test", "history",
                filename), "w")
        assert self.log().log_index_of_last_upload() == 2

    def test_log_upload(self):
        """Exercise the upload path across three restarts with a tiny
        'max_log_size_before_upload' so every restart triggers an upload.
        (Smoke test: passes if no exception is raised.)"""
        machine_id_file = os.path.join(self.mnemosyne.config().config_dir,
            "machine.id")
        f = file(machine_id_file, "w")
        print >> f, "TESTMACHINE"
        f.close()
        self.config().change_user_id("UPLOADTEST")
        self.config()["max_log_size_before_upload"] = 1
        MnemosyneTest.teardown(self)
        # Restart cycle 1.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(("test_logging", "MyMainWidget"))
        self.mnemosyne.components.append(("mnemosyne_test",
            "TestReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()
        MnemosyneTest.teardown(self)
        # Restart cycle 2.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(("test_logging", "MyMainWidget"))
        self.mnemosyne.components.append(("mnemosyne_test",
            "TestReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()
        MnemosyneTest.teardown(self)
        # Restart cycle 3.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(("test_logging", "MyMainWidget"))
        self.mnemosyne.components.append(("mnemosyne_test",
            "TestReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()

    def test_log_upload_bad_server(self):
        """Uploads against an unreachable science server must not crash
        the restart sequence."""
        # Most reliable way of setting this variable is through config.py,
        # otherwise it will stay alive in a dangling imported userconfig.
        config_py_file = os.path.join(self.mnemosyne.config().config_dir,
            "config.py")
        f = file(config_py_file, "w")
        print >> f, "science_server = \"noserver:80\""
        f.close()
        machine_id_file = os.path.join(self.mnemosyne.config().config_dir,
            "machine.id")
        f = file(machine_id_file, "w")
        print >> f, "TESTMACHINE"
        f.close()
        self.config().change_user_id("UPLOADTEST")
        self.config()["max_log_size_before_upload"] = 1
        MnemosyneTest.teardown(self)
        # Restart cycle 1.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(("test_logging", "MyMainWidget"))
        self.mnemosyne.components.append(("mnemosyne_test",
            "TestReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()
        MnemosyneTest.teardown(self)
        # Restart cycle 2.
        self.mnemosyne = Mnemosyne(upload_science_logs=True,
            interested_in_old_reps=True, asynchronous_database=True)
        self.mnemosyne.components.insert(0,
            ("mnemosyne.libmnemosyne.translators.gettext_translator",
             "GetTextTranslator"))
        self.mnemosyne.components.append(("test_logging", "MyMainWidget"))
        self.mnemosyne.components.append(("mnemosyne_test",
            "TestReviewWidget"))
        self.mnemosyne.initialise(os.path.abspath("dot_test"),
            automatic_upgrades=False)
        self.mnemosyne.start_review()

    def mem_importer(self):
        """Return the registered Mnemosyne 1.x *.mem importer component."""
        # 'format' shadows no stdlib name that matters here, kept as-is.
        for format in self.mnemosyne.component_manager.all("file_format"):
            if format.__class__.__name__ == "Mnemosyne1Mem":
                return format

    def test_archive_old_logs(self):
        """Archiving moves old log rows into a separate sqlite file,
        keeping only the recent ones in the main database."""
        # Import old history.
        filename = os.path.join(os.getcwd(), "tests", "files",
            "basedir_bz2", "default.mem")
        self.mem_importer().do_import(filename)
        assert self.database().con.execute(\
            "select count() from log").fetchone()[0] == 23
        assert not os.path.exists(os.path.join("dot_test", "archive"))
        # Archive.
        self.database().archive_old_logs()
        # 23 total = 12 kept + 11 archived.
        assert self.database().con.execute(\
            "select count() from log").fetchone()[0] == 12
        archive_name = os.listdir(os.path.join(os.getcwd(), "dot_test",
            "archive"))[0]
        archive_path = os.path.join(os.getcwd(), "dot_test", "archive",
            archive_name)
        import sqlite3
        arch_con = sqlite3.connect(archive_path)
        assert arch_con.execute(\
            "select count() from log").fetchone()[0] == 11
class MyClient(Client):

    """Benchmark sync client: reuses an existing 'dot_benchmark' data dir
    (no cleanup), performs 200 failed reviews of one card, then syncs to a
    remote benchmark server.

    NOTE(review): this variant uses an older libmnemosyne API than the
    sibling test client ('card_type_by_id', 'new_question', fact keys
    'q'/'a', 'activity_criteria' module path) -- confirm which library
    version it targets before changing.
    """

    program_name = "Mnemosyne"
    program_version = "test"
    capabilities = "TODO"

    def __init__(self):
        self.mnemosyne = Mnemosyne()
        # Explicit component list so the benchmark runs without a GUI.
        # Order matters to the component manager -- do not reorder.
        self.mnemosyne.components = [
            ("mnemosyne.libmnemosyne.translator", "NoTranslation"),
            ("mnemosyne.libmnemosyne.databases.SQLite", "SQLite"),
            ("mnemosyne.libmnemosyne.configuration", "Configuration"),
            ("mnemosyne.libmnemosyne.loggers.database_logger",
             "DatabaseLogger"),
            ("mnemosyne.libmnemosyne.schedulers.SM2_mnemosyne",
             "SM2Mnemosyne"),
            ("mnemosyne.libmnemosyne.stopwatch", "Stopwatch"),
            ("mnemosyne.libmnemosyne.card_types.front_to_back",
             "FrontToBack"),
            ("mnemosyne.libmnemosyne.card_types.both_ways", "BothWays"),
            ("mnemosyne.libmnemosyne.card_types.three_sided", "ThreeSided"),
            ("mnemosyne.libmnemosyne.renderers.html_css_old", "HtmlCssOld"),
            ("mnemosyne.libmnemosyne.filters.escape_to_html", "EscapeToHtml"),
            ("mnemosyne.libmnemosyne.filters.expand_paths", "ExpandPaths"),
            ("mnemosyne.libmnemosyne.filters.latex", "Latex"),
            ("mnemosyne.libmnemosyne.controllers.default_controller",
             "DefaultController"),
            ("mnemosyne.libmnemosyne.review_controllers.SM2_controller",
             "SM2Controller"),
            ("mnemosyne.libmnemosyne.card_types.map", "MapPlugin"),
            ("mnemosyne.libmnemosyne.card_types.cloze", "ClozePlugin"),
            ("mnemosyne.libmnemosyne.activity_criteria.default_criterion",
             "DefaultCriterion"),
            ("mnemosyne.libmnemosyne.databases.SQLite_criterion_applier",
             "DefaultCriterionApplier"),
            ("mnemosyne.libmnemosyne.plugins.cramming_plugin",
             "CrammingPlugin")
            ]
        self.mnemosyne.components.append(("benchmark_sync_client", "Widget"))
        self.mnemosyne.components.append(\
            ("mnemosyne.libmnemosyne.ui_components.review_widget",
             "ReviewWidget"))
        self.mnemosyne.components.append(\
            ("mnemosyne.libmnemosyne.ui_components.dialogs",
             "ProgressDialog"))
        # os.getcwdu(): Python 2 unicode cwd.
        self.mnemosyne.initialise(os.path.abspath(os.path.join(os.getcwdu(),
            "dot_benchmark")))
        self.mnemosyne.config().change_user_id("user_id")
        self.mnemosyne.review_controller().reset()
        # Do 200 reviews.
        card_type = self.mnemosyne.card_type_by_id("1")
        fact_data = {"q": "question", "a": "answer"}
        card = self.mnemosyne.controller().create_new_cards(fact_data,
            card_type, grade=-1, tag_names=["default"])[0]
        self.mnemosyne.database().save()
        self.mnemosyne.review_controller().new_question()
        # Grade 0 every time so the same card keeps coming back and all 200
        # repetitions are logged for the benchmark payload.
        for i in range(200):
            self.mnemosyne.review_controller().show_answer()
            self.mnemosyne.review_controller().grade_answer(0)
        Client.__init__(self, "client_machine_id", self.mnemosyne.database(),
            self.mnemosyne.main_widget())

    def do_sync(self):
        """Sync against the hard-coded benchmark server and persist."""
        self.sync("192.168.2.54", 8186, "user", "pass")
        self.mnemosyne.database().save()