Code Example #1
 def card_from_log_entry(self, log_entry):
     # We should not receive cards with question and answer data, only
     # cards based on facts.
     if "f" in log_entry:
         raise AttributeError
     # Get card object to be deleted now.
     if log_entry["type"] == EventTypes.DELETED_CARD:
         try:
             return self.card(log_entry["o_id"], is_id_internal=False)
         except MnemosyneError:  # There is no fact in the database.
             # We have created and deleted this card since the last sync,
             # so we just return an empty shell.
             card_type = self.card_type_with_id("1")
             fact = Fact({"f": "f", "b": "b"}, id="")
             card = Card(card_type,
                         fact,
                         card_type.fact_views[0],
                         creation_time=0)
             card._id = None  # Signals special case to 'delete_card'.
             card.id = log_entry["o_id"]
             return card
     # Create an empty shell of card object that will be deleted later
     # during this sync.
     if "tags" not in log_entry:
         card_type = self.card_type_with_id("1")
         fact = Fact({"f": "f", "b": "b"}, id="")
         card = Card(card_type,
                     fact,
                     card_type.fact_views[0],
                     creation_time=0)
         card.id = log_entry["o_id"]
         return card
     # Create card object.
     if "card_t" not in log_entry:
         # Client only supports simple cards.
         card_type = self.card_type_with_id("1")
     else:
         if log_entry["card_t"] not in \
             self.component_manager.card_type_with_id:
             # If the card type is not in the database, it's possible that
             # the data for this card type will follow later during the
             # sync. In that case, create a dummy card type here, which
             # will be corrected by a later edit event. However, we still
             # need to instantiate this card type later, so that we can
             # catch errors, e.g. due to bad plugins.
             try:
                 self.activate_plugins_for_card_type_with_id\
                     (log_entry["card_t"])
                 card_type = self.card_type_with_id\
                     (log_entry["card_t"])
             except:
                 self.card_types_to_instantiate_later.add(\
                     log_entry["card_t"])
                 card_type = self.card_type_with_id("1")
                 log_entry["fact_v"] = card_type.fact_views[0].id
         else:
             card_type = self.card_type_with_id(log_entry["card_t"])
     fact = self.fact(log_entry["fact"], is_id_internal=False)
     # When importing without learning data, set the creation and
     # modification times to the current time.
     if self.importing and not self.importing_with_learning_data:
         log_entry["c_time"] = int(time.time())
         log_entry["m_time"] = int(time.time())
     for fact_view in card_type.fact_views:
         if fact_view.id == log_entry["fact_v"]:
             card = Card(card_type,
                         fact,
                         fact_view,
                         creation_time=log_entry["c_time"])
             break
     for tag_id in log_entry["tags"].split(","):
         if self.has_tag_with_id(tag_id):
             card.tags.add(self.tag(tag_id, is_id_internal=False))
         else:
             # The tag has been deleted later during the log. Don't
             # worry about it now, this will be corrected by a later
             # EDITED_CARD event.
             pass
     if self.importing:
         if len(self.extra_tags_on_import) != 0:
             card.tags.discard(\
                 self.tag("__UNTAGGED__", is_id_internal=False))
         for tag in self.extra_tags_on_import:
             card.tags.add(tag)
     # Construct rest of card. The 'active' property does not need to be
     # handled here, as the default criterion will be applied to the card
     # in the database functions.
     card.id = log_entry["o_id"]
     if (log_entry["type"] != EventTypes.ADDED_CARD) or self.importing:
         if self.has_card_with_id(card.id):
             card._id = self.con.execute("select _id from cards where id=?",
                                         (card.id, )).fetchone()[0]
         else:
             # Importing a card for the first time, so it is not yet in the
             # database.
             pass
     card.modification_time = log_entry["m_time"]
     card.grade = log_entry["gr"]
     card.easiness = log_entry["e"]
     card.acq_reps = log_entry["ac_rp"]
     card.ret_reps = log_entry["rt_rp"]
     card.lapses = log_entry["lps"]
     card.acq_reps_since_lapse = log_entry["ac_rp_l"]
     card.ret_reps_since_lapse = log_entry["rt_rp_l"]
     card.last_rep = log_entry["l_rp"]
     card.next_rep = log_entry["n_rp"]
     if "sch_data" in log_entry:
         card.scheduler_data = log_entry["sch_data"]
     if "extra" in log_entry:
         card.extra_data = eval(log_entry["extra"])
     return card
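
For orientation, here is a minimal sketch of the kind of log_entry dictionary this method consumes for an ADDED_CARD event. The abbreviated keys are the ones the code above reads; every value is purely illustrative and the ids are hypothetical.

log_entry = {
    "type": EventTypes.ADDED_CARD,
    "o_id": "e1f2a3b4",          # external card id
    "card_t": "1",               # card type id ("1" = simple front/back cards)
    "fact": "9b2c7d1e",          # external id of the underlying fact
    "fact_v": "1.1",             # id of the fact view used to render the card
    "tags": "tag_id_1,tag_id_2", # comma-separated external tag ids
    "c_time": 1278340800, "m_time": 1278340800,
    "gr": 2, "e": 2.5,
    "ac_rp": 1, "rt_rp": 5, "lps": 0,
    "ac_rp_l": 1, "rt_rp_l": 5,
    "l_rp": 1278254400, "n_rp": 1278513600,
}
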
Code Example #2
File: SQLite_sync.py  Project: bartosh/pomni
 def card_from_log_entry(self, log_entry):
     # We should not receive cards with question and answer data, only
     # cards based on facts.
     if "q" in log_entry:
         raise AttributeError
     # Get card object to be deleted now.
     if log_entry["type"] == EventTypes.DELETED_CARD:
         try:
             # More future-proof version of code in the except statement.
             # However, this will fail if the other partner created and
             # deleted this card after the last sync, so that there is no
             # fact information.
             return self.get_card(log_entry["o_id"], id_is_internal=False)
         except TypeError:
             # Less future-proof version which just returns an empty shell.
             # Make sure to set _id, though, as that will be used in
             # actually deleting the card.
             sql_res = self.con.execute("select * from cards where id=?",
                 (log_entry["o_id"], )).fetchone()
             card_type = self.card_type_by_id("1")
             fact = Fact({}, card_type, creation_time=0, id="")
             card = Card(fact, card_type.fact_views[0])
             card._id = sql_res["_id"]
             return card
     # Create an empty shell of card object that will be deleted later
     # during this sync.
     if "tags" not in log_entry:
         card_type = self.card_type_by_id("1")
         fact = Fact({}, card_type, creation_time=0, id="")
         card = Card(fact, card_type.fact_views[0])
         card.id = log_entry["o_id"]
         return card
     # Create card object.
     fact = self.get_fact(log_entry["fact"], id_is_internal=False)
     for fact_view in fact.card_type.fact_views:
         if fact_view.id == log_entry["fact_v"]:
             card = Card(fact, fact_view)
             break
     for tag_id in log_entry["tags"].split(","):
         try:
             card.tags.add(self.get_tag(tag_id, id_is_internal=False))
         except TypeError:
             # The tag has been deleted later during the log. Don't worry
             # about it now, this will be corrected by a later
             # UPDATED_CARD event.
             pass
     card.id = log_entry["o_id"]
     if log_entry["type"] != EventTypes.ADDED_CARD:
         card._id = self.con.execute("select _id from cards where id=?",
             (card.id, )).fetchone()[0]      
     card.active = True
     card.grade = log_entry["gr"]
     card.easiness = log_entry["e"]
     card.acq_reps = log_entry["ac_rp"]
     card.ret_reps = log_entry["rt_rp"]
     card.lapses = log_entry["lps"]
     card.acq_reps_since_lapse = log_entry["ac_rp_l"]
     card.ret_reps_since_lapse = log_entry["rt_rp_l"]
     card.last_rep = log_entry["l_rp"]
     card.next_rep = log_entry["n_rp"]
     if "sch_data" in log_entry:
         card.scheduler_data = log_entry["sch_data"]   
     if "extra" in log_entry:
         card.extra_data = eval(log_entry["extra"])
     return card
Code Example #3
    def do_import(self, filename, extra_tag_names=""):
        self.main_widget().show_information(_(\
"Note that while you can edit imported cards, adding new cards to Anki's card types is currently not supported.\n\nAlso, in case you run into problems, don't hesitate to contact the developers."))
        FileFormat.do_import(self, filename, extra_tag_names)
        w = self.main_widget()
        db = self.database()
        # Preprocess apkg files.
        tmp_dir = None
        if filename.endswith(".apkg"):
            tmp_dir = self.extract_apkg(filename)
            filename = os.path.join(tmp_dir, "collection.anki2")
        # Set up tag cache.
        tag_with_name = TagCache(self.component_manager)
        # Open database.
        con = sqlite3.connect(filename)
        # Copy media directory.
        w.set_progress_text(_("Copying media files..."))
        src = filename.replace(".anki2", ".media")
        dst = db.media_dir()
        number_of_files = len(os.listdir(src))
        w.set_progress_range(number_of_files)
        w.set_progress_update_interval(number_of_files / 50)
        for item in os.listdir(src):
            shutil.copy(os.path.join(src, item), os.path.join(dst, item))
            w.increase_progress(1)
        # Import collection table.
        w.set_progress_text(_("Importing card types..."))
        # Too few in number to warrant a counted progress bar.
        card_type_for_mid = {}  # mid: model id
        deck_name_for_did = {}  # did: deck id
        for id, crt, mod, scm, ver, dty, usn, ls, conf, models, decks, \
            dconf, tags in con.execute("""select id, crt, mod, scm, ver, dty,
            usn, ls, conf, models, decks, dconf, tags from col"""):
            # mod: modification time, ignore.
            # scm: schema modification time, ignore.
            # ver: schema version, ignore.
            # dty: no longer used according to Anki source.
            # usn: syncing related, ignore.
            # ls: last sync, ignore.
            # conf: configuration, ignore.
            # dconf: deck configuration, ignore.
            # tags: list of tags, but they turn up later in the notes, ignore.
            collection_creation_time = crt
            decks = json.loads(decks)
            # Decks will be converted to Tags when creating cards.
            for did in decks:
                deck_name_for_did[int(did)] = decks[did]["name"]
            # Models will be converted to CardTypes
            models = json.loads(models)
            for mid in models:  # mid: model id
                card_type_id = "7::" + mid
                card_type_already_imported = \
                    db.has_card_type_with_id(card_type_id)
                if card_type_already_imported:
                    card_type = self.component_manager.card_type_with_id[\
                        card_type_id]
                else:
                    card_type = MSided(self.component_manager)
                card_type.name = models[mid]["name"]
                card_type.id = card_type_id
                card_type.hidden_from_UI = False
                card_type_for_mid[int(mid)] = card_type
                vers = models[mid]["vers"]  # Version, ignore.
                tags = models[mid]["tags"]  # Seems empty, ignore.
                did = models[mid]["did"]  # Deck id, ignore.
                usn = models[mid]["usn"]  # Syncing related, ignore.
                if "req" in models[mid]:
                    required = models[mid]["req"]
                    # Cache for a calculation to determine which fields are
                    # required. "req": [[0, "all", [0]]]
                    # Not yet implemented.
                else:
                    required = []
                flds = models[mid]["flds"]
                flds.sort(key=lambda x: x["ord"])
                card_type.fact_keys_and_names = []
                for field in flds:
                    card_type.fact_keys_and_names.append(\
                        (str(field["ord"]), field["name"]))
                    media = field["media"]  # Reserved for future use, ignore.
                    sticky = field["sticky"]  # Sticky field, ignore.
                    rtl = field["rtl"]  # Text direction, ignore.
                    font_string = field["font"] + "," + str(field["size"]) + \
                        ",-1,5,50,0,0,0,0,0,Regular"
                    self.config().set_card_type_property(
                        "font", font_string, card_type, str(field["ord"]))
                sortf = models[mid]["sortf"]  # Sorting field, ignore.
                tmpls = models[mid]["tmpls"]
                tmpls.sort(key=lambda x: x["ord"])
                # Fact views.
                card_type.fact_views = []
                for template in tmpls:
                    fact_view_id = card_type.id + "." + str(template["ord"])
                    fact_view_already_imported = \
                        db.has_fact_view_with_id(fact_view_id)
                    if fact_view_already_imported:
                        fact_view = db.fact_view(\
                            fact_view_id, is_id_internal=False)
                        fact_view.name = template["name"]
                    else:
                        fact_view = FactView(template["name"], fact_view_id)
                    fact_view.extra_data["qfmt"] = template["qfmt"]
                    fact_view.extra_data["afmt"] = template["afmt"]
                    fact_view.extra_data["bqfmt"] = template["bqfmt"]
                    fact_view.extra_data["bafmt"] = template["bafmt"]
                    fact_view.extra_data["ord"] = template["ord"]
                    did = template["did"]  # Deck id, ignore.
                    card_type.fact_views.append(fact_view)
                    if fact_view_already_imported:
                        db.update_fact_view(fact_view)
                    else:
                        db.add_fact_view(fact_view)
                mod = models[mid]["mod"]  # Modification time, ignore.
                type_ = models[mid]["type"]  # 0: standard, 1: cloze
                id = models[mid]["id"]
                css = models[mid]["css"]
                latex_preamble = models[mid]["latexPre"]  # Ignore.
                latex_postamble = models[mid]["latexPost"]  # Ignore.
                # Save to database.
                card_type.extra_data = {"css": css, "id": id, "type": type_}
                if card_type_already_imported:
                    db.update_card_type(card_type)
                else:
                    db.add_card_type(card_type)
        # nids are Anki-internal indices for notes, so we need to temporarily
        # store some information.
        tag_names_for_nid = {}
        card_type_for_nid = {}
        # Import facts and tags.
        w.set_progress_text(_("Importing notes..."))
        number_of_notes = con.execute(
            "select count() from notes").fetchone()[0]
        w.set_progress_range(number_of_notes)
        w.set_progress_update_interval(number_of_notes / 20)
        fact_for_nid = {}
        modification_time_for_nid = {}
        for id, guid, mid, mod, usn, tags, flds, sfld, csum, flags, data in \
            con.execute("""select id, guid, mid, mod, usn, tags, flds, sfld,
            csum, flags, data from notes"""):
            # usn: syncing related, ignore.
            # sfld: sorting field, ignore.
            # csum: checksum, ignore.
            # flags: seems empty, ignore.
            # data: seems empty, ignore.
            # Make compatible with openSM2sync:
            guid = guid.replace("`", "ap").replace("\"", "qu")
            guid = guid.replace("&", "am").replace("<",
                                                   "lt").replace(">", "gt")
            modification_time_for_nid[id] = mod
            card_type = card_type_for_mid[int(mid)]
            card_type_for_nid[id] = card_type
            fields = flds.split("\x1f")
            assert (len(fields) == len(card_type.fact_keys_and_names))
            fact_data = {}
            for i in range(len(fields)):
                fact_key = card_type.fact_keys_and_names[i][0]
                data = fields[i]
                # Deal with sound tags.
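                # For instance, an Anki tag such as "[sound:voice.mp3]" would
                # be rewritten below as '<audio src="voice.mp3">' (illustrative
                # file name; sound_re is assumed to expose the file name in its
                # "fname" group).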
                for match in sound_re.finditer(data):
                    fname = match.group("fname")
                    data = data.replace(\
                        match.group(0), "<audio src=\"" + fname + "\">")
                # Deal with latex tags.
                data = data.replace("[latex]", "<latex>")
                data = data.replace("[/latex]", "</latex>")
                data = data.replace("[$]", "<$>")
                data = data.replace("[/$]", "</$>")
                data = data.replace("[$$]", "<$$>")
                data = data.replace("[/$$]", "</$$>")
                fact_data[fact_key] = data
            if db.has_fact_with_id(guid):
                fact = db.fact(guid, is_id_internal=False)
                fact.data = fact_data
                db.update_fact(fact)
            else:
                fact = Fact(fact_data, id=guid)
                db.add_fact(fact)
            fact_for_nid[id] = fact
            tag_names_for_nid[id] = tags
            w.increase_progress(1)
        # Import logs. This needs to happen before creating the cards,
        # otherwise, the sync protocol will use the scheduling data from the
        # latest repetition log, instead of the correct current one.
        w.set_progress_text(_("Importing logs..."))
        number_of_logs = con.execute(
            "select count() from revlog").fetchone()[0]
        w.set_progress_range(number_of_logs)
        w.set_progress_update_interval(number_of_logs / 20)
        for id, cid, usn, ease, ivl, lastIvl, factor, time, type_ in \
            con.execute("""select id, cid, usn, ease, ivl, lastIvl, factor,
            time, type from revlog"""):
            # usn: syncing related, ignore.
            if type_ == 0:  # Acquisition phase.
                grade = 0
            else:  # Retention phase.
                grade = ease + 1  # Anki ease is from 1 to 4.
            timestamp = int(id / 1000)
            scheduled_interval = lastIvl * 86400 if lastIvl > 0 else 0
            new_interval = ivl * 86400 if ivl > 0 else 0
            next_rep = timestamp + new_interval
            easiness = factor / 1000 if factor else 2.5
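            # Worked example (illustrative values): a revlog row with lastIvl=5,
            # ivl=10 and factor=2500 maps to a scheduled interval of 432000 s,
            # a new interval of 864000 s and an easiness of 2.5. Revlog ids are
            # millisecond timestamps, hence the division by 1000 above.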
            db.log_repetition(timestamp=timestamp,
                              card_id=cid,
                              grade=grade,
                              easiness=easiness,
                              acq_reps=0,
                              ret_reps=0,
                              lapses=0,
                              acq_reps_since_lapse=0,
                              ret_reps_since_lapse=0,
                              scheduled_interval=scheduled_interval,
                              actual_interval=scheduled_interval,
                              thinking_time=int(time / 1000),
                              next_rep=next_rep,
                              scheduler_data=0)
            w.increase_progress(1)
        # Import cards.
        w.set_progress_text(_("Importing cards..."))
        number_of_cards = con.execute(
            "select count() from cards").fetchone()[0]
        w.set_progress_range(number_of_cards)
        w.set_progress_update_interval(number_of_cards / 20)
        for id, nid, did, ord, mod, usn, type_, queue, due, ivl, factor, reps, \
            lapses, left, odue, odid, flags, data in con.execute("""select id,
            nid, did, ord, mod, usn, type, queue, due, ivl, factor, reps,
            lapses, left, odue, odid, flags, data from cards"""):
            # type: 0=new, 1=learning, 2=due
            # queue: same as above, and -1=suspended,
            #        -2=user buried, -3=sched buried
            # due is used differently for different queues.
            # - new queue: note id or random int
            # - rev queue: integer day
            # - lrn queue: integer timestamp
            # In Mnemosyne, type=2 / rev queue corresponds to grades >= 2.
            # mod: modification time, but gets updated on each answer.
            # usn: syncing related, ignore.
            # left: repetitions left to graduation, ignore.
            # odue: original due, related to filtered decks, ignore.
            # odid: original deck id, related to filtered decks, ignore.
            # flags: seems empty, ignore.
            # data: seems empty, ignore
            fact = fact_for_nid[nid]
            card_type = card_type_for_nid[nid]
            creation_time = int(nid / 1000)
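            # Anki note ids are millisecond epoch timestamps, so dividing by
            # 1000 recovers the note's creation time in seconds.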
            if card_type.extra_data["type"] == 0:
                fact_view = card_type.fact_views[ord]
            else:  # Cloze.
                fact_view = card_type.fact_views[0]
            already_imported = db.has_card_with_id(id)
            if already_imported:
                card = db.card(id, is_id_internal=False)
                card.card_type = card_type
                card.fact = fact
                card.fact_view = fact_view
                card.creation_time = creation_time
            else:
                card = Card(card_type,
                            fact,
                            fact_view,
                            creation_time=creation_time)
            card.id = id
            card.extra_data["ord"] = ord  # Needed separately for clozes.
            tag_names = [tag_name.strip() for \
                             tag_name in extra_tag_names.split(",")]
            tag_names += [tag_name.strip() for \
                             tag_name in tag_names_for_nid[nid].split(" ")]
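            # The deck name is added as an ordinary tag below; its commas are
            # replaced with semicolons because tag lists are comma-separated.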
            tag_names += [deck_name_for_did[did].strip().replace(",", ";")]
            for tag_name in tag_names:
                if tag_name:
                    card.tags.add(tag_with_name[tag_name])
            card.next_rep = collection_creation_time + due * 86400
            card.last_rep = card.next_rep - ivl * 86400
            card.easiness = factor / 1000 if factor else 2.5
            card.acq_reps = 1  # No information.
            card.ret_reps = reps
            card.lapses = lapses
            card.acq_reps_since_lapse = card.acq_reps  # No information.
            card.ret_reps_since_lapse = card.ret_reps  # No information.
            card.modification_time = modification_time_for_nid[nid]
            card.active = (queue >= 0)
            if type_ == 0:  # 'new', unseen.
                card.reset_learning_data()
            elif type_ == 1:  # 'learning', acquisition phase.
                card.grade = 0
                card.last_rep = mod
                card.next_rep = mod
            else:  # 'due', retention phase.
                card.grade = 4  # No information.
            if card.grade >= 2:
                assert card.ret_reps_since_lapse != 0  # Issue #93 on github.
            if already_imported:
                db.update_card(card)
            else:
                db.add_card(card)
            w.increase_progress(1)
        # Create criteria for 'database' tags.
        for deck_name in deck_name_for_did.values():
            deck_name = deck_name.strip().replace(",", ";")
            if deck_name in [criterion.name for criterion in db.criteria()]:
                continue
            tag = tag_with_name[deck_name]
            criterion = DefaultCriterion(\
                component_manager=self.component_manager)
            criterion.name = deck_name
            criterion._tag_ids_active.add(tag._id)
            criterion._tag_ids_forbidden = set()
            db.add_criterion(criterion)
        # Clean up.
        con.close()
        if tmp_dir:
            shutil.rmtree(tmp_dir)
        w.close_progress()
        self.warned_about_missing_media = False
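
The importer above relies on a module-level sound_re pattern that is not shown in this excerpt. Below is a minimal sketch of what such a pattern could look like, assuming Anki's [sound:filename] markup and the "fname" group name used in the note-import loop; it is not taken from the original source.

import re

# Assumed definition: captures the file name of an Anki [sound:...] tag in the
# "fname" group consumed by do_import above.
sound_re = re.compile(r"\[sound:(?P<fname>.*?)\]")
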
Code Example #4
File: SQLite_sync.py  Project: tbabej/mnemosyne
 def card_from_log_entry(self, log_entry):
     # We should not receive cards with question and answer data, only
     # cards based on facts.
     if "f" in log_entry:
         raise AttributeError
     # Get card object to be deleted now.
     if log_entry["type"] == EventTypes.DELETED_CARD:
         try:
             return self.card(log_entry["o_id"], is_id_internal=False)
         except MnemosyneError:  # There is no fact in the database.
             # We have created and deleted this card since the last sync,
             # so we just return an empty shell.                              
             card_type = self.card_type_with_id("1")
             fact = Fact({"f": "f", "b": "b"}, id="")
             card = Card(card_type, fact, card_type.fact_views[0],
                 creation_time=0)
             card._id = None # Signals special case to 'delete_card'.
             card.id = log_entry["o_id"]
             return card
     # Create an empty shell of card object that will be deleted later
     # during this sync.
     if "tags" not in log_entry:
         card_type = self.card_type_with_id("1")
         fact = Fact({"f": "f", "b": "b"}, id="")
         card = Card(card_type, fact, card_type.fact_views[0],
             creation_time=0)
         card.id = log_entry["o_id"]
         return card
     # Create card object.
     if "card_t" not in log_entry:
         # Client only supports simple cards.
         card_type = self.card_type_with_id("1")
     else:
         if log_entry["card_t"] not in \
             self.component_manager.card_type_with_id:
             # If the card type is not in the database, it's possible that
             # the data for this card type will follow later during the 
             # sync. In that case, create a dummy card type here, which 
             # will be corrected by a later edit event. However, we still
             # need to instantiate this card type later, so that we can 
             # catch errors, e.g. due to bad plugins.
             try:
                 self.activate_plugins_for_card_type_with_id\
                     (log_entry["card_t"])
                 card_type = self.card_type_with_id\
                     (log_entry["card_t"])
             except:
                 self.card_types_to_instantiate_later.add(\
                     log_entry["card_t"])
                 card_type = self.card_type_with_id("1")
                 log_entry["fact_v"] = card_type.fact_views[0].id
         else:
             card_type = self.card_type_with_id(log_entry["card_t"])
     fact = self.fact(log_entry["fact"], is_id_internal=False)
     # When importing without learning data, set the creation and
     # modification times to the current time.
     if self.importing and not self.importing_with_learning_data:
         log_entry["c_time"] = int(time.time())
         log_entry["m_time"] = int(time.time())
     for fact_view in card_type.fact_views:
         if fact_view.id == log_entry["fact_v"]:
             card = Card(card_type, fact, fact_view,
                 creation_time=log_entry["c_time"])
             break
     for tag_id in log_entry["tags"].split(","):
         if self.has_tag_with_id(tag_id):
             card.tags.add(self.tag(tag_id, is_id_internal=False))
         else:
             # The tag has been deleted later during the log. Don't
             # worry about it now, this will be corrected by a later
             # EDITED_CARD event.
             pass
     if self.importing:
         if len(self.extra_tags_on_import) != 0:
             card.tags.discard(\
                 self.tag("__UNTAGGED__", is_id_internal=False))
         for tag in self.extra_tags_on_import:
             card.tags.add(tag)
     # Construct rest of card. The 'active' property does not need to be
     # handled here, as the default criterion will be applied to the card
     # in the database functions.
     card.id = log_entry["o_id"]
     if (log_entry["type"] != EventTypes.ADDED_CARD) or self.importing:
         if self.has_card_with_id(card.id):
             card._id = self.con.execute("select _id from cards where id=?",
                 (card.id, )).fetchone()[0]
         else:
             # Importing a card for the first time, so it is not yet in the
             # database.
             pass
     card.modification_time = log_entry["m_time"]
     card.grade = log_entry["gr"]
     card.easiness = log_entry["e"]
     card.acq_reps = log_entry["ac_rp"]
     card.ret_reps = log_entry["rt_rp"]
     card.lapses = log_entry["lps"]
     card.acq_reps_since_lapse = log_entry["ac_rp_l"]
     card.ret_reps_since_lapse = log_entry["rt_rp_l"]
     card.last_rep = log_entry["l_rp"]
     card.next_rep = log_entry["n_rp"]
     if "sch_data" in log_entry:
         card.scheduler_data = log_entry["sch_data"]
     if "extra" in log_entry:
         card.extra_data = eval(log_entry["extra"])
     return card
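
As a closing usage sketch for the DELETED_CARD branch above (db stands for a hypothetical instance of the database class that defines this method; the entry contents are illustrative):

entry = {"type": EventTypes.DELETED_CARD, "o_id": "e1f2a3b4"}
shell = db.card_from_log_entry(entry)
# If the card was created and deleted since the last sync, the returned shell's
# _id is None, which 'delete_card' treats as a special case.
assert shell.id == "e1f2a3b4"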