def processDeckTransactions_FORK(self, data):
    orig_did = self.getDID(data["on"])
    # orig_deck = mw.col.decks.get(orig_did)
    new_did = mw.col.decks.id(data["data"]["name"])
    new_deck = mw.col.decks.get(new_did)
    for cid in mw.col.decks.cids(orig_did):
        card = mw.col.getCard(cid)
        note = card.note(reload=True)
        model = note.model()
        createNewModel = False  # create new model?
        if createNewModel:
            new_model = mw.col.models.copy(model)  # models.copy saves
        new_note = copy.deepcopy(note)
        new_note.col = note.col
        new_note.id = timestampID(mw.col.db, "notes")
        new_note.guid = guid64()
        if createNewModel:
            new_note._model = new_model
            new_note.mid = new_model['id']
        new_note.flush()
        new_card = copy.deepcopy(card)
        new_card.col = card.col
        new_card.id = timestampID(mw.col.db, "cards")
        new_card.crt = intTime()
        new_card.did = new_did
        new_card.nid = new_note.id
        new_card.flush()
    mw.col.decks.save(new_deck)
    mw.col.decks.flush()
def copyCards(nids, review):
    mw.checkpoint("Copy Notes")
    mw.progress.start()
    # Copy notes
    for nid in nids:
        print "Found note: %s" % (nid)
        note = mw.col.getNote(nid)
        model = note._model
        # Create new note
        note_copy = anki.notes.Note(mw.col, model=model)
        # Copy tags and fields (all model fields) from original note
        note_copy.tags = note.tags
        note_copy.fields = note.fields
        note_copy.id = timestampID(note.col.db, "notes", note.id)
        # Refresh note and add to database
        note_copy.flush()
        mw.col.addNote(note_copy)
        nid_copy = note_copy.id
        cards_copy = note_copy.cards()
        cards = note.cards()
        ord_to_card = {card.ord: card for card in cards}
        ord_to_card_copy = {card.ord: card for card in cards_copy}
        if review:
            for card in cards:
                ord = card.ord
                card_copy = ord_to_card_copy.get(ord)
                if card_copy:
                    card.id = card_copy.id
                    card.nid = nid_copy
                else:
                    tooltip("Copying a card that should not exist.")
                    card.id = timestampID(mw.col.db, "cards")
                    card.nid = nid_copy
                card.flush()
        else:
            for card_copy in cards_copy:
                ord = card_copy.ord
                card = ord_to_card.get(ord)
                if card:
                    card_copy.did = card.odid or card.did
                card_copy.flush()
    # Reset collection and main window
    mw.col.reset()
    mw.reset()
    tooltip(_("""Cards copied."""))
def __init__(self, col: "anki.collection._Collection", id: Optional[int] = None) -> None: self.col = col self.timerStarted = None self._qa = None self._note = None if id: self.id = id self.load() else: # to flush, set nid, ord, and due self.id = timestampID(col.db, "cards") self.did = 1 self.crt = intTime() self.type = 0 self.queue = 0 self.ivl = 0 self.factor = 0 self.reps = 0 self.lapses = 0 self.left = 0 self.odue = 0 self.odid = 0 self.flags = 0 self.data = ""
def __init__(self, col, model=None, id=None):
    """A note. Exactly one of model and id should be set, not both.

    Keyword arguments:
    id -- a note id. In this case, the existing note with this id is loaded.
    model -- a model object. In this case, a new note using this model is created.
    """
    assert not (model and id)
    self.col = col
    if id:
        self.id = id
        self.load()
    else:
        self.id = timestampID(col.db, "notes")
        self.guid = guid64()
        self._model = model
        self.mid = model['id']
        self.tags = []
        self.fields = [""] * len(self._model['flds'])
        self.flags = 0
        self.data = ""
        self._fmap = self.col.models.fieldMap(self._model)
        self.scm = self.col.scm
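# --- Illustrative usage sketch (editor's addition, not part of the snippets
# above). It shows how the legacy Note constructor is typically used: build a
# note from a model, fill its fields, then add it to the collection. `col` is
# assumed to be an open legacy collection and "Basic" an existing note type.
model = col.models.byName("Basic")
note = anki.notes.Note(col, model=model)
note.fields[0] = "front text"
note.fields[1] = "back text"
col.addNote(note)  # flushes the note and generates its cards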
def __init__(self, col, id=None):
    """A card object from the collection given in argument.

    If an id is given, the card with this id is loaded from the
    collection. Otherwise a new card, assumed to belong to this
    collection, is created.

    Keyword arguments:
    col -- a collection
    id -- an identifier of a card. Int.
    """
    self.col = col
    self.timerStarted = None
    self._qa = None
    self._note = None
    if id:
        self.id = id
        self.load()
    else:
        # to flush, set nid, ord, and due
        self.id = timestampID(col.db, "cards")
        self.did = 1
        self.crt = intTime()
        self.type = CARD_NEW
        self.queue = QUEUE_NEW_CRAM
        self.ivl = 0
        self.factor = 0
        self.reps = 0
        self.lapses = 0
        self.left = 0
        self.odue = 0
        self.odid = 0
        self.flags = 0
        self.data = ""
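# --- Illustrative sketch (editor's addition): what the "to flush, set nid,
# ord, and due" comment in the constructors above implies in practice. The
# names `col` and `note` are assumed to exist; the concrete values are only
# reasonable choices for a sketch, not the stock card-generation logic.
card = anki.cards.Card(col)      # new card with a fresh timestampID
card.nid = note.id               # note the card belongs to
card.ord = 0                     # template ordinal within the note type
card.due = col.nextID("pos")     # position in the new queue
card.did = col.decks.selected()  # override the default deck 1 if desired
card.flush()                     # write the new row to the cards table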
def flush(self, mod=None):
    "If fields or tags have changed, write changes to disk."
    assert self.scm == self.col.scm
    self.newlyAdded = (self.id is None)
    sfld = stripHTMLMedia(self.fields[self.col.models.sortIdx(self._model)])
    tags = self.stringTags()
    fields = self.joinedFields()
    if not mod and self.col.db.scalar(
            "select 1 from notes where id = ? and tags = ? and flds = ?",
            self.id, tags, fields):
        return
    csum = fieldChecksum(self.fields[0])
    self.mod = mod if mod else intTime()
    self.usn = self.col.usn()
    if self.id is None:
        self.id = timestampID(self.col.db, "notes")
        self.col.db.execute(
            """insert into notes values (?,?,?,?,?,?,?,?,?,?,?)""",
            self.id, self.guid, self.mid, self.mod, self.usn, tags,
            fields, sfld, csum, self.flags, self.data)
    else:
        self.col.db.execute(
            """update notes set guid=?, mid=?, mod=?, usn=?, tags=?, flds=?, sfld=?, csum=?, flags=?, data=? where id = ?""",
            self.guid, self.mid, self.mod, self.usn, tags,
            fields, sfld, csum, self.flags, self.data, self.id)
    self.col.tags.register(self.tags)
    self._postFlush()
def __init__(
    self, col: anki.collection._Collection, id: Optional[int] = None
) -> None:
    self.col = col.weakref()
    self.timerStarted = None
    self._render_output: Optional[anki.template.TemplateRenderOutput] = None
    self._note = None
    if id:
        self.id = id
        self.load()
    else:
        # to flush, set nid, ord, and due
        self.id = timestampID(col.db, "cards")
        self.did = 1
        self.crt = intTime()
        self.type = CARD_TYPE_NEW
        self.queue = QUEUE_TYPE_NEW
        self.ivl = 0
        self.factor = 0
        self.reps = 0
        self.lapses = 0
        self.left = 0
        self.odue = 0
        self.odid = 0
        self.flags = 0
        self.data = ""
def copyToIncrementalDeck(self, noteId):
    "Copies all cards belonging to the note id to the incremental deck."
    cardIds = mw.col.db.list("select id from cards where nid=?", noteId)
    for cardId in cardIds:
        # Create a copy and assign it the currently selected deck id.
        card = mw.col.getCard(cardId)
        card.did = mw.col.decks.selected()
        card.id = timestampID(mw.col.db, "cards")
        card.flush()
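# --- Usage note (editor's addition): copyToIncrementalDeck is an add-on
# method, so a hypothetical call site would pass it the id of the note whose
# cards should be duplicated into the currently selected deck, e.g.:
#     self.copyToIncrementalDeck(note.id)
# Re-assigning card.id to a fresh timestampID before flush() is what makes
# the flush write a brand-new row (legacy Card.flush uses insert-or-replace),
# leaving the original card untouched.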
def __init__(self, col, model=None, id=None):
    assert not (model and id)
    self.col = col
    if id:
        self.id = id
        self.load()
    else:
        self.id = timestampID(col.db, "notes")
        self.guid = guid64()
        self._model = model
        self.mid = model['id']
        self.tags = []
        self.fields = [""] * len(self._model['flds'])
        self.flags = 0
        self.data = ""
        self._fmap = self.col.models.fieldMap(self._model)
        self.scm = self.col.scm
def getNoteData(self, sortId, question, answerDicts, ref, siblings, connections):
    """Returns a list of all content needed to create a new note, together
    with the media contained in that note."""
    noteList = []
    media = []

    # Set field Reference
    noteList.append('<ul>%s</ul>' % ref)

    # Set field Question
    qtContent, qtMedia = getNodeContent(tagList=self.tagList, tag=question)
    noteList.append(qtContent)
    media.append(qtMedia)

    # Set Answer fields
    aId = 0
    for answerDict in answerDicts:
        if answerDict['isAnswer']:
            aId += 1
            # noinspection PyTypeChecker
            anContent, anMedia = getNodeContent(tagList=self.tagList,
                                                tag=answerDict['nodeTag'])
            noteList.append(anContent)
            media.append(anMedia)
            answerDict['aId'] = str(aId)
    # noinspection PyShadowingNames
    for i in range(aId, X_MAX_ANSWERS):
        noteList.append('')

    # set field ID
    noteList.append(sortId)

    # set field Meta
    meta = self.getXMindMeta(question=question, answerDicts=answerDicts,
                             siblings=siblings, connections=connections)
    noteList.append(meta)

    nId = timestampID(self.col.db, "notes")
    noteData = [nId, guid64(), self.model['id'], intTime(), self.col.usn(),
                self.currentSheetImport['tag'], joinFields(noteList),
                "", "", 0, ""]
    return noteData, media
def flush(self):
    self.mod = intTime()
    self.usn = self.col.usn()
    # bug check
    if self.queue == 2 and self.odue and not self.col.decks.isDyn(self.did):
        runHook("odueInvalid")
    assert self.due < 4294967296
    if self.id is None:
        self.id = timestampID(self.col.db, "cards")
        self.col.db.execute(
            """insert into cards values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            self.id, self.nid, self.did, self.ord, self.mod, self.usn,
            self.type, self.queue, self.due, self.ivl, self.factor,
            self.reps, self.lapses, self.left, self.odue, self.odid,
            self.flags, self.data, self.review_start_time)
    else:
        self.col.db.execute(
            """update cards set nid=?, did=?, ord=?, mod=?, usn=?, type=?, queue=?, due=?, ivl=?, factor=?, reps=?, lapses=?, left=?, odue=?, odid=?, flags=?, data=?, review_start_time=? where id = ?""",
            self.nid, self.did, self.ord, self.mod, self.usn, self.type,
            self.queue, self.due, self.ivl, self.factor, self.reps,
            self.lapses, self.left, self.odue, self.odid, self.flags,
            self.data, self.review_start_time, self.id)
    self.col.log(self)
def __init__(self, col, model: Optional[Any] = None,
             id: Optional[int] = None) -> None:
    assert not (model and id)
    self.col = col
    self.newlyAdded = False
    if id:
        self.id = id
        self.load()
    else:
        self.id = timestampID(col.db, "notes")
        self.guid = guid64()
        self._model = model
        self.mid = model['id']
        self.tags = []
        self.fields = [""] * len(self._model['flds'])
        self.flags = 0
        self.data = ""
        self._fmap = self.col.models.fieldMap(self._model)
        self.scm = self.col.scm
def __init__(self, col, id=None):
    self.col = col
    self.timerStarted = None
    self._qa = None
    self._note = None
    if id:
        self.id = id
        self.load()
    else:
        # to flush, set nid, ord, and due
        self.id = timestampID(col.db, "cards")
        self.did = 1
        self.crt = intTime()
        self.type = 0
        self.queue = 0
        self.ivl = 0
        self.factor = 0
        self.reps = 0
        self.lapses = 0
        self.left = 0
        self.edue = 0
        self.flags = 0
        self.data = ""
def __init__(
    self,
    col: anki.storage._Collection,
    model: Optional[NoteType] = None,
    id: Optional[int] = None,
) -> None:
    assert not (model and id)
    self.col = col
    self.newlyAdded = False
    if id:
        self.id = id
        self.load()
    else:
        self.id = timestampID(col.db, "notes")
        self.guid = guid64()
        self._model = model
        self.mid = model["id"]
        self.tags = []
        self.fields = [""] * len(self._model["flds"])
        self.flags = 0
        self.data = ""
        self._fmap = self.col.models.fieldMap(self._model)
        self.scm = self.col.scm
def importNotes(self, notes):
    "Convert each card into a note, apply attributes and add to col."
    assert self.mappingOk()
    # gather checks for duplicate comparison
    csums = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model['id']):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts = {}
    fld0idx = self.mapping.index(self.model['flds'][0]['name'])
    self._fmap = self.col.models.fieldMap(self.model)
    self._nextID = timestampID(self.col.db, "notes")
    # loop through the notes
    updates = []
    new = []
    self._ids = []
    self._cards = []
    for n in notes:
        fld0 = n.fields[fld0idx]
        csum = fieldChecksum(fld0)
        # first field must exist
        if not fld0:
            self.log.append(
                _("Empty first field: %s") % " ".join(n.fields))
            continue
        # earlier in import?
        if fld0 in firsts:
            # duplicates in source file; log and ignore
            self.log.append(_("Appeared twice in file: %s") % fld0)
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.update:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            found = True
                        break
        # newly add
        if not found:
            data = self.newData(n)
            if data:
                new.append(data)
                # note that we've seen this note once already
                firsts[fld0] = True
    self.addNew(new)
    self.addUpdates(updates)
    self.col.updateFieldCache(self._ids)
    # generate cards
    if self.col.genCards(self._ids):
        self.log.insert(
            0, _("Empty cards found. Please run Tools>Maintenance>Empty Cards."))
    # apply scheduling updates
    self.updateCards()
    # make sure to update sflds, etc
    self.log.append(
        _("%(a)d notes added, %(b)d notes updated.")
        % dict(a=len(new), b=self.updateCount))
    self.total = len(self._ids)
def importNotes(self, notes):
    "Convert each card into a note, apply attributes and add to col."
    assert self.mappingOk()
    # note whether tags are mapped
    self._tagsMapped = False
    for f in self.mapping:
        if f == "_tags":
            self._tagsMapped = True
    # gather checks for duplicate comparison
    csums = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model['id']):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts = {}
    fld0idx = self.mapping.index(self.model['flds'][0]['name'])
    self._fmap = self.col.models.fieldMap(self.model)
    self._nextID = timestampID(self.col.db, "notes")
    # loop through the notes
    updates = []
    new = []
    self._ids = []
    self._cards = []
    self._emptyNotes = False
    for n in notes:
        if not self.allowHTML:
            for c in range(len(n.fields)):
                n.fields[c] = cgi.escape(n.fields[c])
        fld0 = n.fields[fld0idx]
        csum = fieldChecksum(fld0)
        # first field must exist
        if not fld0:
            self.log.append(_("Empty first field: %s") % " ".join(n.fields))
            continue
        # earlier in import?
        if fld0 in firsts and self.importMode != 2:
            # duplicates in source file; log and ignore
            self.log.append(_("Appeared twice in file: %s") % fld0)
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.importMode == 0:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            found = True
                        break
                    elif self.importMode == 2:
                        # allow duplicates in this case
                        found = False
        # newly add
        if not found:
            data = self.newData(n)
            if data:
                new.append(data)
                # note that we've seen this note once already
                firsts[fld0] = True
    self.addNew(new)
    self.addUpdates(updates)
    self.col.updateFieldCache(self._ids)
    # generate cards
    if self.col.genCards(self._ids):
        self.log.insert(0, _(
            "Empty cards found. Please run Tools>Empty Cards."))
    # apply scheduling updates
    self.updateCards()
    # make sure to update sflds, etc
    part1 = ngettext("%d note added", "%d notes added", len(new)) % len(new)
    part2 = ngettext("%d note updated", "%d notes updated",
                     self.updateCount) % self.updateCount
    self.log.append("%s, %s." % (part1, part2))
    if self._emptyNotes:
        self.log.append(_("""\
One or more notes were not imported, because they didn't generate any cards. \
This can happen when you have empty fields or when you have not mapped the \
content in the text file to the correct fields."""))
    self.total = len(self._ids)
def importNotes(self, notes):
    "Convert each card into a note, apply attributes and add to col."
    assert self.mappingOk()
    # note whether tags are mapped
    self._tagsMapped = False
    for f in self.mapping:
        if f == "_tags":
            self._tagsMapped = True
    # gather checks for duplicate comparison
    csums = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model['id']):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts = {}
    fld0idx = self.mapping.index(self.model['flds'][0]['name'])
    self._fmap = self.col.models.fieldMap(self.model)
    self._nextID = timestampID(self.col.db, "notes")
    # loop through the notes
    updates = []
    updateLog = []
    updateLogTxt = _("First field matched: %s")
    dupeLogTxt = _("Added duplicate with first field: %s")
    new = []
    self._ids = []
    self._cards = []
    self._emptyNotes = False
    dupeCount = 0
    dupes = []
    for n in notes:
        for c in range(len(n.fields)):
            if not self.allowHTML:
                n.fields[c] = html.escape(n.fields[c], quote=False)
            n.fields[c] = n.fields[c].strip()
            if not self.allowHTML:
                n.fields[c] = n.fields[c].replace("\n", "<br>")
            n.fields[c] = unicodedata.normalize("NFC", n.fields[c])
        n.tags = [unicodedata.normalize("NFC", t) for t in n.tags]
        fld0 = n.fields[fld0idx]
        csum = fieldChecksum(fld0)
        # first field must exist
        if not fld0:
            self.log.append(
                _("Empty first field: %s") % " ".join(n.fields))
            continue
        # earlier in import?
        if fld0 in firsts and self.importMode != 2:
            # duplicates in source file; log and ignore
            self.log.append(_("Appeared twice in file: %s") % fld0)
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.importMode == 0:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            updateLog.append(updateLogTxt % fld0)
                            dupeCount += 1
                            found = True
                    elif self.importMode == 1:
                        dupeCount += 1
                    elif self.importMode == 2:
                        # allow duplicates in this case
                        if fld0 not in dupes:
                            # only show message once, no matter how many
                            # duplicates are in the collection already
                            updateLog.append(dupeLogTxt % fld0)
                            dupes.append(fld0)
                        found = False
        # newly add
        if not found:
            data = self.newData(n)
            if data:
                new.append(data)
                # note that we've seen this note once already
                firsts[fld0] = True
    self.addNew(new)
    self.addUpdates(updates)
    # make sure to update sflds, etc
    self.col.updateFieldCache(self._ids)
    # generate cards
    if self.col.genCards(self._ids):
        self.log.insert(
            0, _("Empty cards found. Please run Tools>Empty Cards."))
    # apply scheduling updates
    self.updateCards()
    # we randomize or order here, to ensure that siblings
    # have the same due#
    did = self.col.decks.selected()
    conf = self.col.decks.confForDid(did)
    # in order due?
    if conf['new']['order'] == NEW_CARDS_RANDOM:
        self.col.sched.randomizeCards(did)
    part1 = ngettext("%d note added", "%d notes added", len(new)) % len(new)
    part2 = ngettext("%d note updated", "%d notes updated",
                     self.updateCount) % self.updateCount
    if self.importMode == 0:
        unchanged = dupeCount - self.updateCount
    elif self.importMode == 1:
        unchanged = dupeCount
    else:
        unchanged = 0
    part3 = ngettext("%d note unchanged", "%d notes unchanged",
                     unchanged) % unchanged
    self.log.append("%s, %s, %s." % (part1, part2, part3))
    self.log.extend(updateLog)
    if self._emptyNotes:
        self.log.append(_("""\
One or more notes were not imported, because they didn't generate any cards. \
This can happen when you have empty fields or when you have not mapped the \
content in the text file to the correct fields."""))
    self.total = len(self._ids)
def importNotes(self, notes):
    "Convert each card into a note, apply attributes and add to col."
    print "importNotes()"
    print notes
    assert self.mappingOk()
    # note whether tags are mapped
    self._tagsMapped = False
    for f in self.mapping:
        if f == "_tags":
            self._tagsMapped = True
    # gather checks for duplicate comparison
    csums = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model['id']):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts = {}
    fld0idx = self.mapping.index(self.model['flds'][0]['name'])
    self._fmap = self.col.models.fieldMap(self.model)
    self._nextID = timestampID(self.col.db, "notes")
    # loop through the notes
    updates = []
    updateLog = []
    updateLogTxt = _("First field matched: %s")
    dupeLogTxt = _("Added duplicate with first field: %s")
    new = []
    self._ids = []
    self._cards = []
    self._emptyNotes = False
    for n in notes:
        if not self.allowHTML:
            for c in range(len(n.fields)):
                n.fields[c] = cgi.escape(n.fields[c])
        fld0 = n.fields[fld0idx]
        csum = fieldChecksum(fld0)
        # first field must exist
        if not fld0:
            self.log.append(_("Empty first field: %s") % " ".join(n.fields))
            continue
        # earlier in import?
        if fld0 in firsts and self.importMode != 2:
            # duplicates in source file; log and ignore
            self.log.append(_("Appeared twice in file: %s") % fld0)
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.importMode == 0:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            updateLog.append(updateLogTxt % fld0)
                            found = True
                        break
                    elif self.importMode == 2:
                        # allow duplicates in this case
                        updateLog.append(dupeLogTxt % fld0)
                        found = False
        # newly add
        if not found:
            data = self.newData(n)
            if data:
                new.append(data)
                # note that we've seen this note once already
                firsts[fld0] = True
    did = self.col.decks.id(self.deck)
    self.col.decks.select(did)
    # SUPER IMPORTANT (setting the associated deck to the model)
    self.model['did'] = did
    print "Selected: ", self.col.decks.get(self.col.decks.selected())
    self.addNew(new)
    self.addUpdates(updates)
    # make sure to update sflds, etc
    self.col.updateFieldCache(self._ids)
    # generate cards
    if self.col.genCards(self._ids):
        self.log.insert(0, _(
            "Empty cards found. Please run Tools>Empty Cards."))
    # apply scheduling updates
    self.updateCards()
    # we randomize or order here, to ensure that siblings
    # have the same due#
    # m = self.col.models.byName("Basic")
    # deck = self.col.decks.get(did)
    # deck['mid'] = m['id']
    # self.col.decks.save(deck)
    # print "Deck:", self.col.decks.byName(self.deck)
    # print "DID:", did
    # save tags to model
    # m = self.note.model()
    # m['tags'] = self.note.tags
    # self.mw.col.models.save(m)
    conf = self.col.decks.confForDid(did)
    # print "Conf: ", conf
    # in order due?
    if conf['new']['order'] == NEW_CARDS_RANDOM:
        self.col.sched.randomizeCards(did)
    else:
        self.col.sched.orderCards(did)
    part1 = ngettext("%d note added", "%d notes added", len(new)) % len(new)
    part2 = ngettext("%d note updated", "%d notes updated",
                     self.updateCount) % self.updateCount
    self.log.append("%s, %s." % (part1, part2))
    print part1, part2, "on deck: [", self.deck, "]"
    self.log.extend(updateLog)
    if self._emptyNotes:
        print "there were empty notes"
        self.log.append(_("""\
One or more notes were not imported, because they didn't generate any cards. \
This can happen when you have empty fields or when you have not mapped the \
content in the text file to the correct fields."""))
    self.total = len(self._ids)
def importNotes(self, notes):
    "Convert each card into a note, apply attributes and add to col."
    assert self.mappingOk()
    # gather checks for duplicate comparison
    csums = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model['id']):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts = {}
    fld0idx = self.mapping.index(self.model['flds'][0]['name'])
    self._fmap = self.col.models.fieldMap(self.model)
    self._nextID = timestampID(self.col.db, "notes")
    # loop through the notes
    updates = []
    new = []
    self._ids = []
    self._cards = []
    for n in notes:
        fld0 = n.fields[fld0idx]
        csum = fieldChecksum(fld0)
        # first field must exist
        if not fld0:
            self.log.append(_("Empty first field: %s") % " ".join(n.fields))
            continue
        # earlier in import?
        if fld0 in firsts:
            # duplicates in source file; log and ignore
            self.log.append(_("Appeared twice in file: %s") % fld0)
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.update:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            found = True
                        break
        # newly add
        if not found:
            data = self.newData(n)
            if data:
                new.append(data)
                # note that we've seen this note once already
                firsts[fld0] = True
    self.addNew(new)
    self.addUpdates(updates)
    self.col.updateFieldCache(self._ids)
    # generate cards
    if self.col.genCards(self._ids):
        self.log.insert(0, _(
            "Empty cards found. Please run Tools>Maintenance>Empty Cards."))
    # apply scheduling updates
    self.updateCards()
    # make sure to update sflds, etc
    self.log.append(_("%(a)d notes added, %(b)d notes updated.") %
                    dict(a=len(new), b=self.updateCount))
    self.total = len(self._ids)
def importNotes(self, notes):
    "Convert each card into a note, apply attributes and add to col."
    assert self.mappingOk()
    # note whether tags are mapped
    self._tagsMapped = False
    for f in self.mapping:
        if f == "_tags":
            self._tagsMapped = True
    # gather checks for duplicate comparison
    csums = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model['id']):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts = {}
    fld0idx = self.mapping.index(self.model['flds'][0]['name'])
    self._fmap = self.col.models.fieldMap(self.model)
    self._nextID = timestampID(self.col.db, "notes")
    # loop through the notes
    updates = []
    updateLog = []
    updateLogTxt = _("Update as first field matched: %s")
    dupeLogTxt = _("Added duplicate with first field: %s")
    new = []
    self._ids = []
    self._cards = []
    self._emptyNotes = False
    for n in notes:
        if not self.allowHTML:
            for c in range(len(n.fields)):
                n.fields[c] = cgi.escape(n.fields[c])
        fld0 = n.fields[fld0idx]
        csum = fieldChecksum(fld0)
        # first field must exist
        if not fld0:
            self.log.append(_("Empty first field: %s") % " ".join(n.fields))
            continue
        # earlier in import?
        if fld0 in firsts and self.importMode != 2:
            # duplicates in source file; log and ignore
            self.log.append(_("Appeared twice in file: %s") % fld0)
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.importMode == 0:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            updateLog.append(updateLogTxt % fld0)
                            found = True
                        break
                    elif self.importMode == 2:
                        # allow duplicates in this case
                        updateLog.append(dupeLogTxt % fld0)
                        found = False
        # newly add
        if not found:
            data = self.newData(n)
            if data:
                new.append(data)
                # note that we've seen this note once already
                firsts[fld0] = True
    self.addNew(new)
    self.addUpdates(updates)
    # make sure to update sflds, etc
    self.col.updateFieldCache(self._ids)
    # generate cards
    if self.col.genCards(self._ids):
        self.log.insert(0, _(
            "Empty cards found. Please run Tools>Empty Cards."))
    # apply scheduling updates
    self.updateCards()
    self.col.sched.maybeRandomizeDeck()
    part1 = ngettext("%d note added", "%d notes added", len(new)) % len(new)
    part2 = ngettext("%d note updated", "%d notes updated",
                     self.updateCount) % self.updateCount
    self.log.append("%s, %s." % (part1, part2))
    self.log.extend(updateLog)
    if self._emptyNotes:
        self.log.append(_("""\
One or more notes were not imported, because they didn't generate any cards. \
This can happen when you have empty fields or when you have not mapped the \
content in the text file to the correct fields."""))
    self.total = len(self._ids)
def importNotes(self, notes: list[ForeignNote]) -> None:
    "Convert each card into a note, apply attributes and add to col."
    assert self.mappingOk()
    # note whether tags are mapped
    self._tagsMapped = False
    for f in self.mapping:
        if f == "_tags":
            self._tagsMapped = True
    # gather checks for duplicate comparison
    csums: dict[str, list[NoteId]] = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model["id"]):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts: dict[str, bool] = {}
    fld0idx = self.mapping.index(self.model["flds"][0]["name"])
    self._fmap = self.col.models.field_map(self.model)
    self._nextID = NoteId(timestampID(self.col.db, "notes"))
    # loop through the notes
    updates: list[Updates] = []
    updateLog = []
    new = []
    self._ids: list[NoteId] = []
    self._cards: list[tuple] = []
    dupeCount = 0
    dupes: list[str] = []
    for n in notes:
        for c, field in enumerate(n.fields):
            if not self.allowHTML:
                n.fields[c] = html.escape(field, quote=False)
            n.fields[c] = field.strip()
            if not self.allowHTML:
                n.fields[c] = field.replace("\n", "<br>")
        fld0 = unicodedata.normalize("NFC", n.fields[fld0idx])
        # first field must exist
        if not fld0:
            self.log.append(
                self.col.tr.importing_empty_first_field(
                    val=" ".join(n.fields)))
            continue
        csum = fieldChecksum(fld0)
        # earlier in import?
        if fld0 in firsts and self.importMode != ADD_MODE:
            # duplicates in source file; log and ignore
            self.log.append(
                self.col.tr.importing_appeared_twice_in_file(val=fld0))
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.importMode == UPDATE_MODE:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            updateLog.append(
                                self.col.tr.importing_first_field_matched(
                                    val=fld0))
                            dupeCount += 1
                            found = True
                    elif self.importMode == IGNORE_MODE:
                        dupeCount += 1
                    elif self.importMode == ADD_MODE:
                        # allow duplicates in this case
                        if fld0 not in dupes:
                            # only show message once, no matter how many
                            # duplicates are in the collection already
                            updateLog.append(
                                self.col.tr.importing_added_duplicate_with_first_field(
                                    val=fld0,
                                ))
                            dupes.append(fld0)
                        found = False
        # newly add
        if not found:
            new_data = self.newData(n)
            if new_data:
                new.append(new_data)
                # note that we've seen this note once already
                firsts[fld0] = True
    self.addNew(new)
    self.addUpdates(updates)
    # generate cards + update field cache
    self.col.after_note_updates(self._ids, mark_modified=False)
    # apply scheduling updates
    self.updateCards()
    # we randomize or order here, to ensure that siblings
    # have the same due#
    did = self.col.decks.selected()
    conf = self.col.decks.config_dict_for_deck_id(did)
    # in order due?
    if not conf["dyn"] and conf["new"]["order"] == NEW_CARDS_RANDOM:
        self.col.sched.randomizeCards(did)
    part1 = self.col.tr.importing_note_added(count=len(new))
    part2 = self.col.tr.importing_note_updated(count=self.updateCount)
    if self.importMode == UPDATE_MODE:
        unchanged = dupeCount - self.updateCount
    elif self.importMode == IGNORE_MODE:
        unchanged = dupeCount
    else:
        unchanged = 0
    part3 = self.col.tr.importing_note_unchanged(count=unchanged)
    self.log.append(f"{part1}, {part2}, {part3}.")
    self.log.extend(updateLog)
    self.total = len(self._ids)
def importNotes(self, notes: List[ForeignNote]) -> None:
    "Convert each card into a note, apply attributes and add to col."
    assert self.mappingOk()
    # note whether tags are mapped
    self._tagsMapped = False
    for f in self.mapping:
        if f == "_tags":
            self._tagsMapped = True
    # gather checks for duplicate comparison
    csums: Dict[str, List[int]] = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model["id"]):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts: Dict[str, bool] = {}
    fld0idx = self.mapping.index(self.model["flds"][0]["name"])
    self._fmap = self.col.models.fieldMap(self.model)
    self._nextID = timestampID(self.col.db, "notes")
    # loop through the notes
    updates = []
    updateLog = []
    new = []
    self._ids: List[int] = []
    self._cards: List[Tuple] = []
    dupeCount = 0
    dupes: List[str] = []
    for n in notes:
        for c in range(len(n.fields)):
            if not self.allowHTML:
                n.fields[c] = html.escape(n.fields[c], quote=False)
            n.fields[c] = n.fields[c].strip()
            if not self.allowHTML:
                n.fields[c] = n.fields[c].replace("\n", "<br>")
        fld0 = n.fields[fld0idx]
        csum = fieldChecksum(fld0)
        # first field must exist
        if not fld0:
            self.log.append(
                self.col.tr(TR.IMPORTING_EMPTY_FIRST_FIELD,
                            val=" ".join(n.fields)))
            continue
        # earlier in import?
        if fld0 in firsts and self.importMode != ADD_MODE:
            # duplicates in source file; log and ignore
            self.log.append(
                self.col.tr(TR.IMPORTING_APPEARED_TWICE_IN_FILE, val=fld0))
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.importMode == UPDATE_MODE:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            updateLog.append(
                                self.col.tr(
                                    TR.IMPORTING_FIRST_FIELD_MATCHED,
                                    val=fld0))
                            dupeCount += 1
                            found = True
                    elif self.importMode == IGNORE_MODE:
                        dupeCount += 1
                    elif self.importMode == ADD_MODE:
                        # allow duplicates in this case
                        if fld0 not in dupes:
                            # only show message once, no matter how many
                            # duplicates are in the collection already
                            updateLog.append(
                                self.col.tr(
                                    TR.IMPORTING_ADDED_DUPLICATE_WITH_FIRST_FIELD,
                                    val=fld0,
                                ))
                            dupes.append(fld0)
                        found = False
        # newly add
        if not found:
            data = self.newData(n)
            if data:
                new.append(data)
                # note that we've seen this note once already
                firsts[fld0] = True
    self.addNew(new)
    self.addUpdates(updates)
    # generate cards + update field cache
    self.col.after_note_updates(self._ids, mark_modified=False)
    # apply scheduling updates
    self.updateCards()
    # we randomize or order here, to ensure that siblings
    # have the same due#
    did = self.col.decks.selected()
    conf = self.col.decks.confForDid(did)
    # in order due?
    if conf["new"]["order"] == NEW_CARDS_RANDOM:
        self.col.sched.randomizeCards(did)
    part1 = self.col.tr(TR.IMPORTING_NOTE_ADDED, count=len(new))
    part2 = self.col.tr(TR.IMPORTING_NOTE_UPDATED, count=self.updateCount)
    if self.importMode == UPDATE_MODE:
        unchanged = dupeCount - self.updateCount
    elif self.importMode == IGNORE_MODE:
        unchanged = dupeCount
    else:
        unchanged = 0
    part3 = self.col.tr(TR.IMPORTING_NOTE_UNCHANGED, count=unchanged)
    self.log.append(f"{part1}, {part2}, {part3}.")
    self.log.extend(updateLog)
    self.total = len(self._ids)
def importNotes(self, notes):
    "Convert each card into a note, apply attributes and add to col."
    assert self.mappingOk()
    # note whether tags are mapped
    self._tagsMapped = False
    for f in self.mapping:
        if f == "_tags":
            self._tagsMapped = True
    # gather checks for duplicate comparison
    csums = {}
    for csum, id in self.col.db.execute(
            "select csum, id from notes where mid = ?", self.model['id']):
        if csum in csums:
            csums[csum].append(id)
        else:
            csums[csum] = [id]
    firsts = {}
    fld0idx = self.mapping.index(self.model['flds'][0]['name'])
    self._fmap = self.col.models.fieldMap(self.model)
    self._nextID = timestampID(self.col.db, "notes")
    # loop through the notes
    updates = []
    updateLog = []
    updateLogTxt = _("First field matched: %s")
    dupeLogTxt = _("Added duplicate with first field: %s")
    new = []
    self._ids = []
    self._cards = []
    self._emptyNotes = False
    dupeCount = 0
    dupes = []
    for n in notes:
        for c in range(len(n.fields)):
            if not self.allowHTML:
                n.fields[c] = cgi.escape(n.fields[c])
            n.fields[c] = n.fields[c].strip().replace("\n", "<br>")
        fld0 = n.fields[fld0idx]
        csum = fieldChecksum(fld0)
        # first field must exist
        if not fld0:
            self.log.append(_("Empty first field: %s") % " ".join(n.fields))
            continue
        # earlier in import?
        if fld0 in firsts and self.importMode != 2:
            # duplicates in source file; log and ignore
            self.log.append(_("Appeared twice in file: %s") % fld0)
            continue
        firsts[fld0] = True
        # already exists?
        found = False
        if csum in csums:
            # csum is not a guarantee; have to check
            for id in csums[csum]:
                flds = self.col.db.scalar(
                    "select flds from notes where id = ?", id)
                sflds = splitFields(flds)
                if fld0 == sflds[0]:
                    # duplicate
                    found = True
                    if self.importMode == 0:
                        data = self.updateData(n, id, sflds)
                        if data:
                            updates.append(data)
                            updateLog.append(updateLogTxt % fld0)
                            dupeCount += 1
                            found = True
                    elif self.importMode == 1:
                        dupeCount += 1
                    elif self.importMode == 2:
                        # allow duplicates in this case
                        if fld0 not in dupes:
                            # only show message once, no matter how many
                            # duplicates are in the collection already
                            updateLog.append(dupeLogTxt % fld0)
                            dupes.append(fld0)
                        found = False
        # newly add
        if not found:
            data = self.newData(n)
            if data:
                new.append(data)
                # note that we've seen this note once already
                firsts[fld0] = True
    self.addNew(new)
    self.addUpdates(updates)
    # make sure to update sflds, etc
    self.col.updateFieldCache(self._ids)
    # generate cards
    if self.col.genCards(self._ids):
        self.log.insert(0, _(
            "Empty cards found. Please run Tools>Empty Cards."))
    # apply scheduling updates
    self.updateCards()
    # we randomize or order here, to ensure that siblings
    # have the same due#
    did = self.col.decks.selected()
    conf = self.col.decks.confForDid(did)
    # in order due?
    if conf['new']['order'] == NEW_CARDS_RANDOM:
        self.col.sched.randomizeCards(did)
    else:
        self.col.sched.orderCards(did)
    part1 = ngettext("%d note added", "%d notes added", len(new)) % len(new)
    part2 = ngettext("%d note updated", "%d notes updated",
                     self.updateCount) % self.updateCount
    if self.importMode == 0:
        unchanged = dupeCount - self.updateCount
    elif self.importMode == 1:
        unchanged = dupeCount
    else:
        unchanged = 0
    part3 = ngettext("%d note unchanged", "%d notes unchanged",
                     unchanged) % unchanged
    self.log.append("%s, %s, %s." % (part1, part2, part3))
    self.log.extend(updateLog)
    if self._emptyNotes:
        self.log.append(_("""\
One or more notes were not imported, because they didn't generate any cards. \
This can happen when you have empty fields or when you have not mapped the \
content in the text file to the correct fields."""))
    self.total = len(self._ids)