def anknotes_finder_findCards_wrap(self, query, order=False, _old=None):
    """Replacement for Anki's ``Finder.findCards`` that logs every stage of
    the search pipeline (tokens, predicates, SQL args, results).

    :param query: raw search query string entered by the user
    :param order: ordering flag passed through to ``Finder._order``
    :param _old: the wrapped original ``findCards``; kept for the
        monkey-patch wrapper signature but intentionally never called --
        this implementation fully replaces the original
    :return: list of matching card ids (empty on no predicates or SQL error)
    """
    tmr = stopwatch.Timer(label='finder\\findCards')
    log_banner("FINDCARDS SEARCH: " + query, tmr.label, append_newline=False, clear=False)
    tokens = self._tokenize(query)
    preds, args = self._where(tokens)
    write_file_contents('Tokens: '.ljust(25) + ', '.join(tokens), tmr.label)
    if args:
        # BUG FIX: the original joined `tokens` here, so the 'Args' log line
        # duplicated the token list instead of showing the SQL arguments.
        write_file_contents('Args: '.ljust(25) + ', '.join(map(str, args)), tmr.label)
    if preds is None:
        # Query could not be converted to SQL predicates; nothing to search.
        write_file_contents('Preds: '.ljust(25) + '<NONE>', tmr.label)
        log_blank(tmr.label)
        return []
    order, rev = self._order(order)
    sql = self._query(preds, order)
    try:
        res = self.col.db.list(sql, *args)
    except Exception as ex:
        # invalid grouping -- generated SQL was malformed; log and return empty
        log_error("Error with findCards Query %s: %s.\n%s" % (query, str(ex), [sql, args]),
                  crosspost=tmr.label)
        return []
    if rev:
        res.reverse()
    write_file_contents(
        "FINDCARDS DONE: ".ljust(25) + "%-5s --> %3d results" % (tmr.str_long, len(res)),
        tmr.label)
    log_blank(tmr.label)
    return res
    # NOTE: removed the unreachable `return _old(self, query, order)` that
    # followed the return above in the original; `_old` remains in the
    # signature for wrap() compatibility.
def mk_banner(fn, display_initial_info=False):
    """Point the module logger at *fn*, emit a framed banner for it, and
    return a Timer sized to that file's queued item count.

    :param fn: queue filename; becomes the logger's default filename and
        the key into ``queued_items``
    :param display_initial_info: forwarded to ``stopwatch.Timer``
    :return: the newly created Timer
    """
    l.default_filename = fn
    banner_text = info_str % l.default_filename.upper()
    timer = stopwatch.Timer(len(queued_items[fn]), 10, infoStr=banner_text,
                            do_print=True, label=l.base_path,
                            display_initial_info=display_initial_info)
    divider = "------------------------------------------------"
    l.go(divider, clear=True)
    l.go(banner_text)
    l.go(divider)
    return timer
def resync_with_local_db(self):
    """Recreate every Evernote note from its local-DB copy, push the results
    into the matching Anki notes, and show a summary report."""
    log_banner('Resync With Local DB', clear=False, append_newline=False,
               prepend_newline=True)
    guids = get_all_local_db_guids()
    timer = stopwatch.Timer(guids, strInfo='Resync Notes From Local DB',
                            label='resync_with_local_db\\')
    # fetch_results :type: EvernoteNoteFetcherResults
    fetch_results = self.evernote.create_evernote_notes(guids, use_local_db_only=True)
    log(' > Finished Creating Evernote Notes: '.ljust(40) + timer.str_long)
    timer.reset()
    updated_count = self.anki.update_evernote_notes(fetch_results.Notes,
                                                    log_update_if_unchanged=False)
    log(' > Finished Updating Anki Notes: '.ljust(40) + timer.str_long)
    tooltip = '%d Evernote Notes Created<BR>%d Anki Notes Successfully Updated' % (
        fetch_results.Local, updated_count)
    show_report(' > Resync with Local DB Complete', tooltip)
def process_see_also_content(self, anki_note_ids):
    """Scan the given Anki notes for See Also content that has not yet been
    written into the See Also field, and update each note whose prototype
    detects such content during light processing.

    :param anki_note_ids: iterable of Anki note ids to examine
    """
    log = Logger('See Also\\1-process_unadded_see_also_notes\\', rm_path=True)
    tmr = stopwatch.Timer(anki_note_ids, infoStr='Processing Unadded See Also Notes',
                          label=log.base_path)
    tmr.info.BannerHeader('error')
    for note_id in anki_note_ids:
        note = self.collection().getNote(note_id)
        try:
            note_items = note.items()
        except Exception:
            log.error("Unable to get note items for Note ID: %d for %s" % (note_id, tmr.base_name))
            raise
        fields = dict(note_items)
        if fields[FIELDS.SEE_ALSO]:
            # See Also field already populated -- nothing to add.
            tmr.reportSkipped()
            continue
        prototype = AnkiNotePrototype(self, fields, note.tags, note,
                                      count=tmr.count,
                                      count_update=tmr.counts.updated.completed.val,
                                      max_count=tmr.max, light_processing=True)
        if not prototype.Fields[FIELDS.SEE_ALSO]:
            # Light processing found no see-also content either.
            tmr.reportSkipped()
            continue
        log.go("Detected see also contents for Note '%s': %s" %
               (get_evernote_guid_from_anki_fields(fields), fields[FIELDS.TITLE]))
        log.go(u" ::: %s " % strip_tags_and_new_lines(fields[FIELDS.SEE_ALSO]))
        tmr.autoStep(prototype.update_note(), fields[FIELDS.TITLE], update=True)
# NOTE(review): this chunk begins mid-way through a log(...) call whose
# opening parenthesis lies outside the visible source; the stray argument
# list on the next line belongs to that call. The fragment validates
# pending "make note" queue items.
'MakeNoteQueue-' + currentLog, timestamp=False, do_print=True, clear=True)
log(" CHECKING %3d PENDING MAKE NOTE QUEUE ITEMS " % len(pending_queued_items),
    'MakeNoteQueue-' + currentLog, clear=False, timestamp=False, do_print=True)
log("------------------------------------------------",
    'MakeNoteQueue-' + currentLog, timestamp=False, do_print=True)
timerFull = stopwatch.Timer()
for result in pending_queued_items:
    guid = result['guid']
    noteContents = result['contents']
    noteTitle = result['title']
    # NOTE(review): this first `line` value is dead -- it is unconditionally
    # overwritten by the SUCCESS/FAILURE assignment below before any use.
    line = (" [%-30s] " % ((result['guid']) + ':')) if result['guid'] else "NEW [%-30s] " % ''
    # Validate the queued note's ENML content against the Evernote DTD.
    success, errors = EN.validateNoteContent(noteContents, noteTitle)
    validation_status = 1 if success else -1
    line = " SUCCESS! " if success else " FAILURE: "
    line += '        ' if result['guid'] else '  NEW   '
    # line += ' %-60s ' % (result['title'] + ':')
    if not success:
        # Format the validator's error list as an indented bullet list.
        errors = '\n * ' + '\n * '.join(errors)
def _findAnknotes((val, args)): tmr = stopwatch.Timer(label='finder\\findAnknotes', begin=False) log_banner("FINDANKNOTES SEARCH: " + val.upper().replace('_', ' '), tmr.label, append_newline=False, clear=False) if not hasattr(_findAnknotes, 'note_ids'): _findAnknotes.note_ids = {} if val == 'hierarchical' or val == 'hierarchical_alt' and ( val not in _findAnknotes.note_ids or not ANKNOTES.CACHE_SEARCHES): tmr.reset() val_root = val.replace('hierarchical', 'root') val_child = val.replace('hierarchical', 'child') _findAnknotes((val_root, None), ) _findAnknotes((val_child, None), ) _findAnknotes.note_ids[val] = _findAnknotes.note_ids[ val_root] + _findAnknotes.note_ids[val_child] write_file_contents( " > %s Search Complete: ".ljust(25) % val.upper().replace('_', ' ') + "%-5s --> %3d results" % (tmr.str_long, len(_findAnknotes.note_ids[val])), tmr.label) if not hasattr(_findAnknotes, 'queries'): _findAnknotes.queries = { 'all': get_evernote_model_ids(True), 'sub': 'n.sfld like "%:%"', 'root_alt': "n.sfld NOT LIKE '%:%' AND ank.title LIKE n.sfld || ':%'", 'child_alt': "n.sfld LIKE '%%:%%' AND UPPER(SUBSTR(n.sfld, 0, INSTR(n.sfld, ':'))) IN (SELECT UPPER(title) FROM %s WHERE title NOT LIKE '%%:%%' AND tagNames LIKE '%%,%s,%%') " % (TABLES.EVERNOTE.NOTES, TAGS.TOC), 'orphan_alt': "n.sfld LIKE '%%:%%' AND UPPER(SUBSTR(n.sfld, 0, INSTR(n.sfld, ':'))) NOT IN (SELECT UPPER(title) FROM %s WHERE title NOT LIKE '%%:%%' AND tagNames LIKE '%%,%s,%%') " % (TABLES.EVERNOTE.NOTES, TAGS.TOC) } if val not in _findAnknotes.note_ids or (not ANKNOTES.CACHE_SEARCHES and 'hierarchical' not in val): tmr.reset() if val == 'root': _findAnknotes.note_ids[val] = get_anknotes_root_notes_nids() elif val == 'child': _findAnknotes.note_ids[val] = get_anknotes_child_notes_nids() elif val == 'orphan': _findAnknotes.note_ids[val] = get_anknotes_orphan_notes_nids() elif val in _findAnknotes.queries: pred = _findAnknotes.queries[val] col = 'n.id' table = 'notes n' if 'ank.' 
in pred: col = 'DISTINCT ' + col table += ', %s ank' % TABLES.EVERNOTE.NOTES sql = 'select %s from %s where ' % (col, table) + pred _findAnknotes.note_ids[val] = ankDB().list(sql) else: return None write_file_contents( " > Cached %s Note IDs: ".ljust(25) % val + "%-5s --> %3d results" % (tmr.str_long, len(_findAnknotes.note_ids[val])), tmr.label) else: write_file_contents( " > Retrieving %3d %s Note IDs from Cache" % (len(_findAnknotes.note_ids[val]), val), tmr.label) log_blank(tmr.label) return "c.nid IN %s" % ids2str(_findAnknotes.note_ids[val])
def add_evernote_notes(self, evernote_notes, update=False, log_update_if_unchanged=True):
    """ Add Notes to or Update Notes in Anki Database

    Builds an AnkiNotePrototype per Evernote note and either adds it or
    (when *update* is True) flushes it over the existing Anki note found
    by Evernote guid. Newly created notes have their Anki nid written
    back to the Evernote notes table at the end.

    :param evernote_notes: notes to add or update
    :param update: False = add new Anki notes; True = update existing ones
    :param log_update_if_unchanged: forwarded to each prototype
    :type evernote_notes: list[EvernoteNotePrototype.EvernoteNotePrototype]
    :type update: bool
    :return: Count of notes successfully added or updated
    """
    new_nids = []
    # Select wording for logs/labels based on add-vs-update mode.
    action_str_base = ['Add', 'Update'][update]
    action_str = ['Adding', 'Updating'][update]
    action_preposition = ['To', 'In'][update]
    info = stopwatch.ActionInfo(action_str + ' Of', 'Evernote Notes',
                                action_preposition + ' Anki', report_if_empty=False)
    tmr = stopwatch.Timer(evernote_notes, 10, info=info,
                          label='Add\\Anki-%sEvernoteNotes' % action_str_base)
    for ankiNote in evernote_notes:
        try:
            title = ankiNote.FullTitle
            content = decode(ankiNote.Content)
            anki_field_info = {
                FIELDS.TITLE: title,
                FIELDS.CONTENT: content,
                FIELDS.EVERNOTE_GUID: FIELDS.EVERNOTE_GUID_PREFIX + ankiNote.Guid,
                FIELDS.UPDATE_SEQUENCE_NUM: str(ankiNote.UpdateSequenceNum),
                FIELDS.SEE_ALSO: u''
            }
        except Exception:
            # Dump the offending note's content before propagating.
            log_error("Unable to set field info for: Note '%s': '%s'" %
                      (ankiNote.FullTitle, ankiNote.Guid))
            log_dump(ankiNote.Content, " NOTE CONTENTS ")
            # log_dump(encode(ankiNote.Content), " NOTE CONTENTS ")
            raise
        tmr.step(title)
        baseNote = None
        if update:
            # Updating requires the existing Anki note keyed by Evernote guid.
            baseNote = self.get_anki_note_from_evernote_guid(ankiNote.Guid)
            if not baseNote:
                log_error('Updating note %s: COULD NOT FIND BASE NOTE FOR ANKI NOTE ID'
                          % ankiNote.Guid)
                tmr.reportStatus(EvernoteAPIStatus.MissingDataError)
                continue
        if ankiNote.Tags is None:
            # NOTE(review): "Could note find" is a typo for "Could not find"
            # in this runtime message (left untouched here).
            log_error("Could note find tags object for note %s: %s. " %
                      (ankiNote.Guid, ankiNote.FullTitle))
            tmr.reportStatus(EvernoteAPIStatus.MissingDataError)
            continue
        anki_note_prototype = AnkiNotePrototype(
            self, anki_field_info, ankiNote.TagNames, baseNote,
            notebookGuid=ankiNote.NotebookGuid, count=tmr.count,
            count_update=tmr.counts.updated.completed.val, max_count=tmr.max)
        anki_note_prototype._log_update_if_unchanged_ = log_update_if_unchanged
        # autoStep records success/failure on tmr and returns the new nid (on add).
        nid = tmr.autoStep(
            anki_note_prototype.update_note() if update else anki_note_prototype.add_note(),
            ankiNote.FullTitle, update)
        if tmr.status.IsSuccess and not update:
            # Remember (nid, guid) pairs so guids can be linked to Anki ids below.
            new_nids.append([nid, ankiNote.Guid])
        elif tmr.status.IsError:
            log("ANKI ERROR WHILE %s EVERNOTE NOTES: " % action_str.upper() +
                str(tmr.status), tmr.label + '-Error')
    tmr.Report()
    if new_nids:
        ankDB().executemany("UPDATE {n} SET nid = ? WHERE guid = ?", new_nids)
    return tmr.counts.success
def insert_toc_and_outline_contents_into_notes(self):
    """For every note that is the source of a TOC or Outline link, gather the
    linked notes' contents and write them into the source note's TOC and
    Outline fields, then flush the note.
    """
    # Cache of target-note fields so each linked note is fetched only once.
    linked_notes_fields = {}
    db = ankDB(TABLES.SEE_ALSO)
    source_guids = db.list(
        "SELECT DISTINCT s.source_evernote_guid FROM {s} s, {n} n WHERE (s.is_toc = 1 OR "
        "s.is_outline = 1) AND s.source_evernote_guid = n.guid ORDER BY n.title ASC")
    info = stopwatch.ActionInfo('Insertion of', 'TOC/Outline Contents',
                                'Into Target Anki Notes')
    log = Logger('See Also\\8-insert_toc_contents\\', rm_path=True, timestamp=False)
    tmr = stopwatch.Timer(source_guids, 25, info=info, label=log.base_path)
    tmr.info.BannerHeader('error')
    for source_guid in source_guids:
        note = self.get_anki_note_from_evernote_guid(source_guid)
        if not note:
            tmr.reportStatus(EvernoteAPIStatus.NotFoundError)
            # NOTE(review): `note` is falsy here, so `note.guid` will raise
            # AttributeError -- this error path looks broken; confirm intent.
            log.error("Could not find note for %s for %s" % (note.guid, tmr.base_name))
            continue
        # if TAGS.TOC in note.tags:
        #     tmr.reportSkipped()
        #     continue
        # Locate the Title field by name in the note's model.
        for fld in note._model['flds']:
            if FIELDS.TITLE in fld.get('name'):
                note_title = note.fields[fld.get('ord')]
                continue
        # NOTE(review): if no field matches, `note_title` is unbound (or stale
        # from the previous loop iteration) -- TODO confirm a Title field is
        # guaranteed by the model.
        if not note_title:
            tmr.reportStatus(EvernoteAPIStatus.NotFoundError)
            log.error("Could not find note title for %s for %s" % (note.guid, tmr.base_name))
            continue
        tmr.step(note_title)
        note_toc = ""
        note_outline = ""
        toc_header = ""
        outline_header = ""
        toc_count = 0
        outline_count = 0
        toc_and_outline_links = db.execute(
            "source_evernote_guid = '%s' AND (is_toc = 1 OR is_outline = 1) "
            "ORDER BY number ASC" % source_guid,
            columns='target_evernote_guid, is_toc, is_outline')
        for target_evernote_guid, is_toc, is_outline in toc_and_outline_links:
            if target_evernote_guid in linked_notes_fields:
                # Cached from an earlier source note.
                linked_note_contents = linked_notes_fields[target_evernote_guid][FIELDS.CONTENT]
                linked_note_title = linked_notes_fields[target_evernote_guid][FIELDS.TITLE]
            else:
                linked_note = self.get_anki_note_from_evernote_guid(target_evernote_guid)
                if not linked_note:
                    continue
                linked_note_contents = u""
                for fld in linked_note._model['flds']:
                    if FIELDS.CONTENT in fld.get('name'):
                        linked_note_contents = linked_note.fields[fld.get('ord')]
                    elif FIELDS.TITLE in fld.get('name'):
                        linked_note_title = linked_note.fields[fld.get('ord')]
                if linked_note_contents:
                    linked_notes_fields[target_evernote_guid] = {
                        FIELDS.TITLE: linked_note_title,
                        FIELDS.CONTENT: linked_note_contents
                    }
            if linked_note_contents:
                linked_note_contents = decode(linked_note_contents)
                if is_toc:
                    toc_count += 1
                    # NOTE(review): `is 1` compares int identity; works for
                    # CPython small ints but should be `== 1`.
                    if toc_count is 1:
                        toc_header = "<span class='header'>TABLE OF CONTENTS</span>: 1. <span class='header'>%s</span>" % linked_note_title
                    else:
                        note_toc += "<br><hr>"
                        toc_header += "<span class='See_Also'> | </span> %d. <span class='header'>%s</span>" % (
                            toc_count, linked_note_title)
                    note_toc += linked_note_contents
                else:
                    outline_count += 1
                    if outline_count is 1:
                        outline_header = "<span class='header'>OUTLINE</span>: 1. <span class='header'>%s</span>" % linked_note_title
                    else:
                        note_outline += "<BR><HR>"
                        outline_header += "<span class='See_Also'> | </span> %d. <span class='header'>%s</span>" % (
                            outline_count, linked_note_title)
                    note_outline += linked_note_contents
        # NOTE(review): `is 0` -- same int-identity caveat as above.
        if outline_count + toc_count is 0:
            tmr.reportError(EvernoteAPIStatus.MissingDataError)
            log.error(" No Valid TOCs or Outlines Found: %s" % note_title)
            continue
        tmr.reportSuccess()

        def makestr(title, count):
            # Human-readable count, e.g. '', 'One TOC ', or ' 3  TOCs'.
            return '' if not count else 'One %s ' % title if count is 1 else '%s %ss' % (
                str(count).center(3), title)

        toc_str = makestr('TOC', toc_count).rjust(8)  #if toc_count else ''
        outline_str = makestr('Outline', outline_count).ljust(12)  #if outline_count else ''
        toc_str += ' & ' if toc_count and outline_count else ' '
        log.go(" [%4d/%4d] + %s for Note %s: %s" %
               (tmr.count, tmr.max, toc_str + outline_str,
                source_guid.split('-')[0], note_title))
        # Prepend the index header only when there is more than one entry.
        if outline_count > 1:
            note_outline = "<span class='Outline'>%s</span><BR><BR>" % outline_header + note_outline
        if toc_count > 1:
            note_toc = "<span class='TOC'>%s</span><BR><BR>" % toc_header + note_toc
        # Write the assembled HTML into the note's TOC and Outline fields.
        for fld in note._model['flds']:
            if FIELDS.TOC in fld.get('name'):
                note.fields[fld.get('ord')] = note_toc
            elif FIELDS.OUTLINE in fld.get('name'):
                note.fields[fld.get('ord')] = note_outline
        # log.go(' '*16 + "> Flushing Note \r\n")
        note.flush(intTime())
    tmr.Report()
def extract_links_from_toc(self):
    """Rebuild TOC-derived rows of the See Also table: scan every TOC-tagged
    note's content for Evernote links and insert one row per link, then
    flag rows pointing back at TOC notes.
    """
    db = ankDB(TABLES.SEE_ALSO)
    db.setrowfactory()
    toc_entries = db.all(
        "SELECT * FROM {n} WHERE tagNames LIKE '{t_toc}' ORDER BY title ASC")
    # Drop previously extracted TOC links; they are re-derived below.
    db.execute("DELETE FROM {t} WHERE from_toc = 1")
    log = Logger('See Also\\4-extract_links_from_toc\\', timestamp=False,
                 crosspost_to_default=False, rm_path=True)
    tmr = stopwatch.Timer(toc_entries, 20, infoStr='Extracting Links', label=log.base_path)
    tmr.info.BannerHeader('error')
    toc_guids = []
    for toc_entry in toc_entries:
        toc_evernote_guid, toc_link_title = toc_entry['guid'], toc_entry['title']
        # Collected as pre-quoted SQL literals for the IN (...) clause below.
        toc_guids.append("'%s'" % toc_evernote_guid)
        # toc_link_html = generate_evernote_span(toc_link_title, 'Links', 'TOC')
        enLinks = find_evernote_links(toc_entry['content'])
        tmr.increment(toc_link_title)
        for enLink in enLinks:
            target_evernote_guid = enLink.Guid
            if not check_evernote_guid_is_valid(target_evernote_guid):
                log.go("Invalid Target GUID for %-70s %s" %
                       (toc_link_title + ':', target_evernote_guid), 'error')
                continue
            # Default placeholder values; `toc` below overrides per-row fields.
            base = {
                'child_guid': target_evernote_guid,
                'uid': enLink.Uid,
                'shard': enLink.Shard,
                'toc_guid': toc_evernote_guid,
                'l1': 'source',
                'l2': 'source',
                'from_toc': 0,
                'is_toc': 0
            }
            query_count = "select COUNT(*) from {t} WHERE source_evernote_guid = '{%s_guid}'"
            toc = {
                # Next ordinal for this source note's links.
                'num': 1 + db.scalar(fmt(query_count % 'toc', base)),
                # Double single-quotes to escape for the SQL string literal.
                'html': enLink.HTML.replace(u'\'', u'\'\''),
                'title': enLink.FullTitle.replace(u'\'', u'\'\''),
                'l1': 'target',
                'from_toc': 1
            }
            # child = {1 + db.scalar(fmt(query_count % 'child', base)),
            #          'html': toc_link_html.replace(u'\'', u'\'\''),
            #          'title': toc_link_title.replace(u'\'', u'\'\''),
            #          'l2': 'target',
            #          'is_toc': 1
            #          }
            query = (
                u"INSERT OR REPLACE INTO `{t}`(`{l1}_evernote_guid`, `number`, `uid`, `shard`, "
                u"`{l2}_evernote_guid`, `html`, `title`, `from_toc`, `is_toc`) "
                u"VALUES('{child_guid}', {num}, {uid}, '{shard}', "
                u"'{toc_guid}', '{html}', '{title}', {from_toc}, {is_toc})")
            query_toc = fmt(query, base, toc)
            db.execute(query_toc)
        log.go("\t\t - Added %2d child link(s) from TOC %s" %
               (len(enLinks), encode(toc_link_title)))
    # Mark every row that targets a TOC note.
    db.update("is_toc = 1", where="target_evernote_guid IN (%s)" % ', '.join(toc_guids))
    db.commit()
def insert_toc_into_see_also(self):
    """Insert links to each note's TOC notes into that note's See Also field,
    removing invalid links and converting flat link lists into HTML lists
    where needed, then update the Anki note via AnkiNotePrototype.
    """
    db = ankDB()
    # Temporarily disable the row factory so rows come back as plain tuples.
    db._db.row_factory = None
    results = db.all(
        "SELECT s.target_evernote_guid, s.source_evernote_guid, target_note.title, toc_note.title "
        "FROM {s} as s, {n} as target_note, {n} as toc_note "
        "WHERE s.source_evernote_guid != s.target_evernote_guid AND target_note.guid = s.target_evernote_guid "
        "AND toc_note.guid = s.source_evernote_guid AND s.from_toc == 1 "
        "ORDER BY target_note.title ASC")
    # results_bad = db.all(
    #     "SELECT s.target_evernote_guid, s.source_evernote_guid FROM {t_see} as s WHERE s.source_evernote_guid COUNT(SELECT * FROM {tn} WHERE guid = s.source_evernote_guid) )" % (
    #         TABLES.SEE_ALSO, TABLES.EVERNOTE.NOTES, TABLES.EVERNOTE.NOTES))
    all_child_guids = db.list("tagNames NOT LIKE '{t_toc}'", columns='guid')
    all_toc_guids = db.list("tagNames LIKE '{t_toc}'", columns='guid')
    # grouped_results: target_guid -> [target title, [toc guids]]
    grouped_results = {}
    toc_titles = {}
    for row in results:
        target_guid = row[0]
        toc_guid = row[1]
        if toc_guid not in all_toc_guids:
            continue
        if target_guid not in all_toc_guids and target_guid not in all_child_guids:
            continue
        if target_guid not in grouped_results:
            grouped_results[target_guid] = [row[2], []]
        toc_titles[toc_guid] = row[3]
        grouped_results[target_guid][1].append(toc_guid)
    action_title = 'INSERT TOCS INTO ANKI NOTES'
    info = stopwatch.ActionInfo('Inserting TOC Links into', 'Anki Notes',
                                'Anki Notes\' See Only Field')
    log = Logger('See Also\\5-insert_toc_links_into_see_also\\', rm_path=True)
    tmr = stopwatch.Timer(len(grouped_results), info=info, label=log.base_path)
    tmr.info.BannerHeader('new', crosspost=['invalid', 'error'])
    toc_separator = generate_evernote_span(u' | ', u'Links', u'See Also', bold=False)
    log.add(' <h1>%s: %d TOTAL NOTES</h1> <HR><BR><BR>' % (action_title, tmr.max),
            'see_also_html', timestamp=False, clear=True, extension='htm')
    logged_missing_anki_note = False
    # Process targets in title order.
    sorted_results = sorted(grouped_results.items(), key=lambda s: s[1][0])
    for target_guid, target_guid_info in sorted_results:
        note_title, toc_guids = target_guid_info
        ankiNote = self.get_anki_note_from_evernote_guid(target_guid)
        # if tmr.step():
        #     log.add("INSERTING TOC LINKS INTO NOTE %5s: %s: %s" % ('#' + str(tmr.count), tmr.progress, note_title),
        #             'progress')
        if not ankiNote:
            log.dump(toc_guids, 'Missing Anki Note for ' + target_guid, tmr.label,
                     timestamp=False, crosspost_to_default=False)
            if not logged_missing_anki_note:
                # Only log this error once; details go to the dump log.
                log.error('%s: Missing Anki Note(s) for TOC entry. See %s dump log for more details'
                          % (action_title, tmr.label))
                logged_missing_anki_note = True
            tmr.reportStatus(EvernoteAPIStatus.NotFoundError, title=note_title)
            continue
        fields = get_dict_from_list(ankiNote.items())
        see_also_html = fields[FIELDS.SEE_ALSO]
        content_links = find_evernote_links_as_guids(fields[FIELDS.CONTENT])
        see_also_whole_links = find_evernote_links(see_also_html)
        see_also_links = {x.Guid for x in see_also_whole_links}
        # Links pointing at notes that no longer exist in either guid set.
        invalid_see_also_links = {
            x for x in see_also_links
            if x not in all_child_guids and x not in all_toc_guids
        }
        new_tocs = set(toc_guids) - see_also_links
        if TAGS.TOC_AUTO in ankiNote.tags:
            # Auto-TOC notes already link their children in the content body.
            new_tocs -= set(content_links)
        log.dump([new_tocs, toc_guids, invalid_see_also_links, see_also_links,
                  content_links],
                 'TOCs for %s' % fields[FIELDS.TITLE] + ' vs ' + note_title,
                 'new_tocs', crosspost_to_default=False)
        new_toc_count = len(new_tocs)
        invalid_see_also_links_count = len(invalid_see_also_links)
        if invalid_see_also_links_count > 0:
            # Strip dead links from the See Also HTML.
            for link in see_also_whole_links:
                if link.Guid in invalid_see_also_links:
                    see_also_html = remove_evernote_link(link, see_also_html)
            see_also_links -= invalid_see_also_links
        see_also_count = len(see_also_links)
        if new_toc_count > 0:
            has_ol = u'<ol' in see_also_html
            has_ul = u'<ul' in see_also_html
            has_list = has_ol or has_ul
            see_also_new = " "
            # Few links and no existing list -> render inline; else as <li> items.
            flat_links = (new_toc_count + see_also_count < 3 and not has_list)
            # NOTE(review): `is 0` / `is 1` int-identity comparisons below
            # should be `==`; they happen to work under CPython small ints.
            toc_delimiter = u' ' if see_also_count is 0 else toc_separator
            for toc_guid in toc_guids:
                toc_title = toc_titles[toc_guid]
                if flat_links:
                    toc_title = u'[%s]' % toc_title
                toc_link = generate_evernote_link(toc_guid, toc_title, value='TOC')
                see_also_new += (toc_delimiter + toc_link) if flat_links else (
                    u'\n<li>%s</li>' % toc_link)
                toc_delimiter = toc_separator
            if flat_links:
                # Splice the inline links just before the closing </div>.
                find_div_end = see_also_html.rfind('</div>')
                if find_div_end > -1:
                    see_also_html = see_also_html[:find_div_end] + see_also_new + '\n' + \
                        see_also_html[find_div_end:]
                    see_also_new = ''
            else:
                see_also_toc_headers = {
                    'ol': u'<br><div style="margin-top:5px;">\n%s</div><ol style="margin-top:3px;">'
                          % generate_evernote_span('<u>TABLE OF CONTENTS</u>:',
                                                   'Levels', 'Auto TOC', escape=False)
                }
                see_also_toc_headers['ul'] = see_also_toc_headers['ol'].replace('<ol ', '<ul ')
                if see_also_toc_headers['ul'] in see_also_html:
                    # Promote an existing single-item <ul> TOC list to <ol>.
                    find_ul_end = see_also_html.rfind('</ul>')
                    see_also_html = see_also_html[:find_ul_end] + '</ol>' + \
                        see_also_html[find_ul_end + 5:]
                    see_also_html = see_also_html.replace(see_also_toc_headers['ul'],
                                                          see_also_toc_headers['ol'])
                if see_also_toc_headers['ol'] in see_also_html:
                    # Append new items inside the existing ordered list.
                    find_ol_end = see_also_html.rfind('</ol>')
                    see_also_html = see_also_html[:find_ol_end] + see_also_new + '\n' + \
                        see_also_html[find_ol_end:]
                    see_also_new = ''
                else:
                    # No list yet: create one (ul for a single item, ol otherwise).
                    header_type = 'ul' if new_toc_count is 1 else 'ol'
                    see_also_new = see_also_toc_headers[header_type] + \
                        u'%s\n</%s>' % (see_also_new, header_type)
            if see_also_count == 0:
                # Start a fresh See Also section.
                see_also_html = generate_evernote_span(u'See Also:', 'Links', 'See Also')
            see_also_html += see_also_new
            see_also_html = see_also_html.replace('<ol>', '<ol style="margin-top:3px;">')
            log.add('<h3>%s</h3><br>' %
                    generate_evernote_span(fields[FIELDS.TITLE], 'Links', 'TOC') +
                    see_also_html + u'<HR>', 'see_also_html',
                    crosspost='see_also_html\\' + note_title, timestamp=False,
                    extension='htm')
        see_also_html = see_also_html.replace('evernote:///', 'evernote://')
        changed = see_also_html != fields[FIELDS.SEE_ALSO]
        fields[FIELDS.SEE_ALSO] = see_also_html
        anki_note_prototype = AnkiNotePrototype(
            self, fields, ankiNote.tags, ankiNote, count=tmr.counts.handled,
            count_update=tmr.counts.updated.completed.val, max_count=tmr.max,
            light_processing=True, steps=[0, 1, 7])
        anki_note_prototype._log_update_if_unchanged_ = (
            changed or new_toc_count + invalid_see_also_links_count > 0)
        tmr.autoStep(anki_note_prototype.update_note(error_if_unchanged=changed),
                     note_title, True)
        crosspost = []
        if new_toc_count:
            crosspost.append('new')
        if invalid_see_also_links:
            crosspost.append('invalid')
        if tmr.status.IsError:
            crosspost.append('error')
        log.go(' %s | %2d TOTAL TOC'
               's | %s | %s | %s%s' %
               (format_count('%2d NEW TOC'
                             's', new_toc_count), len(toc_guids),
                format_count('%2d EXISTING LINKS', see_also_count),
                format_count('%2d INVALID LINKS', invalid_see_also_links_count),
                ('*' if changed else ' ') * 3, note_title),
               crosspost=crosspost, timestamp=False)
    # Restore the default row factory.
    db._db.row_factory = sqlite.Row
def processAllRootNotesMissing(self):
    """Build an auto-TOC entry for every missing root title from its child
    notes, repopulate the TOC_AUTO table, and return the created entries.

    :rtype : list[EvernoteTOCEntry]
    """
    DEBUG_HTML = False
    # log (" CREATING TOC's " , 'tocList', clear=True, timestamp=False)
    # log ("------------------------------------------------" , 'tocList', timestamp=False)
    # if DEBUG_HTML: log('<h1>CREATING TOCs</h1>', 'extra\\logs\\toc-ols\\toc-index.htm', timestamp=False, clear=True, extension='htm')
    ols = []
    dbRows = []
    returns = []
    """:type : list[EvernoteTOCEntry]"""
    db = ankDB(TABLES.TOC_AUTO)
    # Clear the auto-TOC table; it is fully rebuilt below.
    db.delete("1", table=db.table)
    db.commit()
    # olsz = None
    tmr = stopwatch.Timer(self.RootNotesMissing.TitlesList,
                          infoStr='Processing Root Notes', label='RootTitles\\')
    for rootTitleStr in self.RootNotesMissing.TitlesList:
        count_child = 0
        childTitlesDictSortedKeys = sorted(
            self.RootNotesMissing.ChildTitlesDict[rootTitleStr],
            key=lambda s: s.lower())
        total_child = len(childTitlesDictSortedKeys)
        tags = []
        # Look for an existing Outline note and an existing auto-TOC note
        # with this root title.
        outline = self.getNoteFromDB(
            "UPPER(title) = '%s' AND tagNames LIKE '%%,%s,%%'" %
            (escape_text_sql(rootTitleStr.upper()), TAGS.OUTLINE))
        currentAutoNote = self.getNoteFromDB(
            "UPPER(title) = '%s' AND tagNames LIKE '%%,%s,%%'" %
            (escape_text_sql(rootTitleStr.upper()), TAGS.TOC_AUTO))
        notebookGuids = {}
        childGuid = None
        # NOTE(review): `is 1` compares int identity; should be `== 1`.
        # A root with exactly one child and no outline gets no TOC note.
        is_isolated = total_child is 1 and not outline
        if is_isolated:
            tmr.counts.isolated.step()
            childBaseTitle = childTitlesDictSortedKeys[0]
            childGuid = self.RootNotesMissing.ChildTitlesDict[rootTitleStr][childBaseTitle]
            enChildNote = self.RootNotesMissing.ChildNotesDict[rootTitleStr][childGuid]
            # tags = enChildNote.Tags
            log(" > ISOLATED ROOT TITLE: [%-3d]: %-60s --> %-40s: %s" %
                (tmr.counts.isolated.val, rootTitleStr + ':', childBaseTitle, childGuid),
                tmr.label + 'Isolated', timestamp=False)
        else:
            tmr.counts.created.completed.step()
            log_blank(tmr.label + 'TOC')
            log(" [%-3d] %s %s" %
                (tmr.count, rootTitleStr, '(O)' if outline else '   '),
                tmr.label + 'TOC', timestamp=False)
        tmr.step(rootTitleStr)
        if is_isolated:
            continue
        tocHierarchy = TOCHierarchyClass(rootTitleStr)
        if outline:
            # Attach the outline note under the hierarchy root.
            tocHierarchy.Outline = TOCHierarchyClass(note=outline)
            tocHierarchy.Outline.parent = tocHierarchy
        for childBaseTitle in childTitlesDictSortedKeys:
            count_child += 1
            childGuid = self.RootNotesMissing.ChildTitlesDict[rootTitleStr][childBaseTitle]
            enChildNote = self.RootNotesMissing.ChildNotesDict[rootTitleStr][childGuid]
            if count_child == 1:
                tags = enChildNote.Tags
            else:
                # Keep only tags common to every child.
                tags = [x for x in tags if x in enChildNote.Tags]
            if not enChildNote.NotebookGuid in notebookGuids:
                notebookGuids[enChildNote.NotebookGuid] = 0
            notebookGuids[enChildNote.NotebookGuid] += 1
            level = enChildNote.Title.Level
            # childName = enChildNote.Title.Name
            # childTitle = enChildNote.FullTitle
            log("   %2d: %d. --> %-60s" % (count_child, level, childBaseTitle),
                tmr.label + 'TOC', timestamp=False)
            # tocList.generateEntry(childTitle, enChildNote)
            tocHierarchy.addNote(enChildNote)
        # Use the last child's real title to recover the root's original casing.
        realTitle = get_evernote_title_from_guid(childGuid)
        realTitle = realTitle[0:realTitle.index(':')]
        # realTitleUTF8 = realTitle.encode('utf8')
        # Most common notebook among the children wins.
        notebookGuid = sorted(notebookGuids.items(), key=itemgetter(1),
                              reverse=True)[0][0]
        real_root_title = generateTOCTitle(realTitle)
        ol = tocHierarchy.GetOrderedList()
        tocEntry = EvernoteTOCEntry(real_root_title, ol,
                                    ',' + ','.join(tags) + ',', notebookGuid)
        returns.append(tocEntry)
        dbRows.append(tocEntry.items())
        if not DEBUG_HTML:
            continue
        # ols.append(ol)
        # olutf8 = encode(ol)
        # fn = 'toc-ols\\toc-' + str(tmr.count) + '-' + rootTitleStr.replace('\\', '_') + '.htm'
        # full_path = os.path.join(FOLDERS.LOGS, fn)
        # if not os.path.exists(os.path.dirname(full_path)):
        #     os.mkdir(os.path.dirname(full_path))
        # file_object = open(full_path, 'w')
        # file_object.write(olutf8)
        # file_object.close()
        # if DEBUG_HTML: log(ol, 'toc-ols\\toc-' + str(count) + '-' + rootTitleStr.replace('\\', '_'), timestamp=False, clear=True, extension='htm')
        # log("Created TOC #%d:\n%s\n\n" % (count, str_), 'tocList', timestamp=False)
    if DEBUG_HTML:
        ols_html = u'\r\n<BR><BR><HR><BR><BR>\r\n'.join(ols)
        fn = 'toc-ols\\toc-index.htm'
        # NOTE(review): file is not closed if open() itself fails; a `with`
        # block would be safer (left unchanged here).
        file_object = open(os.path.join(FOLDERS.LOGS, fn), 'w')
        try:
            file_object.write(u'<h1>CREATING TOCs</h1>\n\n' + ols_html)
        except Exception:
            # Fall back to an encoded write; swallow failures in this
            # debug-only path.
            try:
                file_object.write(u'<h1>CREATING TOCs</h1>\n\n' + encode(ols_html))
            except Exception:
                pass
        file_object.close()
    db.executemany(
        "INSERT INTO {t} (root_title, contents, tagNames, notebookGuid) VALUES(?, ?, ?, ?)",
        dbRows)
    db.commit()
    return returns
def upload_validated_notes(self, automated=False):
    """Upload every queue entry that passed validation to Evernote, mirror
    the results into Anki, purge uploaded entries from the queue, and
    schedule a retry when rate-limited.

    :param automated: True when invoked from a timer rather than the user
    :return: (final status, processed count, 0)
    """
    db = ankDB(TABLES.NOTE_VALIDATION_QUEUE)
    dbRows = db.all("validation_status = 1")
    notes_created, notes_updated, queries1, queries2 = ([] for i in range(4))
    """ :type: (list[EvernoteNote], list[EvernoteNote], list[str], list[str]) """
    noteFetcher = EvernoteNoteFetcher()
    tmr = stopwatch.Timer(len(dbRows), 25,
                          infoStr="Upload of Validated Evernote Notes",
                          automated=automated, enabled=EVERNOTE.UPLOAD.ENABLED,
                          max_allowed=EVERNOTE.UPLOAD.MAX,
                          label='Validation\\upload_validated_notes\\',
                          display_initial_info=True)
    if tmr.actionInitializationFailed:
        # Upload disabled or otherwise unable to start.
        return tmr.status, 0, 0
    for dbRow in dbRows:
        entry = EvernoteValidationEntry(dbRow)
        evernote_guid, rootTitle, contents, tagNames, notebookGuid, noteType = entry.items()
        tagNames = tagNames.split(',')
        if not tmr.checkLimits():
            # Hit the per-run upload cap.
            break
        # makeNote creates (no guid) or updates (guid set) the server note.
        whole_note = tmr.autoStep(
            self.evernote.makeNote(rootTitle, contents, tagNames, notebookGuid,
                                   guid=evernote_guid, noteType=noteType,
                                   validated=True), rootTitle, evernote_guid)
        if tmr.report_result is False:
            raise ValueError
        if tmr.status.IsDelayableError:
            # Rate-limited or similar; stop and retry later (see should_retry).
            break
        if not tmr.status.IsSuccess:
            continue
        if not whole_note.tagNames:
            whole_note.tagNames = tagNames
        noteFetcher.addNoteFromServerToDB(whole_note, tagNames)
        note = EvernoteNotePrototype(whole_note=whole_note)
        assert whole_note.tagNames
        assert note.Tags
        if evernote_guid:
            notes_updated.append(note)
            # Queue-row deletion key for updates.
            queries1.append([evernote_guid])
        else:
            notes_created.append(note)
            # Queue-row deletion key for creations.
            queries2.append([rootTitle, contents])
    else:
        # Loop finished without a break.
        tmr.reportNoBreak()
    tmr.Report(
        self.anki.add_evernote_notes(notes_created) if tmr.counts.created else 0,
        self.anki.update_evernote_notes(notes_updated) if tmr.counts.updated else 0)
    # Remove successfully uploaded entries from the validation queue.
    if tmr.counts.created.completed.subcount:
        db.executemany("DELETE FROM {t} WHERE title = ? and contents = ? ", queries2)
    if tmr.counts.updated.completed.subcount:
        db.executemany("DELETE FROM {t} WHERE guid = ? ", queries1)
    if tmr.is_success:
        db.commit()
    if tmr.should_retry:
        # Retry sooner after a delayable (rate-limit) error.
        create_timer(
            30 if tmr.status.IsDelayableError else EVERNOTE.UPLOAD.RESTART_INTERVAL,
            self.upload_validated_notes, True)
    return tmr.status, tmr.count, 0
def create_toc_auto(self):
    """Generate or refresh the automatic Table-of-Contents note for every
    non-custom root title, uploading changed/new TOC notes to Evernote and
    mirroring them into Anki.

    :return: (final status, processed count, skipped count)
    """
    db = ankDB()

    def check_old_values():
        """Compare the freshly generated TOC body against any existing
        auto-TOC note with the same title (closure over rootTitle/contents).

        :return: (existing guid or None, contents to upload or None when
            unchanged/skipped)
        """
        old_values = db.first(
            "UPPER(title) = UPPER(?) AND tagNames LIKE '{t_tauto}'",
            rootTitle, columns='guid, content')
        if not old_values:
            # No existing auto-TOC note: create a new one.
            log.go(rootTitle, 'Add')
            return None, contents
        evernote_guid, old_content = old_values
        noteBodyUnencoded = self.evernote.makeNoteBody(contents, encode=False)
        if type(old_content) != type(noteBodyUnencoded):
            # str/unicode mismatch would make the comparison meaningless.
            log.go([rootTitle, type(old_content), type(noteBodyUnencoded)],
                   'Update\\Diffs\\_')
            raise UnicodeWarning
        # Normalize both sides before comparing: resolve the guid placeholder
        # and unify quote style.
        old_content = old_content.replace('guid-pending', evernote_guid).replace("'", '"')
        noteBodyUnencoded = noteBodyUnencoded.replace(
            'guid-pending', evernote_guid).replace("'", '"')
        if old_content == noteBodyUnencoded:
            log.go(rootTitle, 'Skipped')
            tmr.reportSkipped()
            return None, None
        log.go(noteBodyUnencoded, 'Update\\New\\' + rootTitle, clear=True)
        log.go(generate_diff(old_content, noteBodyUnencoded),
               'Update\\Diffs\\' + rootTitle, clear=True)
        # NOTE(review): the second identical .replace() call below is
        # redundant -- one pass already replaces all occurrences.
        return evernote_guid, contents.replace(
            '/guid-pending/', '/%s/' % evernote_guid).replace(
            '/guid-pending/', '/%s/' % evernote_guid)

    update_regex()
    noteType = 'create-toc_auto_notes'
    # Clear stale validation-queue entries from previous runs of this action.
    db.delete("noteType = '%s'" % noteType, table=TABLES.NOTE_VALIDATION_QUEUE)
    NotesDB = EvernoteNotes()
    NotesDB.baseQuery = ANKNOTES.HIERARCHY.ROOT_TITLES_BASE_QUERY
    dbRows = NotesDB.populateAllNonCustomRootNotes()
    notes_created, notes_updated = [], []
    """ :type: (list[EvernoteNote], list[EvernoteNote]) """
    info = stopwatch.ActionInfo('Creation of Table of Content Note(s)',
                                row_source='Root Title(s)')
    log = Logger('See Also\\2-%s\\' % noteType, rm_path=True)
    tmr = stopwatch.Timer(len(dbRows), 25, info,
                          max_allowed=EVERNOTE.UPLOAD.MAX, label=log.base_path)
    if tmr.actionInitializationFailed:
        return tmr.status, 0, 0
    for dbRow in dbRows:
        rootTitle, contents, tagNames, notebookGuid = dbRow.items()
        # Tags: children's common tags plus TOC markers (plus #Sandbox when
        # sandboxed), minus reversibility markers.
        tagNames = (set(tagNames[1:-1].split(',')) | {TAGS.TOC, TAGS.TOC_AUTO} |
                    ({"#Sandbox"} if EVERNOTE.API.IS_SANDBOXED else set())
                    ) - {TAGS.REVERSIBLE, TAGS.REVERSE_ONLY}
        rootTitle = generateTOCTitle(rootTitle)
        evernote_guid, contents = check_old_values()
        if contents is None:
            # Unchanged -- already reported as skipped.
            continue
        if not tmr.checkLimits():
            break
        if not EVERNOTE.UPLOAD.ENABLED:
            tmr.reportStatus(EvernoteAPIStatus.Disabled, title=rootTitle)
            continue
        whole_note = tmr.autoStep(
            self.evernote.makeNote(rootTitle, contents, tagNames, notebookGuid,
                                   noteType=noteType, guid=evernote_guid),
            rootTitle, evernote_guid)
        if tmr.report_result is False:
            raise ValueError
        if tmr.status.IsDelayableError:
            break
        if not tmr.status.IsSuccess:
            continue
        (notes_updated if evernote_guid else notes_created).append(
            EvernoteNotePrototype(whole_note=whole_note))
    tmr.Report(
        self.anki.add_evernote_notes(notes_created)
        if tmr.counts.created.completed else 0,
        self.anki.update_evernote_notes(notes_updated)
        if tmr.counts.updated.completed else 0)
    if tmr.counts.queued:
        db.commit()
    return tmr.status, tmr.count, tmr.counts.skipped.val
def main(evernote=None, anki=None):
    """Update the "See Also" footer of linked Evernote notes (pipeline step 6).

    For every note referenced as a See-Also target (excluding TOC/outline
    notes), rebuild its See-Also footer from the corresponding Anki note
    field, diff old vs. new content, and upload the changed notes back to
    Evernote, then propagate the updates into Anki.

    Args:
        evernote: optional Evernote API wrapper; lazily constructed on first upload.
        anki: optional Anki wrapper; lazily constructed before the final report.
    """
    # @clockit
    def print_results(log_folder='Diff\\SeeAlso', full=False, final=False):
        # Closure over the per-iteration `n` (notes state) and `enNote`.
        # Selects which old/new text pair to diff: final content, full updated
        # content, or just the See-Also footer.
        if final:
            oldResults = n.old.content.final
            newResults = n.new.content.final
        elif full:
            oldResults = n.old.content.updated
            newResults = n.new.content.updated
        else:
            oldResults = n.old.see_also.updated
            newResults = n.new.see_also.updated
        diff = generate_diff(oldResults, newResults)
        # Log sink #6 can be disabled via config.
        if not 6 in FILES.LOGS.SEE_ALSO_DISABLED:
            log.plain(diff,
                      log_folder + '\\Diff\\%s\\' % n.match_type + enNote.FullTitle,
                      extension='htm', clear=True)
            log.plain(diffify(oldResults, split=False),
                      log_folder + '\\Original\\%s\\' % n.match_type + enNote.FullTitle,
                      extension='htm', clear=True)
            log.plain(diffify(newResults, split=False),
                      log_folder + '\\New\\%s\\' % n.match_type + enNote.FullTitle,
                      extension='htm', clear=True)
            if final:
                log.plain(oldResults,
                          log_folder + '\\Final\\Old\\%s\\' % n.match_type + enNote.FullTitle,
                          extension='htm', clear=True)
                log.plain(newResults,
                          log_folder + '\\Final\\New\\%s\\' % n.match_type + enNote.FullTitle,
                          extension='htm', clear=True)
            # NOTE(review): source formatting was mangled — the aggregate
            # '__All' log may belong outside the disabled-check; confirm.
            log.plain(diff + '\n', log_folder + '\\__All')

    # @clockit
    def process_note():
        # Parses the existing note content, classifies how the See-Also
        # section matched (match_type V1..V5), and computes the updated
        # content.  Returns False when there is nothing to insert.
        n.old.content = notes.version.pstrings(enNote.Content)
        if not n.old.content.regex_original.successful_match:
            # No existing See-Also section found in the note body.
            if n.new.see_also.original == "":
                # ...and nothing new to add either.
                n.new.content = notes.version.pstrings(n.old.content.original)
                return False
            # Append a brand-new See-Also footer just before </en-note>.
            n.new.content = notes.version.pstrings(
                n.old.content.original.replace(
                    '</en-note>',
                    '<div><span><br/></span></div>' + n.new.see_also.original + '\n</en-note>'))
            n.new.see_also.updated = str_process(n.new.content.original)
            n.old.see_also.updated = str_process(n.old.content.original)
            log.plain(enNote.Guid + '<BR>' + ', '.join(enNote.TagNames) + '<HR>' +
                      enNote.Content + '<HR>' + n.new.see_also.updated,
                      'SeeAlsoMatchFail\\' + enNote.FullTitle,
                      extension='htm', clear=True)
            n.match_type = 'V1'
        else:
            # Existing See-Also section matched; work from its captured text.
            n.old.see_also = notes.version.pstrings(n.old.content.regex_original.main)
            n.match_type = 'V2'
            if n.old.see_also.regex_processed.successful_match:
                # Sanity check deliberately short-circuited with `True or` —
                # kept as-is from the original (effectively disabled).
                assert True or str_process(
                    n.old.content.regex_original.main
                ) is n.old.content.regex_processed.main
                n.old.content.updated = n.old.content.original.replace(
                    n.old.content.regex_original.main,
                    str_process(n.old.content.regex_original.main))
                n.old.see_also.useProcessed()
                n.match_type += 'V3'
            n.new.see_also.regex_original.subject = n.new.see_also.original + '</en-note>'
            if not n.new.see_also.regex_original.successful_match:
                # New footer text didn't match the expected See-Also pattern.
                log.plain(enNote.Guid + '\n' + ', '.join(enNote.TagNames) + '\n' +
                          n.new.see_also.original,
                          'SeeAlsoNewMatchFail\\' + enNote.FullTitle,
                          extension='htm', clear=True)
                # see_also_replace_old = n.old.content.original.match.processed.see_also.processed.content
                n.old.see_also.updated = n.old.content.regex_updated.see_also
                n.new.see_also.updated = n.new.see_also.processed
                n.match_type += 'V4'
            else:
                assert (n.old.content.regex_processed.see_also_content ==
                        notes.version.see_also_match(
                            str_process(n.old.content.regex_original.main)
                        ).see_also_content)
                n.old.see_also.updated = notes.version.see_also_match(
                    str_process(n.old.content.regex_original.main)).see_also_content
                n.new.see_also.updated = str_process(
                    n.new.see_also.regex_original.see_also_content)
                n.match_type += 'V5'
        # Splice the recomputed See-Also text into the updated content.
        # NOTE(review): mangled source makes the nesting ambiguous — this line
        # may originally have lived inside the `else` branch only; confirm.
        n.new.content.updated = n.old.content.updated.replace(
            n.old.see_also.updated, n.new.see_also.updated)

    def print_results_fail(title, status=None):
        # Log a non-updated note (no match / unchanged) and report its status
        # to the timer.  `status` defaults to GenericError.
        log.go(title + ' for %s' % enNote.FullTitle, 'NoUpdate')
        print_results('NoMatch\\SeeAlso')
        print_results('NoMatch\\Contents', full=True)
        if status is None:
            status = EvernoteAPIStatus.GenericError
        tmr.reportStatus(status)

    noteType = 'SeeAlso-Step6'
    db = ankDB()
    # Clear stale validation-queue entries from a previous run of this step.
    db.delete("noteType = '%s'" % noteType, table=TABLES.NOTE_VALIDATION_QUEUE)
    # All notes that are See-Also targets, excluding TOC and outline notes.
    # ({s}/{n}/{t_*} placeholders are expanded by the project's db wrapper.)
    results = db.all(
        "SELECT DISTINCT s.target_evernote_guid, n.* FROM {s} as s, {n} as n "
        "WHERE s.target_evernote_guid = n.guid AND n.tagNames NOT LIKE '{t_toc}' "
        "AND n.tagNames NOT LIKE '{t_out}' ORDER BY n.title ASC;")
    # count_queued = 0
    log = Logger('See Also\\6-update_see_also_footer_in_evernote_notes\\', rm_path=True)
    tmr = stopwatch.Timer(len(results), 25,
                          infoStr='Updating Evernote See Also Notes',
                          label=log.base_path, do_print=True)
    # log.banner("UPDATING EVERNOTE SEE ALSO CONTENT: %d NOTES" % len(results), do_print=True)
    notes_updated = []
    # number_updated = 0
    for result in results:
        enNote = EvernoteNotePrototype(db_note=result)
        n = notes()
        tmr.step(enNote.FullTitle if enNote.Status.IsSuccess else '(%s)' % enNote.Guid)
        flds = get_anki_fields_from_evernote_guids(enNote.Guid)
        if not flds:
            print_results_fail('No Anki Note Found')
            continue
        # Anki stores note fields as a single \x1f-separated string.
        flds = flds.split("\x1f")
        n.new.see_also = notes.version.pstrings(flds[FIELDS.ORD.SEE_ALSO])
        result = process_note()
        if result is False:
            print_results_fail('No Match')
            continue
        if n.match_type != 'V1' and str_process(
                n.old.see_also.updated) == n.new.see_also.updated:
            # Section matched but its normalized content is unchanged.
            print_results_fail('Match but contents are the same',
                               EvernoteAPIStatus.RequestSkipped)
            continue
        print_results()
        print_results('Diff\\Contents', final=True)
        enNote.Content = n.new.content.final
        if not EVERNOTE.UPLOAD.ENABLED:
            tmr.reportStatus(EvernoteAPIStatus.Disabled)
            continue
        if not evernote:
            # Lazily create the API wrapper only when an upload is needed.
            evernote = Evernote()
        whole_note = tmr.autoStep(
            evernote.makeNote(enNote=enNote, noteType=noteType), update=True)
        if tmr.report_result is False:
            raise ValueError
        if tmr.status.IsDelayableError:
            # Rate-limit or similar transient error — stop and retry later.
            break
        if tmr.status.IsSuccess:
            notes_updated.append(EvernoteNotePrototype(whole_note=whole_note))
    if tmr.is_success and not anki:
        anki = Anki()
    # Propagate the successfully uploaded notes into the Anki collection.
    tmr.Report(
        0, anki.update_evernote_notes(notes_updated) if tmr.is_success else 0)