def add_translations(f, callback):
    """Translate the words on recently-checked cards and reorganize them.

    f        -- translation function applied to each word string via dmap
    callback -- progress callback forwarded to dmap

    Flow: collect note ids + fields of non-ignored words on checked,
    unsuspended cards in the 'unsorted' deck that are recent; translate each
    word; write the translation back (tagging the note 'ignored' when the
    translation is empty); move the matching 'filtered'-template cards into
    the 'filtered' deck and mark them recent/learning; finally suspend the
    originally checked cards.  NOTE(review): exact DSL semantics (where/join/
    update combinators) are defined elsewhere — descriptions above are
    inferred from the pipeline names; confirm against the DSL module.
    """
    # words ('n') joined against a card query, skipping 'ignored'-tagged notes
    join_words = lambda cs: (words() ^ 'n'
                             | where(tag_is_not('ignored'))
                             | join(cs, f_note_pk, f_note_fk))
    # checked, unsuspended cards sitting in the 'unsorted' deck
    checked_cards = lambda: cards() ^ 'c' | where(is_checked(), deck_is('unsorted'), is_not_suspended())
    new_cards = lambda: checked_cards() | where(is_recent())
    data = execute(db(), join_words(new_cards()) | select('n.id', '@flds'))
    # zip(*[]) is [] (falsy) in Python 2, so fall back to two empty sequences
    (nids, fss) = zip(*data) or ([], [])
    # extract the 'Words' field from each note's raw field blob
    ws = map(lambda flds: splitFields(flds)[api.get_field('word', 'Words')], fss)
    # write each translation back; empty translations get the note ignored
    for (nid, tr) in zip(nids, dmap(f, ws, callback, conf.feedback_time)):
        api.upd_note(nid, {'Translation': tr}, [] if len(tr) > 0 else ['ignored'])
    # move the freshly-translated 'filtered' cards into the filtered deck
    execute(
        db(),
        cards() ^ 'c2'
        | where(tmpl_is('word', 'filtered'))
        | join(join_words(new_cards()), '@nid', 'n.id')
        | update(set_deck('filtered'), set_recent(), set_learning())
        | with_pk('@id'))
    # suspend the source cards last, after everything above has used them
    execute(db(), checked_cards() | update(set_suspended()))
def fetch_text(text_id):
    """Load the source referenced by note *text_id* and store it in 'Text'.

    The note's sort field ('@sfld') holds either a URL (scraped as an HTML
    page) or a local file path (read as UTF-8 plain text).  The note is
    tagged 'fetched' afterwards.
    """
    # exactly one row expected; a different cardinality raises ValueError
    [src] = execute(db(), texts() | where('@id = ' + str(text_id)) | select('@sfld'))
    if re.match(r' *https?://', src) is not None:
        # remote source: assume an HTML page and scrape it
        text = scrape(src)
    else:
        # local source: assume plain text in UTF-8; the context manager
        # closes the handle even when read()/decode() raises (the original
        # leaked it on error)
        with open(src, 'r') as f:
            text = f.read().decode('utf-8')
    api.upd_note(text_id, {'Text': text}, ['fetched'])
def parse_text(text_id):
    """Parse the text of note *text_id* against the known-word dictionary.

    Unknown stems get a fresh 'word' note; already-known stems have their
    occurrence count, context and word forms accumulated onto the existing
    note.  The text note is tagged 'parsed' when done.
    """
    # every stem currently known to the word collection
    dictionary = map(str, execute(db(), (words() | select(f_stem))))
    (new, nfo) = parse(api.get_text(text_id), dictionary)
    # one fresh note per previously unseen stem
    for stem in new:
        api.add_note('word', 'words', {
            'Stem': stem,
            'TextId': str(text_id),
            'Count': str(0)
        })
    # merge occurrence data into existing notes; each field updater receives
    # the old field value and returns the new one.  count/word/context are
    # bound as lambda defaults so the values are frozen per iteration even
    # if api.upd_note defers invoking the updaters.
    for (stem, (count, word, context)) in nfo.iteritems():
        api.upd_note(
            api.get_note_id('word', stem), {
                'Count': lambda ov, count=count: str(int(ov) + count),
                'Context': lambda ov, context=context: (ov if ov != empty_field() else '') + context,
                'Words': lambda ov, word=word: (ov if ov != empty_field() else '') + word
            })
    api.upd_note(text_id, {}, ['parsed'])