def _scread_texts_fetch():
    """Fetch page content for every text note that is still empty.

    Shows fetch progress via show_progress (throttled by conf.feedback_time);
    when there is nothing to fetch, tells the user via a tooltip.
    """
    core.unfold_batches()
    texts = core.get_empty_texts()
    if texts:  # idiomatic truthiness instead of len(...) > 0
        dmap(core.fetch_text, texts, show_progress, conf.feedback_time)
    else:
        # FIX: message was ungrammatical ("There is no links.").
        tooltip('There are no links.')
def add_translations(f, callback):
    """Translate freshly checked word notes and file their cards.

    f        -- translation function applied per word through dmap; appears
                to return the translation string (TODO confirm dmap contract).
    callback -- progress callback dmap invokes, throttled by
                conf.feedback_time.

    Side effects: updates note fields, moves matching cards to the
    'filtered' deck, and finally suspends the processed 'unsorted' cards.
    """
    # Word notes not tagged 'ignored', joined against the given card query.
    join_words = lambda cs: (words() ^ 'n'
                             | where(tag_is_not('ignored'))
                             | join(cs, f_note_pk, f_note_fk))
    # Checked, non-suspended cards still in the 'unsorted' deck.
    checked_cards = lambda: cards() ^ 'c' | where(is_checked(),
                                                  deck_is('unsorted'),
                                                  is_not_suspended())
    # Of those, only the recently touched ones are translated now.
    new_cards = lambda: checked_cards() | where(is_recent())
    # Rows of (note id, packed fields blob) for the notes to translate.
    data = execute(db(), join_words(new_cards()) | select('n.id', '@flds'))
    # Python 2 zip() returns a list, so `or` supplies the fallback when there
    # are no rows (zip(*[]) == [] is falsy).
    (nids, fss) = zip(*data) or ([], [])
    # Pull the 'Words' field out of each note's packed field blob.
    ws = map(lambda flds: splitFields(flds)[api.get_field('word', 'Words')],
             fss)
    # Write each translation back; an empty translation tags the note
    # 'ignored'.  NOTE: side-effect map() — relies on Python 2's eager map.
    map(
        lambda (nid, tr): api.upd_note(nid, {'Translation': tr},
                                       [] if len(tr) > 0 else ['ignored']),
        zip(nids, dmap(f, ws, callback, conf.feedback_time)))
    # Move the translated notes' word/'filtered' template cards into the
    # 'filtered' deck and mark them recent + learning.
    execute(
        db(),
        cards() ^ 'c2'
        | where(tmpl_is('word', 'filtered'))
        | join(join_words(new_cards()), '@nid', 'n.id')
        | update(set_deck('filtered'), set_recent(), set_learning())
        | with_pk('@id'))
    # Finally suspend the processed 'unsorted' cards so they are not
    # picked up again on the next run.
    execute(db(), checked_cards() | update(set_suspended()))
def test_dmap():
    """dmap must match plain map and throttle progress callbacks
    according to the feedback interval."""
    state = Closure()
    state.log = []
    reference = lambda x: x * x
    inputs = range(0, 10)

    def on_progress(done, total):
        state.log.append(done)

    def run(step_delay):
        # Reset the callback trace, then map with a per-item delay.
        state.log[:] = []
        throttled = delayed(step_delay)(lambda x: x * x)
        assert dmap(throttled, inputs, on_progress, 0.1) == map(reference,
                                                                inputs)

    # 10 items at 0.05s each, reporting every 0.1s -> 4 callbacks.
    run(0.05)
    assert len(state.log) == 4
    # 10 items at 0.025s each -> only 2 callbacks.
    run(0.025)
    assert len(state.log) == 2
def update_estimations(callback):
    """Re-estimate which parsed texts have become 'available' to study.

    callback -- progress callback forwarded to dmap, throttled by
                conf.feedback_time.

    Side effects: tags newly available text notes and moves their cards
    from the 'texts' deck to the 'available' deck.
    """
    # Ids of texts that are parsed but not yet marked available.
    text_ids = execute(
        db(),
        texts() | where(tag_is('parsed'), tag_is_not('available'))
        | select('@id'))
    # Three word populations, from weakest to strongest knowledge signal.
    query_all = lambda: words()
    # Words whose 'unsorted' card is suspended (i.e. already checked off).
    query_checked = lambda: (words() ^ 'n' | join(
        cards() ^ 'c' | where(
            tmpl_is('word', 'unsorted'), is_suspended()), '@id', '@nid'))
    # Words whose 'filtered' card is suspended — presumably learned;
    # TODO confirm the intended meaning of suspension here.
    query_learning = lambda: (words() ^ 'n' | join(
        cards() ^ 'c' | where(
            tmpl_is('word', 'filtered'), is_suspended()), '@id', '@nid'))
    # stem -> estimation value.  Later update() calls overwrite earlier
    # ones, so list order ranks words: '0' < '1' < maturity().
    estim = {}
    # NOTE: side-effect map() — relies on Python 2's eager map.
    map(
        lambda (q, val): estim.update(dict(execute(db(),
                                                   q() | select(stem(), val)))),
        [(query_all, '0'), (query_checked, '1'),
         (query_learning, maturity())])
    # One availability flag per text, computed with progress feedback.
    availability = dmap(lambda text: estimate(text, estim),
                        map(api.get_text, text_ids), callback,
                        conf.feedback_time)
    # Keep only the ids whose text is now estimated as available.
    changed_ids = map(itemgetter(0),
                      filter(itemgetter(1), zip(text_ids, availability)))
    tg().bulkAdd(changed_ids, conf.tags['available'])
    # Move the cards of all 'available'-tagged text notes out of 'texts'.
    execute(db(), (cards() ^ 'c' | where(deck_is('texts'))
                   | join(texts() ^ 'n' | where(tag_is('available')),
                          '@nid', '@id')
                   | update(set_deck('available'))
                   | with_pk('@id')))
def _scread_texts_parse_all():
    """Parse every newly added text, reporting progress to the UI."""
    parse_batch = lambda batch: dmap(core.parse_text, batch,
                                     show_progress, conf.feedback_time)
    _mk_parse_text(parse_batch, core.get_new_texts())