def metadata_update():
    """Handle form submission for updating location metadata info.

    Parses the POSTed form into per-row metadata dicts (form keys are
    "<attr>_<rowid>"), filters out incomplete/unset rows, persists the
    surviving rows, and re-runs the extraction pipeline for the affected
    metadata ids.  Always redirects back to '/'.
    """
    form = request.form
    tablename = form.get('tablename', '')
    context = {}
    if tablename:
        reserved = ['tablename']

        # Parse and gather attributes for each form row.  Keys look like
        # "col_name_12": everything before the last '_' is the attribute
        # name, the suffix is the (possibly negative, for new rows) id.
        metadatas = defaultdict(dict)
        for k, v in form.items():
            if k in reserved:
                continue
            idx = k.rindex('_')
            name, row_id = k[:idx], int(k[idx + 1:])
            metadatas[row_id][name] = v

        user_inputs = []
        for mid, md in metadatas.items():
            try:
                # Skip rows the user left unset or incomplete.
                if int(md.get('loc_type', '-1')) == -1:
                    continue
                if not md.get('format', '').strip():
                    continue
                if not md.get('col_name', '').strip():
                    continue
                # Non-negative ids are existing rows: normalize the
                # 'deleted' flag and record the id for the update path.
                if mid >= 0:
                    md['deleted'] = bool(int(md['deleted']))
                    md['id'] = mid
            except (ValueError, KeyError):
                # Malformed numeric field or missing 'deleted' key:
                # skip the row rather than persisting partial data.
                continue
            user_inputs.append(md)

        lmds = create_metadata_from_user_inputs(db.session, tablename, user_inputs)
        reprocess_lmds = update_metadata(db.session, tablename, lmds)
        reprocess_ids = [lmd.id for lmd in reprocess_lmds]
        print("reprocessing lmds: %s" % reprocess_ids)

        # Cancel tasks related to the table, then start up the
        # pipeline.  This works because the cron job executes
        # tasks serially in FIFO order.
        tasks.cancel_tasks(g.task_session, tablename)
        tasks.run_extractor(g.task_session, tablename, ids=reprocess_ids)
        g.task_session.commit()
    return redirect('/')
def test_cron(session):
    """Exercise the cron task queue: enqueue waits for two task ids,
    then cancel the tasks for each of those ids, in the same order."""
    import locjoin.tasks.tasks as tasks

    task_ids = (3, 2)
    for tid in task_ids:
        tasks.wait(session, tid)
    for tid in task_ids:
        tasks.cancel_tasks(session, tid)