def update_keys(keys):
    """Update solr for the given document keys.

    Only keys of the form ``/books/*``, ``/authors/*`` and ``/works/*``
    are processed; everything else is silently dropped.

    :param keys: iterable of document keys
    :return: number of documents sent to solr
    """
    def is_indexable(key):
        # Solr only indexes editions, authors and works; a valid key
        # splits into exactly three parts: "", type, id.
        parts = key.split("/")
        return len(parts) == 3 and parts[1] in ("books", "authors", "works")

    # Reset the monkeypatch cache so it cannot grow without bound
    # across repeated invocations.
    update_work.clear_monkeypatch_cache(max_size=10000)

    total = 0
    # Post updates in batches of 100 to bound memory and request size.
    for batch in web.group((k for k in keys if is_indexable(k)), 100):
        batch = list(batch)
        total += len(batch)
        update_work.update_keys(batch, commit=False)

    if total:
        logger.info("updated %d documents", total)
    return total
def update_keys(keys):
    """Update solr for the given document keys.

    Only keys of the form ``/books/*``, ``/authors/*`` and ``/works/*``
    are processed; everything else is silently dropped.

    :param keys: iterable of document keys
    :return: number of documents sent to solr
    """
    global args
    # Keep only the document types solr indexes.
    keys = (k for k in keys
            if k.count("/") == 2 and k.split("/")[1] in ["books", "authors", "works"])

    # Reset the monkeypatch cache so it cannot grow without bound
    # across repeated invocations.
    update_work.clear_monkeypatch_cache(max_size=10000)

    # Route debug output through the configured logger instead of a bare
    # `print` statement to stdout; lazy %-args defer formatting until the
    # DEBUG level is actually enabled.
    logger.debug("Args: %s", args)
    update_work.load_configs(args.ol_url, args.config, 'default')

    count = 0
    # Post updates in batches of 100 to bound memory and request size.
    for chunk in web.group(keys, 100):
        chunk = list(chunk)
        count += len(chunk)
        update_work.do_updates(chunk)

    if count:
        logger.info("updated %d documents", count)
    return count
def update_keys(keys):
    """Update solr for the given document keys.

    Only keys of the form ``/books/*``, ``/authors/*`` and ``/works/*``
    are processed; everything else is silently dropped.

    :param keys: iterable of document keys
    :return: number of documents sent to solr
    """
    global args
    # Keep only the document types solr indexes.
    keys = (k for k in keys
            if k.count("/") == 2 and k.split("/")[1] in ["books", "authors", "works"])

    # Reset the monkeypatch cache so it cannot grow without bound
    # across repeated invocations.
    update_work.clear_monkeypatch_cache(max_size=10000)

    # Route debug output through the configured logger instead of a bare
    # `print` statement to stdout; lazy %-args defer formatting until the
    # DEBUG level is actually enabled.
    logger.debug("Args: %s", args)
    update_work.load_configs(args.ol_url, args.config, 'default')

    count = 0
    # Post updates in batches of 100 to bound memory and request size.
    # (Dead commented-out call to update_work.update_keys removed.)
    for chunk in web.group(keys, 100):
        chunk = list(chunk)
        count += len(chunk)
        update_work.do_updates(chunk)

    if count:
        logger.info("updated %d documents", count)
    return count
def update_keys(keys):
    """Update solr for the given document keys.

    Only keys of the form ``/books/*``, ``/authors/*`` and ``/works/*``
    are processed; everything else is silently dropped.

    :param keys: iterable of document keys
    :return: number of documents sent to solr (0 when keys is empty)
    """
    if not keys:
        # FIXME: Some kind of hack introduced to work around DB connectivity issue
        return 0

    global args
    # Pass args lazily via %-style logging parameters instead of
    # pre-formatting with "%" — the message is only built when the
    # DEBUG level is enabled (str() was also redundant with %s).
    logger.debug("Args: %s", args)
    update_work.load_configs(args.ol_url, args.config, 'default')

    # Keep only the document types solr indexes.
    keys = (k for k in keys
            if k.count("/") == 2 and k.split("/")[1] in ["books", "authors", "works"])

    # Reset the monkeypatch cache so it cannot grow without bound
    # across repeated invocations.
    update_work.clear_monkeypatch_cache(max_size=10000)

    count = 0
    # Post updates in batches of 100 to bound memory and request size.
    for chunk in web.group(keys, 100):
        chunk = list(chunk)
        count += len(chunk)
        update_work.do_updates(chunk)

    if count:
        logger.info("updated %d documents", count)
    return count