def main(
    server,
    passwordsf,
    mail_lang,
    tls,
    dry_run,
    mail_server,
    mail_port,
    mail_login,
    mail_from,
    mail_pw,
):
    tls = as_bool(tls)
    dry_run = as_bool(dry_run)
    if not mail_from:
        mail_from = mail_login
    assert mail_login
    assert mail_pw
    L.info("start")
    with open(passwordsf, "r") as fic:
        passwords = json.loads(fic.read())
    for login, password in passwords.items():
        notify_access(
            login,
            password,
            server,
            mail_lang,
            tls,
            dry_run,
            mail_server,
            mail_port,
            mail_login,
            mail_from,
            mail_pw,
        )
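
# A hedged sketch of the passwords file consumed above, assuming the flat
# {login: password} mapping implied by the items() loop (values are
# hypothetical):
#
#   {
#     "alice@example.com": "generated-password"
#   }
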
def main(jsonf, passwordsf, skippedusers):
    if skippedusers:
        skippedusers = re.compile(skippedusers, flags=re.I | re.M)
    L.info("start")
    client = Client()
    client.sync()
    with open(passwordsf, "r") as fic:
        passwords = json.load(fic)
    constructed = DONE["constructed"]
    # either create or edit passwords
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "10"))
    items = []
    for n, secretd in passwords.items():
        items.append((client, n, secretd, constructed))
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(record, items)
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed[ret[0].id] = ret
    else:
        for n, secretd in passwords.items():
            record(client, n, secretd, constructed)
    return constructed
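
# Parallelism is tuned through the two environment variables read above; a
# hedged invocation sketch (the script name is hypothetical):
#
#   BW_PARALLEL_IMPORT=0 python manage_users.py             # sequential
#   BW_PARALLEL_IMPORT_PROCESSES=4 python manage_users.py   # smaller pool
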
def main(login):
    assert login
    L.info("start")
    client = Client()
    client.sync()
    try:
        user = client.get_user(email=login)
        client.disable_user(user=user)
    except bwclient.UserNotFoundError:
        pass
def do_confirm_invitations(client, email, oid, aclargs):
    orga = aclargs["orga"]
    try:
        L.info(f"Confirming {email} to {orga.name}/{orga.id}")
        ret = client.confirm_invitation(orga, email)
        return {(email, oid): ret}
    except Exception as exc:
        trace = traceback.format_exc()
        L.error(f"Error while confirming {email}\n{orga.name}\n{trace}")
        DONE["errors"][(email, oid)] = (exc, trace)
def record(client, email, secretd, constructed):
    try:
        return client.delete_user(email)
    except bwclient.UserNotFoundError:
        pass
    except Exception as exc:
        trace = traceback.format_exc()
        print(trace)
        sid = email
        L.error(f"Error while deleting {sid}\n{trace}")
        DONE["errors"][sid] = exc
def add_to_orga(client, email, oid, aclargs):
    orga = aclargs["orga"]
    try:
        L.info(f"Adding {email} to orga: {orga.name}/{orga.id}")
        ret = client.set_organization_access(email, orga, **aclargs["payload"])
        return {(email, oid): ret}
    except Exception as exc:
        trace = traceback.format_exc()
        L.error(f"Error while adding {email}\n{orga.name}\n{trace}")
        DONE["errors"][(email, oid)] = (exc, trace)
def add_to_collection(client, email, cid, aclargs):
    collection = aclargs["collection"]
    try:
        L.info(
            f"Adding {email} to collection: {collection.name}/{collection.id}")
        ret = client.set_collection_access(
            email, collection, **aclargs["payload"])
        return {(email, cid): ret}
    except Exception as exc:
        trace = traceback.format_exc()
        L.error(f"Error while adding {email}\n{collection.name}\n{trace}")
        DONE["errors"][(email, cid)] = (exc, trace)
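
# Error-handling convention shared by the helpers above: instead of raising,
# failures are stashed in the module-level DONE mapping, so a parallel run can
# complete and be inspected afterwards. A hedged sketch of the assumed layout:
#
#   DONE = {"constructed": {}, "errors": {}}
#   # e.g. DONE["errors"][("alice@example.com", oid)] == (exc, trace)
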
def main(jsonf, server, email, password, assingleorg):
    L.info("start")
    client = Client(server, email, password)
    client.sync()
    orgas = client.get_organizations()
    orgas_to_delete = OrderedDict()
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        if assingleorg:
            orga = {
                "bw": None,
                "name": data["name"],
                "collections": OrderedDict(),
            }
            data["vaults"] = [orga]
        for vdata in data["vaults"]:
            v = sanitize(vdata["name"])
            try:
                orgas_to_delete[v.lower()]
            except KeyError:
                try:
                    ods = orgas["name"][v.lower()]
                except KeyError:
                    continue
                for ix, (_, o) in enumerate(ods.items()):
                    orgas_to_delete[f"{v}{ix}".lower()] = {
                        "bw": o,
                        "vault": vdata,
                        "name": v,
                        "collections": OrderedDict(),
                    }
                    L.info(f"Will delete orga {v}")
    # delete orgas, optionally in parallel
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "30"))
    items = []
    for org, odata in orgas_to_delete.items():
        if odata["bw"] is not None:
            items.append([client, odata])
    if parallel:
        with Pool(processes=processes) as pool:
            results = pool.starmap_async(record, items)
            results.wait()
    else:
        for item in items:
            record(*item)
def record(client, email, secretd, constructed):
    # reuse an existing account when present, validating its email if needed
    try:
        user = client.get_user(email=email)
        if not user.emailVerified:
            user = client.validate(email)
        return user, secretd["password"]
    except bwclient.UserNotFoundError:
        pass
    try:
        return client.create_user(
            email, name=email.split("@")[0], password=secretd["password"])
    except Exception as exc:
        trace = traceback.format_exc()
        print(trace)
        sid = email
        L.error(f"Error while creating {sid}\n{trace}")
        DONE["errors"][sid] = exc
def record(client, cipherd):
    try:
        secret = cipherd["secret"]
        cipherd = assemble(cipherd)
        bw = cipherd["bw"]
        for action in cipherd["actions"]:
            if action in ["create", "edit"]:
                bw = getattr(client, action)(**cipherd["patch"])
                break
        # attachments
        if "attach" in cipherd["actions"]:
            filename = secret["blob_meta"]["filename"]
            filepath = f'{EXPORT_DIR}/{secret["id"]}/{filename}'
            client.attach(bw, filepath)
        return bw
    except Exception as exc:
        trace = traceback.format_exc()
        sid = cipherd["secret"]["id"]
        L.error(f"Error while creating {sid}\n{trace}")
        DONE["errors"][sid] = exc
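
# A hedged sketch of the cipherd mapping record() expects, mirroring the
# idata dict assembled by the importer below (keys taken from that code,
# values illustrative):
#
#   cipherd = {
#       "secret": {"id": 42, "type": 200, "blob_meta": {...}},  # vaultier secret
#       "actions": ["create", "attach", "link"],
#       "patch": {...},   # kwargs handed to client.create / client.edit
#       "bw": None,       # preexisting bitwarden cipher when editing
#   }
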
def main(jsonf, server, email, password, assingleorg):
    L.info("start")
    client = Client(vaultier=True)
    client.sync()
    ciphers_to_import = OrderedDict()
    vaultier_secrets = {}
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        orga = {}
        if assingleorg:
            organ = data["name"]
            orga = client.get_organization(organ)
        for iv, vdata in enumerate(data["vaults"]):
            v = vdata["name"]
            if not vdata["cards"]:
                L.info(f"Skipping {v} as it has no cards")
                continue
            if not assingleorg:
                orga = client.get_organization(v)
            collections = client.get_collections(orga)
            for cdata in vdata["cards"]:
                cn = sanitize(cdata["name"])
                vc = cn
                if assingleorg:
                    vc = f"{v} {cn}"
                collection = client.get_collection(vc, collections=collections)
                cid = collection.id
                for ix, secret in enumerate(cdata["secrets"]):
                    sid = f"{secret['id']}"
                    vaultier_secrets[sid] = secret
                    sd = secret.get("data", {})
                    idata = {
                        "vault": vdata,
                        "card": cdata,
                        "sid": sid,
                        "actions": [],
                        "secret": secret,
                        "collection": collection,
                        "collections": [],
                        "orga": orga,
                        "patch": {},
                        "bw": None,
                    }
                    sname = get_name(idata)
                    idata["name"] = sname
                    try:
                        sec = client.get_cipher(
                            sid,
                            vc,
                            collections=collections,
                            orga=orga,
                            vaultier=True,
                            sync=False,
                        )
                        if sec.vaultiersecretid != sid:
                            raise SecretNotFound()
                        idata["bw"] = sec
                        edit = False
                        # vaultier otypes: 100: note, 200: secret, 300: file
                        if secret["type"] in [
                                VAULTIER_SECRET.file,
                                VAULTIER_SECRET.secret,
                        ]:
                            login = getattr(sec, "login", {}) or {}
                            if any((
                                (login.get("username", "") or "") !=
                                (sd.get("username") or ""),
                                (login.get("password", "") or "") !=
                                (sd.get("password") or ""),
                            )):
                                edit = True
                            uris = login.get("uris", {}) or {}
                            urls = [
                                a.get("uri", "") for a in uris
                                if a.get("uri", "")
                            ]
                            if sd.get("url") and (sd["url"] not in urls):
                                edit = True
                        if secret["type"] in [a for a in VAULTIER_SECRET]:
                            if any((
                                (sec.name or "") != (sname or ""),
                                (sec.notes or "") != (get_note(idata) or ""),
                            )):
                                edit = True
                        if edit:
                            idata["actions"].append("edit")
                            L.info(
                                f"Will patch already existing {sec.name} in {vc}")
                        if secret["type"] == VAULTIER_SECRET.file:
                            fn = secret["blob_meta"]["filename"]
                            try:
                                filenames = [
                                    a["fileName"]
                                    for a in client.get_attachments(sec)
                                ]
                            except NoAttachmentsError:
                                filenames = []
                            if fn not in filenames:
                                idata["actions"].append("attach")
                            else:
                                L.info(
                                    f"Already attached {fn} to"
                                    f" {sec.name}/{sec.id} in {vc}")
                        if cid not in sec.collectionIds:
                            idata["actions"].append("link")
                            idata["collections"] = (
                                [cid] + (sec.collectionIds or []))
                            L.info(f"Will link {sec.name} in {vc}")
                        if not idata["actions"]:
                            L.info(
                                f"Already created {sec.name}/{sec.id} in {vc}")
                    except SecretNotFound:
                        try:
                            ciphers_to_import[sid]
                        except KeyError:
                            idata["actions"].append("create")
                            if secret["type"] == VAULTIER_SECRET.file:
                                idata["actions"].append("attach")
                            idata["collections"] = [cid]
                            idata["actions"].append("link")
                            L.info(f'Will create {secret["name"]} in {vc}')
                    if idata["actions"]:
                        ciphers_to_import[sid] = idata
    constructed = OrderedDict()
    # either create or edit ciphers
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "10"))
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, cipherd) for n, cipherd in ciphers_to_import.items()])
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed[ret.id] = ret
    else:
        for n, cipherd in ciphers_to_import.items():
            ret = record(client, cipherd)
            if not ret:
                continue
            constructed[ret.id] = ret
    return constructed
def main(jsonf, passwordsf, skippedusers):
    if skippedusers:
        skippedusers = re.compile(skippedusers, flags=re.I | re.M)
    L.info("start")
    client = Client()
    client.sync()
    vaultier_members = {}
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        # collect every member seen in vault and card ACLs
        for vdata in data["vaults"]:
            for i in vdata["acls"]:
                vaultier_members.setdefault(i, {})
            for cdata in vdata["cards"]:
                for i in cdata["acls"]:
                    vaultier_members.setdefault(i, {})
    # unload skipped users
    for i in [a for a in vaultier_members]:
        if skippedusers and skippedusers.search(i):
            L.info(f"Skip {i}")
            vaultier_members.pop(i, None)
    # assign passwords
    if os.path.exists(passwordsf):
        with open(passwordsf, "r") as fic:
            passwords = json.loads(fic.read())
    else:
        passwords = {}
    for i, idata in vaultier_members.items():
        try:
            pw = passwords[i]
        except KeyError:
            pw = passwords[i] = bwcrypto.gen_password()
        vaultier_members[i]["password"] = pw
    with open(passwordsf, "w") as fic:
        json.dump(passwords, fic, indent=2, sort_keys=True)
    constructed = DONE["constructed"]
    # either create or edit passwords
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "10"))
    items = []
    for n, secretd in vaultier_members.items():
        items.append((client, n, secretd, constructed))
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(record, items)
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed[ret[0].id] = ret
    else:
        for n, secretd in vaultier_members.items():
            record(client, n, secretd, constructed)
    return constructed
def export_workspace(client, workspace_id=None):
    if workspace_id is None:
        workspace_id = os.environ.get("VAULTIER_WORKSPACE_ID", "1")
    raw = as_bool(os.environ.get("VAULTIER_RAW", "1"))
    try:
        workspace = client.get_workspace(workspace_id)
    except Exception as e:
        raise SystemExit(e)
    directory = EXPORT_DIR
    if not os.path.isdir(directory):
        try:
            os.makedirs(directory)
        except Exception as e:
            raise SystemExit(e)
    if not raw:
        zip_filename = "{}.{}.{}".format(workspace.id, workspace.name, "zip")
        try:
            zipfile = ZipFile(
                os.path.join(directory, zip_filename), "w", ZIP_DEFLATED)
        except Exception as e:
            raise SystemExit(e)
    workspace_data = {
        "id": workspace.id,
        "name": workspace.name,
        "description": workspace.description,
        "acls": workspace.acls,
        "vaults": [],
    }
    workspace_fn = re.sub(" |'", "__", workspace.name)
    json_file = os.path.join(directory, "{}.json".format(workspace_fn))
    pretty_json_file = json_file.replace(".json", ".pretty.json")
    vaults = client.list_vaults(workspace.id)
    L.info(f"Exporting {workspace.name} / {workspace.id}")
    all_secrets = []
    for idx, vault in enumerate(vaults):
        vault_data = {
            "id": vault.id,
            "name": vault.name,
            "description": vault.description,
            "color": vault.color,
            "acls": vault.acls,
            "cards": [],
        }
        L.info("Export vault {name}/{id}".format(**vault_data))
        cards = client.list_cards(vault.id)
        for numcard, card in enumerate(cards):
            card_data = {
                "id": card.id,
                "name": card.name,
                "description": card.description,
                "acls": card.acls,
                "secrets": [],
            }
            L.info("Export card {0[name]}/{1[name]}".format(
                vault_data, card_data))
            secrets = client.list_secrets(card.id)
            if not secrets:
                if not card.description:
                    L.info(f"No secrets for card, not exporting {card.name} /"
                           f" {card.id} / {card.slug} / {card.description}")
                    continue
                else:
                    # handle misused cards as secrets...
                    description = f"{card.name}\n{card.description}"
                    card_data["secrets"].append({
                        "id": f"99{idx}{numcard}42421",
                        "name": card.name,
                        "type": 100,
                        "data": {
                            "note": description
                        },
                    })
            for secret in secrets:
                secret = client.decrypt_secret(secret, workspace.workspaceKey)
                secret_data = {
                    "id": secret.id,
                    "name": secret.name,
                    "type": secret.type,
                }
                L.info("Export secret {0[name]}/{1[name]}/{2[name]}".format(
                    vault_data, card_data, secret_data))
                if secret.data:
                    secret_data["data"] = secret.data
                if secret.blobMeta:
                    secret_data["blob_meta"] = secret.blobMeta
                    secret_file = client.get_file(secret.id)
                    if secret_file != [None, None]:
                        try:
                            os.makedirs(
                                os.path.join(directory, str(secret.id)),
                                exist_ok=True)
                            file_name = os.path.join(
                                directory, str(secret.id), secret_file[0])
                            write_binary_file(file_name, secret_file[1])
                        except Exception as e:
                            raise SystemExit(e)
                        if not raw:
                            zipfile.write(
                                file_name,
                                os.path.join(str(secret.id), secret_file[0]))
                            os.remove(file_name)
                            os.rmdir(os.path.join(directory, str(secret.id)))
                card_data["secrets"].append(secret_data)
                all_secrets.append((secret, secret_data))
            vault_data["cards"].append(card_data)
        workspace_data["vaults"].append(vault_data)
    try:
        write_json_file(json_file, workspace_data)
    except Exception as e:
        raise SystemExit(e)
    with open(pretty_json_file, "w") as file:
        json.dump(workspace_data, file, indent=2, sort_keys=True)
    if not raw:
        zipfile.write(json_file, os.path.basename(json_file))
        zipfile.write(pretty_json_file, os.path.basename(pretty_json_file))
        os.remove(json_file)
        os.remove(pretty_json_file)
        zipfile.close()
    L.info(f"Exported {workspace.name} / {workspace.id}")
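
# A hedged sketch of the JSON layout export_workspace() produces, assembled
# from the workspace/vault/card/secret dicts above (values illustrative):
#
#   {
#     "id": 1, "name": "workspace", "description": "...", "acls": {...},
#     "vaults": [{
#       "id": 2, "name": "vault", "description": "...", "color": "...",
#       "acls": {...},
#       "cards": [{
#         "id": 3, "name": "card", "description": "...", "acls": {...},
#         "secrets": [{
#           "id": 4, "name": "secret", "type": 100,
#           "data": {...}, "blob_meta": {...}   # both keys optional
#         }]
#       }]
#     }]
#   }
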
def main():
    L.info("start")
    client = configure_client(CFG)
    export_workspaces(client)
def main(jsonf, server, email, password, assingleorg, skippedusers):
    L.info("start")
    client = Client(vaultier=True)
    # compile the skip pattern once and reuse it for every check below
    skipped_users_re = re.compile(skippedusers) if skippedusers else None
    client.sync()
    users_orgas = {}
    users_collections = {}
    caccesses = {}
    al = set()  # de-duplicates "already done" log lines
    orgas = {}
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        orga = OrderedDict()
        collections = None
        oacls = data["acls"]
        if assingleorg:
            organ = data["name"]
            orga = client.get_organization(organ)
            collections = client.get_collections(orga=orga, sync=True)
        else:
            oacls = OrderedDict(
                [(k, v) for k, v in oacls.items() if v >= 200])
        coacls = oacls
        for iv, vdata in enumerate(data["vaults"]):
            if collections is None:
                collections = client.get_collections(orga=orga, sync=True)
            v = vdata["name"]
            vacls = vdata["acls"]
            if not vdata["cards"]:
                L.info(f"Skipping {v} as it has no cards")
                continue
            if not assingleorg:
                orga = client.get_organization(v)
                coacls = copy.deepcopy(oacls)
                coacls.update(vacls)
            oadmins = [a for a in coacls if coacls[a] == 200]
            eorga = orgas.setdefault(orga.id, {"orga": orga, "emails": set()})
            for email, acle in coacls.items():
                if skipped_users_re and skipped_users_re.search(email):
                    L.info(f"{email} is skipped")
                    continue
                payload = {"access_level": AL.admin}
                if int(acle) >= 200:
                    payload["accessAll"] = True
                log = None
                eorga["emails"].add(email)
                try:
                    uaccess = client.get_accesses({
                        "user": email,
                        "orga": orga
                    })
                except bwclient.NoAccessError:
                    bwacl = None
                else:
                    oaccess = uaccess["oaccess"]
                    bwacl = oaccess["daccess"].get(email, None)
                if (bwacl and (bwacl["type"] in [AL.admin, AL.manager])
                        and (payload["access_level"] == bwacl["type"])):
                    log = (f"User {email} is already in orga {orga.name}"
                           " with right acls")
                if log:
                    if log not in al:
                        L.info(log)
                        al.add(log)
                    continue
                access = {"orga": orga, "payload": payload}
                ak = (orga.id, email)
                users_orgas[ak] = access
            for cdata in vdata["cards"]:
                cn = sanitize(cdata["name"])
                vc = cn
                if assingleorg:
                    vc = f"{v} {cn}"
                collection = client.get_collection(
                    vc, collections=collections, orga=orga)
                try:
                    caccess = caccesses[collection.id]
                except KeyError:
                    caccess = caccesses[collection.id] = client.get_accesses(
                        collection)
                cacls = copy.deepcopy(vacls)
                cacls.update(cdata["acls"])
                for email, cacl in cacls.items():
                    eorga["emails"].add(email)
                    if skipped_users_re and skipped_users_re.search(email):
                        L.info(f"{email} is skipped")
                        continue
                    log = None
                    if email in oadmins:
                        continue
                    if email in caccess["emails"]:
                        log = (f"User {email} is already in collection"
                               f" {collection.name}")
                    if log:
                        if log not in al:
                            L.info(log)
                            al.add(log)
                        continue
                    payload = {}
                    access = {"collection": collection, "payload": payload}
                    ak = (collection.id, email)
                    caccess = caccesses[collection.id] = client.get_accesses(
                        collection)
                    users_collections[ak] = access
    # apply the gathered accesses, optionally in parallel
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "10"))
    constructed = OrderedDict()
    # invite users to orga
    record = add_to_orga
    L.info("add_to_orga")
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, email, oid, aclargs)
                 for (oid, email), aclargs in users_orgas.items()],
            )
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed.update(ret)
    else:
        for (oid, email), aclargs in users_orgas.items():
            ret = record(client, email, oid, aclargs)
            if not ret:
                continue
            constructed.update(ret)
    # invite users to collection
    record = add_to_collection
    L.info("add_to_collection")
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, email, cid, aclargs)
                 for (cid, email), aclargs in users_collections.items()],
            )
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed.update(ret)
    else:
        for (cid, email), aclargs in users_collections.items():
            ret = record(client, email, cid, aclargs)
            if not ret:
                continue
            constructed.update(ret)
    # autoaccept user invitations
    accept_invitations = OrderedDict()
    for orga, odata in orgas.items():
        oaccess = client.get_accesses(odata["orga"])
        for email in odata["emails"]:
            try:
                acl = oaccess["daccess"][email]
            except KeyError:
                continue
            else:
                # status: Invited = 0, Accepted = 1, Confirmed = 2
                if acl["status"] == 0:
                    accept_invitations[(orga, email)] = {"orga": odata["orga"]}
    record = do_accept_invitations
    L.info("do_accept_invitations")
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, email, oid, aclargs)
                 for (oid, email), aclargs in accept_invitations.items()],
            )
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed.update(ret)
    else:
        for (oid, email), aclargs in accept_invitations.items():
            ret = record(client, email, oid, aclargs)
            if not ret:
                continue
            constructed.update(ret)
    # autoconfirm user invitations
    confirm_invitations = OrderedDict()
    for orga, odata in orgas.items():
        oaccess = client.get_accesses(odata["orga"])
        for email in odata["emails"]:
            try:
                acl = oaccess["daccess"][email]
            except KeyError:
                continue
            else:
                # status: Invited = 0, Accepted = 1, Confirmed = 2
                if acl["status"] == 1:
                    confirm_invitations[(orga, email)] = {
                        "orga": odata["orga"]
                    }
    record = do_confirm_invitations
    L.info("do_confirm_invitations")
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, email, oid, aclargs)
                 for (oid, email), aclargs in confirm_invitations.items()],
            )
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed.update(ret)
    else:
        for (oid, email), aclargs in confirm_invitations.items():
            ret = record(client, email, oid, aclargs)
            if not ret:
                continue
            constructed.update(ret)
    return constructed
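
# Invitation lifecycle driven by the two passes above, as read from the
# status comments in the code (a hedged summary, not an official enum):
#
#   Invited (0)  -> do_accept_invitations  -> Accepted (1)
#   Accepted (1) -> do_confirm_invitations -> Confirmed (2)
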
def main(
    login,
    password,
    register_to,
    server,
    mail_lang,
    tls,
    dry_run,
    mail_server,
    mail_port,
    mail_login,
    mail_from,
    mail_pw,
    notify,
    passwordsf,
):
    tls = as_bool(tls)
    dry_run = as_bool(dry_run)
    notify = as_bool(notify)
    passwords = {}
    if not os.path.exists(passwordsf):
        with open(passwordsf, "w") as fic:
            fic.write("{}")
    with open(passwordsf) as fic:
        passwords = json.load(fic)
    try:
        password = passwords[login]
    except KeyError:
        # no stored password: generate one unless given on the command line
        if not password:
            password = secrets.token_hex(32)
    write = passwords.get(login, "") != password
    passwords[login] = password
    if write:
        with open(passwordsf, "w") as fic:
            json.dump(passwords, fic, indent=2, sort_keys=True)
    if not mail_from:
        mail_from = mail_login
    assert login
    assert password
    assert mail_login
    assert mail_pw
    L.info("start")
    client = Client()
    client.sync()
    try:
        user = client.get_user(email=login)
        if not user.emailVerified:
            user = client.validate(login)
    except bwclient.UserNotFoundError:
        client.create_user(
            login,
            name=login.split("@")[0],
            password=password,
            auto_validate=True)
    for i in register_to:
        client.set_organization_access(
            login, i,
            access_level=bwclient.CollectionAccess.admin,
            accessAll=True)
        try:
            client.accept_invitation(i, login)
        except bwclient.AlreadyInvitedError:
            pass
        try:
            client.confirm_invitation(i, login)
        except bwclient.AlreadyConfirmedError:
            pass
    if notify:
        notify_access(
            login,
            password,
            server,
            mail_lang,
            tls,
            dry_run,
            mail_server,
            mail_port,
            mail_login,
            mail_from,
            mail_pw,
        )
def main(jsonf, server, email, password, assingleorg):
    L.info("start")
    client = Client(server, email, password)
    client.api_sync()
    orgas_to_import = OrderedDict()
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        orga = {}
        if assingleorg:
            organ = data["name"]
            try:
                orgao = client.get_organization(organ)
                L.info(f"Already created orga {organ}")
            except bwclient.OrganizationNotFound:
                orgao = None
                L.info(f"Will create orga: {organ}")
        for vi, vdata in enumerate(data["vaults"]):
            v = sanitize(vdata["name"])
            if not vdata["cards"]:
                L.info(f"Skipping {v} as it has no cards")
                continue
            if not assingleorg:
                orga = {}
                organ = v
                try:
                    orgas_to_import[organ.lower()]
                except KeyError:
                    try:
                        orgao = client.get_organization(v)
                        L.info(f"Already created orga {v}")
                    except bwclient.OrganizationNotFound:
                        orgao = None
                        L.info(f"Will create orga: {v}")
            orga.update({"bw": orgao, "name": organ})
            orga.setdefault("collections", OrderedDict())
            orgas_to_import.setdefault(organ.lower(), orga)
            for cdata in vdata["cards"]:
                cn = sanitize(cdata["name"])
                vc = cn
                if assingleorg:
                    vc = f"{v} {cn}"
                try:
                    orga["collection_name"]
                except KeyError:
                    # the first card becomes the orga's default collection
                    orga["collection_name"] = vc
                    L.info(f"{vc} is Default Collection")
                    continue
                try:
                    if not orga["bw"]:
                        raise KeyError()
                    client.get_collection(vc, orga=orga["bw"])
                    L.info(f"Already created {vc}")
                except (bwclient.CollectionNotFound, KeyError):
                    try:
                        orga["collections"][vc.lower()]
                    except KeyError:
                        L.info(f"Will create {vc} in orga: {v}")
                        orga["collections"][vc.lower()] = {
                            "card": cdata,
                            "name": vc,
                        }
                        orga.setdefault("collection_name", vc)
    constructed = OrderedDict()
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "100"))
    # create orgas
    items = []
    for org, odata in orgas_to_import.items():
        if odata["bw"] is None:
            items.append([client, org, odata, email, constructed])
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(record_orga, items)
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                org, orga = ret
                orgas_to_import[org]["bw"] = constructed[orga.id] = orga
    else:
        for item in items:
            ret = record_orga(*item)
            org, orga = ret
            orgas_to_import[org]["bw"] = constructed[orga.id] = orga
    client.refresh()
    # create collections
    items = []
    for i, o in orgas_to_import.items():
        for col, c in o["collections"].items():
            items.append([client, i, o["bw"].id, col, c, constructed])
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(record, items)
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed[ret.id] = ret
    else:
        for item in items:
            record(*item)