def main(jsonf, server, email, password, assingleorg):
    L.info("start")
    client = Client(server, email, password)
    client.sync()
    orgas = client.get_organizations()
    orgas_to_delete = OrderedDict()
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        if assingleorg:
            orga = {"bw": None, "name": data["name"], "collections": OrderedDict()}
            v = sanitize(orga["name"])
            data["vaults"] = [orga]
        for vdata in data["vaults"]:
            v = sanitize(vdata["name"])
            try:
                orgas_to_delete[v.lower()]
            except KeyError:
                try:
                    ods = orgas["name"][v.lower()]
                except KeyError:
                    continue
                for ix, (_, o) in enumerate(ods.items()):
                    orgas_to_delete[f"{v}{ix}".lower()] = {
                        "bw": o,
                        "vault": vdata,
                        "name": v,
                        "collections": OrderedDict(),
                    }
                    L.info(f"Will delete orga {v}")
    # delete the matched orgas, possibly in parallel
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    # parallel = False
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "30"))
    items = []
    for org, odata in orgas_to_delete.items():
        if odata["bw"] is not None:
            items.append([client, odata])
    if parallel:
        with Pool(processes=processes) as pool:
            results = pool.starmap_async(record, items)
            results.wait()
    else:
        for item in items:
            record(*item)
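# The per-organization worker dispatched to the Pool above (``record``) is not
# shown in this excerpt. The following is only a minimal sketch of what such a
# worker could look like; ``client.delete_organization`` is an assumed call, not
# a confirmed bitwardentools API. The point it illustrates is that pool workers
# should isolate their own failures so one broken orga does not abort the batch.
def record(client, odata):
    orga = odata["bw"]
    try:
        client.delete_organization(orga)  # assumed client call
        L.info(f"Deleted orga {odata['name']}")
        return orga
    except Exception as exc:
        # workers run in separate processes: log and return None instead of raising
        L.error(f"Failed to delete orga {odata['name']}: {exc}")
        return None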
def main(jsonf, server, email, password, assingleorg):
    L.info("start")
    client = Client(vaultier=True)
    client.sync()
    ciphers_to_import = OrderedDict()
    vaultier_secrets = {}
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        orga = {}
        if assingleorg:
            organ = data["name"]
            orga = client.get_organization(organ)
        for iv, vdata in enumerate(data["vaults"]):
            v = vdata["name"]
            if not vdata["cards"]:
                L.info(f"Skipping {v} as it has no cards")
                continue
            if not assingleorg:
                orga = client.get_organization(v)
            collections = client.get_collections(orga)
            for cdata in vdata["cards"]:
                cn = sanitize(cdata["name"])
                vc = cn
                if assingleorg:
                    vc = f"{v} {cn}"
                collection = client.get_collection(vc, collections=collections)
                cid = collection.id
                for ix, secret in enumerate(cdata["secrets"]):
                    sid = f"{secret['id']}"
                    vaultier_secrets[sid] = secret
                    sd = secret.get("data", {})
                    idata = {
                        "vault": vdata,
                        "card": cdata,
                        "sid": sid,
                        "actions": [],
                        "secret": secret,
                        "collection": collection,
                        "collections": [],
                        "orga": orga,
                        "patch": {},
                        "bw": None,
                    }
                    sname = get_name(idata)
                    idata["name"] = sname
                    try:
                        sec = client.get_cipher(
                            sid,
                            vc,
                            collections=collections,
                            orga=orga,
                            vaultier=True,
                            sync=False,
                        )
                        if sec.vaultiersecretid != sid:
                            raise SecretNotFound()
                        idata["bw"] = sec
                        edit = False
                        #
                        # vaultier otypes: 200: secret, 300: files, 100: Note
                        #
                        if secret["type"] in [
                            VAULTIER_SECRET.file,
                            VAULTIER_SECRET.secret,
                        ]:
                            login = getattr(sec, "login", {}) or {}
                            if any((
                                (login.get("username", "") or "") != (sd.get("username") or ""),
                                (login.get("password", "") or "") != (sd.get("password") or ""),
                            )):
                                edit = True
                            uris = login.get("uris", {}) or {}
                            urls = [
                                a.get("uri", "")
                                for a in uris
                                if a.get("uri", "")
                            ]
                            if sd.get("url") and (sd["url"] not in urls):
                                edit = True
                        if secret["type"] in [a for a in VAULTIER_SECRET]:
                            if any((
                                (sec.name or "") != (sname or ""),
                                (sec.notes or "") != (get_note(idata) or ""),
                            )):
                                edit = True
                        if edit:
                            idata["actions"].append("edit")
                            L.info(
                                f"Will patch already existing {sec.name} in {vc}"
                            )
                        if secret["type"] == VAULTIER_SECRET.file:
                            fn = secret["blob_meta"]["filename"]
                            try:
                                filenames = [
                                    a["fileName"]
                                    for a in client.get_attachments(sec)
                                ]
                            except NoAttachmentsError:
                                filenames = []
                            if fn not in filenames:
                                idata["actions"].append("attach")
                            else:
                                L.info(
                                    f"Already attached {fn} to {sec.name}/{sec.id} in {vc}"
                                )
                        if cid not in sec.collectionIds:
                            idata["actions"].append("link")
                            idata["collections"] = [cid] + (sec.collectionIds or [])
                            L.info(f"Will link {sec.name} in {vc}")
                        if not idata["actions"]:
                            L.info(
                                f"Already created {sec.name}/{sec.id} in {vc}")
                    except SecretNotFound:
                        try:
                            ciphers_to_import[sid]
                        except KeyError:
                            idata["actions"].append("create")
                            if secret["type"] == VAULTIER_SECRET.file:
                                idata["actions"].append("attach")
                            idata["collections"] = [cid]
                            idata["actions"].append("link")
                            L.info(f'Will create {secret["name"]} in {vc}')
                    if idata["actions"]:
                        ciphers_to_import[sid] = idata
    constructed = OrderedDict()
    # either create or edit passwords, possibly in parallel
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    # parallel = False
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "10"))
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, cipherd) for n, cipherd in ciphers_to_import.items()])
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed[ret.id] = ret
    else:
        for n, cipherd in ciphers_to_import.items():
            ret = record(client, cipherd)
            if not ret:
                continue
            constructed[ret.id] = ret
    return constructed
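# VAULTIER_SECRET is referenced above but not defined in this excerpt. Based on
# the inline comment ("vaultier otypes: 200: secret, 300: files, 100: Note"), a
# minimal sketch of such an enum could look like the following; the member names
# "secret" and "file" are used above, "note" is an assumption:
import enum


class VAULTIER_SECRET(enum.IntEnum):
    # numeric type codes used by Vaultier exports (per the comment above)
    note = 100
    secret = 200
    file = 300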
def main(jsonf, server, email, password, assingleorg):
    L.info("start")
    client = Client(server, email, password)
    client.api_sync()
    orgas_to_import = OrderedDict()
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        orga = {}
        if assingleorg:
            organ = data["name"]
            try:
                orgao = client.get_organization(organ)
                L.info(f"Already created orga {organ}")
            except bwclient.OrganizationNotFound:
                orgao = None
                L.info(f"Will create orga: {organ}")
        for vi, vdata in enumerate(data["vaults"]):
            v = sanitize(vdata["name"])
            if not vdata["cards"]:
                L.info(f"Skipping {v} as it has no cards")
                continue
            if not assingleorg:
                orga = {}
                organ = v
                try:
                    orgas_to_import[organ.lower()]
                except KeyError:
                    try:
                        orgao = client.get_organization(v)
                        L.info(f"Already created orga {v}")
                    except bwclient.OrganizationNotFound:
                        orgao = None
                        L.info(f"Will create orga: {v}")
            orga.update({"bw": orgao, "name": organ})
            orga.setdefault("collections", OrderedDict())
            orgas_to_import.setdefault(organ.lower(), orga)
            for cdata in vdata["cards"]:
                cn = sanitize(cdata["name"])
                vc = cn
                if assingleorg:
                    vc = f"{v} {cn}"
                try:
                    orga["collection_name"]
                except KeyError:
                    orga["collection_name"] = vc
                    L.info(f"{vc} is Default Collection")
                    continue
                try:
                    if not orga["bw"]:
                        raise KeyError()
                    client.get_collection(vc, orga=orga["bw"])
                    L.info(f"Already created {vc}")
                except (bwclient.CollectionNotFound, KeyError):
                    try:
                        orga["collections"][vc.lower()]
                    except KeyError:
                        L.info(f"Will create {vc} in orga: {v}")
                        orga["collections"][vc.lower()] = {"card": cdata, "name": vc}
                        orga.setdefault("collection_name", vc)
    constructed = OrderedDict()
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "100"))
    # create orgas
    items = []
    for org, odata in orgas_to_import.items():
        if odata["bw"] is None:
            items.append([client, org, odata, email, constructed])
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(record_orga, items)
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                org, orga = ret
                orgas_to_import[org]["bw"] = constructed[orga.id] = orga
    else:
        for item in items:
            ret = record_orga(*item)
            org, orga = ret
            orgas_to_import[org]["bw"] = constructed[orga.id] = orga
    client.refresh()
    # create collections
    items = []
    for i, o in orgas_to_import.items():
        for col, c in o["collections"].items():
            items.append([client, i, o["bw"].id, col, c, constructed])
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(record, items)
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed[ret.id] = ret
    else:
        for item in items:
            record(*item)
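# record_orga (organization creation) and record (collection creation) are
# defined elsewhere in the script. A minimal sketch of record_orga, matching the
# ``org, orga = ret`` unpacking used above; ``client.create_organization`` and its
# signature are assumptions, not a confirmed bitwardentools API:
def record_orga(client, org, odata, email, constructed):
    try:
        orga = client.create_organization(odata["name"], email)  # assumed call
        L.info(f"Created orga {odata['name']}")
        return org, orga
    except Exception as exc:
        # return None so the caller can skip this orga without aborting the batch
        L.error(f"Failed to create orga {odata['name']}: {exc}")
        return None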
def main(jsonf, server, email, password, assingleorg, skippedusers):
    skipped_users_re = re.compile(skippedusers)
    L.info("start")
    client = Client(vaultier=True)
    if skippedusers:
        skippedusers = re.compile(skippedusers)
    client.sync()
    users_orgas = {}
    users_collections = {}
    caccesses = {}
    al = set()
    orgas = {}
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        orga = OrderedDict()
        collections = None
        oacls = data["acls"]
        if assingleorg:
            organ = data["name"]
            orga = client.get_organization(organ)
            collections = client.get_collections(orga=orga, sync=True)
        else:
            oacls = OrderedDict([(k, v) for k, v in oacls.items() if v >= 200])
        coacls = oacls
        for iv, vdata in enumerate(data["vaults"]):
            if collections is None:
                collections = client.get_collections(orga=orga, sync=True)
            v = vdata["name"]
            vacls = vdata["acls"]
            if not vdata["cards"]:
                L.info(f"Skipping {v} as it has no cards")
                continue
            if not assingleorg:
                orga = client.get_organization(v)
                coacls = copy.deepcopy(oacls)
                coacls.update(vacls)
            oadmins = [a for a in coacls if coacls[a] == 200]
            eorga = orgas.setdefault(orga.id, {"orga": orga, "emails": set()})
            for email, acle in coacls.items():
                if skipped_users_re.search(email):
                    log = f"{email} is old user, skipping"
                    continue
                payload = {}
                payload["access_level"] = AL.admin
                if int(acle) >= 200:
                    payload["accessAll"] = True
                if skippedusers and skippedusers.search(email):
                    L.info(f"{email} is skipped")
                    continue
                log = None
                eorga["emails"].add(email)
                try:
                    uaccess = client.get_accesses({
                        "user": email,
                        "orga": orga
                    })
                except bwclient.NoAccessError:
                    bwacl = None
                else:
                    oaccess = uaccess["oaccess"]
                    bwacl = oaccess["daccess"].get(email, None)
                if (bwacl
                        and (bwacl["type"] in [AL.admin, AL.manager])
                        and (payload["access_level"] == bwacl["type"])):
                    log = f"User {email} is already in orga {orga.name} with right acls"
                if log:
                    if log not in al:
                        L.info(log)
                        al.add(log)
                    continue
                access = {"orga": orga, "payload": payload}
                ak = (orga.id, email)
                users_orgas[ak] = access
            for cdata in vdata["cards"]:
                cn = sanitize(cdata["name"])
                vc = cn
                if assingleorg:
                    vc = f"{v} {cn}"
                collection = client.get_collection(vc, collections=collections, orga=orga)
                try:
                    caccess = caccesses[collection.id]
                except KeyError:
                    caccess = caccesses[collection.id] = client.get_accesses(collection)
                cacls = copy.deepcopy(vacls)
                cacls.update(cdata["acls"])
                for email, cacl in cacls.items():
                    eorga["emails"].add(email)
                    if skippedusers and skippedusers.search(email):
                        L.info(f"{email} is skipped")
                        continue
                    log = None
                    if email in oadmins:
                        continue
                    if skipped_users_re.search(email):
                        log = f"{email} is old user, skipping"
                    if email in caccess["emails"]:
                        log = f"User {email} is already in collection {collection.name}"
                    if log:
                        if log not in al:
                            L.info(log)
                            al.add(log)
                        continue
                    payload = {}
                    access = {"collection": collection, "payload": payload}
                    ak = (collection.id, email)
                    caccess = caccesses[collection.id] = client.get_accesses(collection)
                    users_collections[ak] = access
    # apply the collected ACLs, possibly in parallel
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    # parallel = False
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "10"))
    constructed = OrderedDict()
    # invite users to orga
    record = add_to_orga
    # users_orgas = dict([(k, users_orgas[k]) for i, k in enumerate(users_orgas) if i < 3])
    L.info("add_to_orga")
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, email, oid, aclargs)
                 for (oid, email), aclargs in users_orgas.items()],
            )
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed.update(ret)
    else:
        for (oid, email), aclargs in users_orgas.items():
            ret = record(client, email, oid, aclargs)
            if not ret:
                continue
            constructed.update(ret)
    # invite users to collection
    record = add_to_collection
    L.info("add_to_collection")
    # users_collections = dict([(k, users_collections[k]) for i, k in enumerate(users_collections) if i < 13])
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, email, cid, aclargs)
                 for (cid, email), aclargs in users_collections.items()],
            )
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed.update(ret)
    else:
        for (cid, email), aclargs in users_collections.items():
            ret = record(client, email, cid, aclargs)
            if not ret:
                continue
            constructed.update(ret)
    # autoaccept user invitations
    accept_invitations = OrderedDict()
    for orga, odata in orgas.items():
        oaccess = client.get_accesses(odata["orga"])
        for email in odata["emails"]:
            try:
                acl = oaccess["daccess"][email]
            except KeyError:
                continue
            else:
                # status: Invited = 0, Accepted = 1, Confirmed = 2
                if acl["status"] == 0:
                    accept_invitations[(orga, email)] = {"orga": odata["orga"]}
    record = do_accept_invitations
    L.info("do_accept_invitations")
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, email, oid, aclargs)
                 for (oid, email), aclargs in accept_invitations.items()],
            )
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed.update(ret)
    else:
        for (oid, email), aclargs in accept_invitations.items():
            ret = record(client, email, oid, aclargs)
            if not ret:
                continue
            constructed.update(ret)
    # autoconfirm user invitations
    confirm_invitations = OrderedDict()
    for orga, odata in orgas.items():
        oaccess = client.get_accesses(odata["orga"])
        for email in odata["emails"]:
            try:
                acl = oaccess["daccess"][email]
            except KeyError:
                continue
            else:
                # status: Invited = 0, Accepted = 1, Confirmed = 2
                if acl["status"] == 1:
                    confirm_invitations[(orga, email)] = {"orga": odata["orga"]}
    record = do_confirm_invitations
    L.info("do_confirm_invitations")
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(
                record,
                [(client, email, oid, aclargs)
                 for (oid, email), aclargs in confirm_invitations.items()],
            )
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed.update(ret)
    else:
        for (oid, email), aclargs in confirm_invitations.items():
            ret = record(client, email, oid, aclargs)
            if not ret:
                continue
            constructed.update(ret)
    return constructed
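# AL is referenced above but not defined in this excerpt. Bitwarden's
# organization user types are Owner = 0, Admin = 1, User = 2, Manager = 3, so AL
# is presumably a small namespace over those codes; a minimal sketch (the member
# names are assumptions, only ``admin`` and ``manager`` are used above):
class AL:
    owner = 0
    admin = 1
    user = 2
    manager = 3

# Invitation lifecycle used by the accept/confirm passes above (from the inline
# comments): Invited = 0, Accepted = 1, Confirmed = 2.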
def main(jsonf, passwordsf, skippedusers):
    if skippedusers:
        skippedusers = re.compile(skippedusers, flags=re.I | re.M)
    L.info("start")
    client = Client()
    client.sync()
    vaultier_members = {}
    for jsonff in jsonf.split(":"):
        with open(jsonff) as fic:
            data = json.load(fic)
        # optim: load all secrets once
        for vdata in data["vaults"]:
            v = vdata["name"]
            for i in vdata["acls"]:
                vaultier_members.setdefault(i, {})
            for cdata in vdata["cards"]:
                c = cdata["name"]
                for i in cdata["acls"]:
                    vaultier_members.setdefault(i, {})
                n = sanitize(f"{v} {c}")
                for ix, secret in enumerate(cdata["secrets"]):
                    pass
    # unload skipped users
    for i in [a for a in vaultier_members]:
        if skippedusers and skippedusers.search(i):
            L.info(f"Skip {i}")
            vaultier_members.pop(i, None)
    # assign passwords
    if os.path.exists(passwordsf):
        with open(passwordsf, "r") as fic:
            passwords = json.loads(fic.read())
    else:
        passwords = {}
    for i, idata in vaultier_members.items():
        try:
            pw = passwords[i]
        except KeyError:
            pw = passwords[i] = bwcrypto.gen_password()
        vaultier_members[i]["password"] = pw
    with open(passwordsf, "w") as fic:
        json.dump(passwords, fic, indent=2, sort_keys=True)
    constructed = DONE["constructed"]
    # create member accounts, possibly in parallel
    parallel = as_bool(os.environ.get("BW_PARALLEL_IMPORT", "1"))
    # parallel = False
    processes = int(os.environ.get("BW_PARALLEL_IMPORT_PROCESSES", "10"))
    items = []
    for n, secretd in vaultier_members.items():
        items.append((client, n, secretd, constructed))
    if parallel:
        with Pool(processes=processes) as pool:
            res = pool.starmap_async(record, items)
            res.wait()
            for ret in res.get():
                if not ret:
                    continue
                constructed[ret[0].id] = ret
    else:
        for n, secretd in vaultier_members.items():
            record(client, n, secretd, constructed)
    return constructed
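# as_bool is used by all of these scripts to interpret BW_PARALLEL_IMPORT but is
# not defined in this excerpt. A minimal sketch of such a helper, assuming the
# usual "treat '0'/'false'/empty as False" convention (the exact semantics of the
# real helper may differ):
def as_bool(value):
    if isinstance(value, str):
        return value.strip().lower() not in ("", "0", "false", "no", "n")
    return bool(value)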