def data_from_ldif(self, filename):
    """Yield ``(table_name, attr_mapping)`` pairs parsed from an LDIF file.

    The table name is taken from the entry's last remaining objectClass
    (after stripping ``top``); pure organizational containers are skipped.
    """
    with open(filename, "rb") as src:
        for dn, entry in LDIFParser(src).parse():
            doc_id = doc_id_from_dn(dn)

            oc = entry.get("objectClass") or entry.get("objectclass")
            if oc:
                if "top" in oc:
                    oc.remove("top")
                # containers such as ou=/o= carry no real data -- skip them
                if len(oc) == 1 and oc[0].lower() in ("organizationalunit", "organization"):
                    continue
            table_name = oc[-1]

            # drop the objectClass attribute itself (first spelling wins)
            if "objectClass" in entry:
                del entry["objectClass"]
            elif "objectclass" in entry:
                del entry["objectclass"]

            attr_mapping = OrderedDict()
            attr_mapping["doc_id"] = doc_id
            attr_mapping["objectClass"] = table_name
            attr_mapping["dn"] = dn
            for attr, raw_value in entry.items():
                attr_mapping[attr] = self.transform_value(attr, raw_value)
            yield table_name, attr_mapping
def ldap_import(ldif_file):
    """Import every record of *ldif_file* into the connected LDAP server.

    Uses the module-level ``conn`` LDAP connection. Exits the process on
    unrecoverable errors (missing file, unreadable LDIF, unexpected
    failure); failures on individual records are reported and skipped.
    """
    print(f"Importing LDAP environment from {ldif_file}")
    try:
        # Context manager guarantees the descriptor is closed even when
        # parsing raises (the previous version leaked the open file).
        with open(ldif_file, "rb") as ldif_fd:
            parser = LDIFParser(ldif_fd)
            for dn, record in parser.parse():
                print(f"Importing {dn}")
                # python-ldap expects attribute values as bytes, not str.
                tuple_list_as_bytes = [
                    (attribute_name, [value.encode("utf-8") for value in attribute_values])
                    for attribute_name, attribute_values in record.items()
                ]
                try:
                    conn.add_s(dn, tuple_list_as_bytes)
                    print("Import successful")
                except ldap.ALREADY_EXISTS:
                    print("Entry already existing in your LDAP, ignoring ...")
                except Exception as err:
                    # best-effort: keep importing the remaining records
                    print(f"Unable to import record due to {err}")
    except FileNotFoundError:
        print(f"Unable to locate {ldif_file}. Make sure the path is correct")
        sys.exit(1)
    except ValueError as err:
        print(f"Unable to read ldif. Make sure the LDIF was created with this utility. Error {err}")
        sys.exit(1)
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(f"Unable to import LDAP environment due to {err}. {exc_type}, {fname}, {exc_tb.tb_lineno}")
        sys.exit(1)
def import_ldif(self, bucket_mappings):
    """Render the LDIF templates and load their entries into Couchbase buckets."""
    ctx = prepare_template_ctx(self.manager)
    attr_processor = AttrProcessor()

    for mapping in bucket_mappings.values():
        for file_ in mapping["files"]:
            logger.info(f"Importing {file_} file")
            src = f"/app/templates/{file_}"
            dst = f"/app/tmp/{file_}"
            os.makedirs(os.path.dirname(dst), exist_ok=True)
            render_ldif(src, dst, ctx)

            with open(dst, "rb") as fd:
                for dn, entry in LDIFParser(fd).parse():
                    # skip stub entries that carry no real attributes
                    if len(entry) <= 2:
                        continue

                    key = id_from_dn(dn)
                    entry["dn"] = [dn]
                    entry = transform_entry(entry, attr_processor)
                    data = json.dumps(entry)

                    # using INSERT will cause duplication error, but the data is left intact
                    query = 'INSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (
                        mapping["bucket"], key, data,
                    )
                    req = self.client.exec_query(query)
                    if not req.ok:
                        logger.warning(
                            "Failed to execute query, reason={}".format(req.json()))
def parse_ldif_file(ldif_file: str) -> list[tuple[str, dict[str, Any]]]:
    """Parse *ldif_file* and return its records as ``(dn, attrs)`` tuples.

    Everything from the first ``# search result`` line onward (the trailing
    summary emitted by ``ldapsearch``) is discarded before parsing.
    """
    logger.info(f"### Parsing LDIF file {ldif_file}")
    ldif_fd = BytesIO()
    # 'with' closes the source file even if an error occurs below
    # (the previous version never closed the descriptor); iterating the
    # file directly also avoids materializing all lines via readlines().
    with open(ldif_file, "rb") as orig_ldif_fd:
        for line in orig_ldif_fd:
            if line.startswith(b"# search result"):
                break
            ldif_fd.write(line)
    ldif_fd.seek(0)
    parser = LDIFParser(ldif_fd)
    return list(parser.parse())
def import_ldif(self):
    """Render LDIF templates for the active persistence mapping and import them into LDAP."""
    optional_scopes = json.loads(self.manager.config.get("optional_scopes", "[]"))
    ldif_mappings = get_ldif_mappings(optional_scopes)

    # hybrid means only a subset of the ldif files is needed
    persistence_type = os.environ.get("CN_PERSISTENCE_TYPE", "ldap")
    ldap_mapping = os.environ.get("CN_PERSISTENCE_LDAP_MAPPING", "default")

    if persistence_type == "hybrid":
        mapping = ldap_mapping
        ldif_mappings = {mapping: ldif_mappings[mapping]}

        # every mapping relies on base entries (e.g. the `o=` suffix)
        # shipped in `base.ldif`, so make sure it is imported first
        if "base.ldif" not in ldif_mappings[mapping]:
            ldif_mappings[mapping].insert(0, "base.ldif")

    ctx = prepare_template_ctx(self.manager)

    for mapping, files in ldif_mappings.items():
        self.check_indexes(mapping)

        for file_ in files:
            logger.info(f"Importing {file_} file")
            src = f"/app/templates/{file_}"
            dst = f"/app/tmp/{file_}"
            os.makedirs(os.path.dirname(dst), exist_ok=True)
            render_ldif(src, dst, ctx)

            with open(dst, "rb") as fd:
                for dn, entry in LDIFParser(fd).parse():
                    self.add_entry(dn, entry)
# NOTE(review): this span is a fragment — it starts mid-scope (the first
# fatal_error call belongs to a condition outside this view) and the DSC
# branch is cut off after get_ofile_for_dsc; code left untouched.
fatal_error("Error: Invalid input file!")
if not in_file_path.exists() or in_file_path.is_dir():
    fatal_error("Error: Invalid input file path!")
with in_file_path.open('rb') as f:
    if in_file_ext == '.ml':
        # raw CSCA master-list file: load and verify it as a whole
        ml_bytes = f.read()
        ml = CscaMasterList.load(ml_bytes)
        verify_and_extract_masterlist(ml,
            default_out_dir_csca.joinpath(get_ml_out_dir_name(ml))
        )
    else:
        # otherwise presumably an LDIF dump — TODO confirm expected extension
        parser = LDIFParser(f)
        print("Note: DSC and CRL won't be verified against issuing CSCA!")
        for dn, entry in parser.parse():
            # ML entry: embedded CSCA master list
            if 'CscaMasterListData' in entry:
                ml = entry['CscaMasterListData'][0]
                ml = CscaMasterList.load(ml)
                verify_and_extract_masterlist(ml,
                    default_out_dir_csca.joinpath(get_ml_out_dir_name(ml))
                )
            # DSC entry: document signer certificate
            elif 'userCertificate' in entry or 'userCertificate;binary' in entry:
                dn = parse_dn(dn)
                dsc = entry['userCertificate;binary'][0]
                dsc = DocumentSignerCertificate.load(dsc)
                # NOTE(review): rebinding `f` here shadows the open LDIF
                # handle — likely intentional (handling continues past this
                # view), but verify against the full file.
                f = get_ofile_for_dsc(dsc,
                    default_out_dir_dsc / dn['c'].lower() / 'unverified')
class MyRestore:
    """Restore a single LDAP object (and optionally its group memberships)
    from a gzipped LDIF backup into a Univention LDAP directory.

    Bug fix vs. the previous revision: the no-op ``@property`` definitions
    for ``backup_data``, ``unique_member_of`` and ``ldap_data`` shadowed the
    same-named instance attributes — assignment in ``__init__`` raised
    ``AttributeError`` (getter-only property) and any read would have
    recursed forever. They are removed; plain attribute access is
    interface-compatible for all callers.
    """

    def __init__(self, args=None, add_blacklist_attrs=None):
        self.args = args
        self.add_blacklist_attrs = add_blacklist_attrs
        self.unique_member_of = set()
        self.backup_data = None
        self.ldap_data = None
        self.lo, self.position = univention.admin.uldap.getAdminConnection()
        self.co = univention.admin.config.config()
        self.ucr = univention.config_registry.ConfigRegistry()
        self.ucr.load()
        univention.admin.modules.update()
        # USAGE markers identifying operational attribute types in the schema
        self.operational_mark = [
            'directoryOperation', 'dSAOperation', 'distributedOperation'
        ]
        # seeded with entryCSN (both casings); extended from the live schema
        self.operational_attributes = {'entryCSN', 'entrycsn'}
        self.get_operational_attributes()

    def get_backup_data(self):
        """Stream the gzipped LDIF backup through the parser; each record
        is delivered to ldap_parser_handle()."""
        print('Checking backup file {0}.'.format(self.args.backup_file))
        with gzip.open(self.args.backup_file, 'rb') as f:
            self.ldif_parser = LDIFParser(f)
            self.ldif_parser.handle = self.ldap_parser_handle
            self.ldif_parser.parse()

    def identify_udm(self, entry):
        """Return a UDM object for *entry* based on its univentionObjectType,
        or None when the type is unknown or initialization fails."""
        try:
            udm_type = entry.get('univentionObjectType', [None])[0]
            univention.admin.modules.update()
            udm = univention.admin.modules.get(udm_type)
            univention.admin.modules.init(self.lo, self.position, udm)
            return udm.object(self.co, self.lo, self.position,
                              dn=self.args.dn, attributes=entry)
        except Exception:
            # best-effort: callers treat None as "not identifiable as UDM"
            return None

    def ldap_parser_handle(self, dn, entry):
        """LDIFParser callback: record the entry matching args.dn and,
        when restoring memberships, collect groups listing it as uniqueMember."""
        if self.args.verbose or self.args.list_dns:
            print('\t{0}: {1}'.format(self.ldif_parser.records_read, dn))
        if self.args.dn:
            if self.args.restore_membership:
                if self.args.dn.lower() in map(str.lower, entry.get('uniqueMember', [])):
                    self.unique_member_of.add(dn)
            if self.args.dn.lower() == dn.lower():
                if not self.args.restore_membership:
                    # NOTE(review): set on this object, presumably meant to
                    # stop the parser early — verify the parser honours it.
                    self._max_entries = 1
                self.backup_data = entry

    def get_ldap_data(self):
        """Fetch the current LDAP attributes of args.dn into self.ldap_data."""
        self.ldap_data = self.lo.get(self.args.dn)

    def get_operational_attributes(self):
        """Extend self.operational_attributes with every attribute type the
        server schema marks with an operational USAGE."""
        schema = self.lo.search(filter='(objectClass=subschema)',
                                base='cn=subschema', scope='base',
                                attr=['*', '+'])
        for i in schema[0][1].get('attributeTypes'):
            for j in self.operational_mark:
                if j.lower() in i.lower():
                    # extract the first NAME 'xyz' token of the definition
                    attr = i.split('NAME ')[1].split("'")[1]
                    self.operational_attributes.add(attr)
                    self.operational_attributes.add(attr.lower())

    def create_modlist(self, new=None, old=None):
        """Build an add- or modify-modlist, ignoring operational attributes.

        new only -> addModlist; new and old -> modifyModlist; else empty.
        """
        ml = []
        if new and not old:
            ml = addModlist(new, ignore_attr_types=self.operational_attributes)
        elif new and old:
            ml = modifyModlist(old, new, ignore_attr_types=self.operational_attributes)
        return ml

    def dn_exists(self, dn):
        """Return True when *dn* currently exists in LDAP."""
        return bool(self.lo.get(dn))

    def check_blacklist_attrs(self):
        """Return a Namespace(value, attr, dn) for the first blacklisted
        attribute whose backup value already exists in LDAP, else None."""
        for attr in self.add_blacklist_attrs:
            val = self.backup_data.get(attr, [None])[0]
            if val:
                l_filter = '({0}={1})'.format(attr, escape_filter_chars(val))
                res = self.lo.search(l_filter)
                if res:
                    return argparse.Namespace(value=val, attr=attr, dn=res[0][0])
        return None

    # modify
    def update_from_backup(self):
        """Modify the existing LDAP object so it matches the backup data."""
        self.get_ldap_data()
        ml = self.create_modlist(new=self.backup_data, old=self.ldap_data)
        if ml:
            if self.args.verbose or self.args.dry_run:
                print('\tUpdating {0} with modlist:'.format(self.args.dn))
                my_pretty_print(ml)
            if not self.args.dry_run:
                try:
                    self.lo.lo.modify_ext_s(self.args.dn, ml)
                except Exception:
                    print('ERROR: Modify {0} with attributes'.format(self.args.dn))
                    pprint.pprint(ml)
                    print('failed with:')
                    raise
        else:
            print('No changes from backup data.')

    def update_membership(self):
        """Re-add the object to every still-existing group collected from
        the backup (uses the backup object on dry runs)."""
        if self.args.dry_run:
            udm_object = self.backup_udm_object
        else:
            udm_object = self.ldap_udm_object
        udm_object.open()
        if 'groups' in udm_object:
            udm_object['groups'] = []
            for grp in self.unique_member_of:
                if self.dn_exists(grp):
                    if self.args.verbose or self.args.dry_run:
                        print('Adding group {0} to {1}'.format(grp, self.args.dn))
                    udm_object['groups'].append(grp)
        if not self.args.dry_run:
            udm_object.modify()

    def add_from_backup(self):
        """Create the LDAP object from the backup data."""
        ml = self.create_modlist(new=self.backup_data)
        if self.args.verbose or self.args.dry_run:
            print('\tAdding {0} with modlist:'.format(self.args.dn))
            my_pretty_print(ml)
        if not self.args.dry_run:
            try:
                self.lo.add(self.args.dn, ml, exceptions=True)
            except Exception:
                print('ERROR: Adding {0} with attributes'.format(self.args.dn))
                pprint.pprint(ml)
                print('failed with:')
                raise

    def delete_in_ldap(self):
        """Remove the object from LDAP via its UDM representation."""
        udm_object = self.ldap_udm_object
        if udm_object:
            udm_object.open()
            if self.args.verbose or self.args.dry_run:
                print('\tRemoving {0} from LDAP.'.format(self.args.dn))
            if not self.args.dry_run:
                udm_object.remove()

    # properties
    # (the former backup_data / unique_member_of / ldap_data properties were
    # removed: they returned themselves recursively and, lacking setters,
    # made the __init__ attribute assignments raise AttributeError)
    @property
    def backup_udm_object(self):
        """UDM object built from the backup entry (None if unidentifiable)."""
        return self.identify_udm(self.backup_data)

    @property
    def ldap_udm_object(self):
        """UDM object built from the live LDAP entry (refetched each access)."""
        self.get_ldap_data()
        return self.identify_udm(self.ldap_data)