def ldifize(ldap_result):
    """Return ldap's query result as a string in LDIF format."""
    out = StringIO()
    ldif_writer = ldif.LDIFWriter(out)
    for dn, attrs in ldap_result:
        ldif_writer.unparse(dn, attrs)
    return out.getvalue()

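# Hypothetical usage sketch (not part of the original sources): ldifize() above
# expects the (dn, attrs) pairs that python-ldap's search_s() returns, where
# attrs maps attribute names to lists of values. The URI, base DN and bind
# below are assumptions for illustration only.
def dump_subtree_example(uri='ldap://localhost', base='dc=example,dc=com'):
    import ldap
    conn = ldap.initialize(uri)
    conn.simple_bind_s()  # anonymous bind; supply credentials as needed
    return ldifize(conn.search_s(base, ldap.SCOPE_SUBTREE))
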
def __init__(self, l, writer_obj, headerStr='', footerStr=''):
    if isinstance(writer_obj, ldif.LDIFWriter):
        self._ldif_writer = writer_obj
    else:
        self._ldif_writer = ldif.LDIFWriter(writer_obj)
    FileWriter.__init__(self, l, self._ldif_writer._output_file, headerStr, footerStr)

def parse_acls(args, lo):
    if isinstance(args.output, basestring):
        args.output = open(args.output, 'wb')
    entries = lo.search(base=args.base)
    writer = ldif.LDIFWriter(args.output)
    code = 0
    for dn, attrs in entries:
        entry = {}
        for attr in attrs:
            # TODO: replace subprocess by some C calls to improve speed
            process = subprocess.Popen(
                ['slapacl', '-d0', '-D', args.binddn, '-b', dn, attr],
                stderr=subprocess.PIPE)
            _, stderr = process.communicate()
            for line in stderr.splitlines():
                if line.startswith('%s: ' % (attr, )):
                    entry.setdefault(attr, []).append(
                        normalize_permission(line.split(': ', 1)[-1].strip()))
            try:
                entry[attr]
            except KeyError as exc:
                print >> sys.stderr, dn, exc
                code = 1
        writer.unparse(dn, entry)
    return code

def ldap_dump(global_data):
    conn = ldap.initialize('ldaps://%s' % global_data["HOST"])
    conn.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
    conn.set_option(ldap.OPT_X_TLS_NEWCTX, 0)
    basedn = "dc=" + ",dc=".join(global_data["DOMAIN"].split('.'))
    pw = global_data["DOMAINPASS"]
    user_dn = "%s@%s" % (global_data["DOMAINUSER"], global_data["DOMAIN"])
    searchFilter = "(objectclass=*)"
    searchAttribute = []
    searchScope = ldap.SCOPE_SUBTREE
    conn.protocol_version = ldap.VERSION3
    conn.simple_bind_s(user_dn, pw)
    ldap_result_id = conn.search(basedn, searchScope, searchFilter, searchAttribute)
    result_set = []
    while 1:
        result_type, result_data = conn.result(ldap_result_id, 0)
        if result_data == []:
            break
        else:
            if result_type == ldap.RES_SEARCH_ENTRY:
                result_set.append(result_data)
    conn.unbind_s()
    f = io.StringIO()
    ldif_writer = ldif.LDIFWriter(f)
    for r in result_set:
        ldif_writer.unparse(r[0][0], r[0][1])
    return f.getvalue()

def _raw2ldif(self, dn, raw):
    import ldif
    data = io.StringIO()
    lwr = ldif.LDIFWriter(data, cols=self.ldif_cols)
    lwr.unparse(dn, dict(raw))
    data = data.getvalue()
    return data

def __restore_config(self):
    port = self.restore_state('nsslapd-port')
    security = self.restore_state('nsslapd-security')
    global_lock = self.restore_state('nsslapd-global-backend-lock')

    ldif_outfile = "%s.modified.out" % self.filename
    with open(ldif_outfile, "wb") as out_file:
        ldif_writer = ldif.LDIFWriter(out_file)
        with open(self.filename, "rb") as in_file:
            parser = ModifyLDIF(in_file, ldif_writer)

            if port is not None:
                parser.remove_value("cn=config", "nsslapd-port")
                parser.add_value("cn=config", "nsslapd-port", port)

            if security is not None:
                parser.remove_value("cn=config", "nsslapd-security")
                parser.add_value("cn=config", "nsslapd-security", security)

            # disable global lock by default
            parser.remove_value("cn=config", "nsslapd-global-backend-lock")

            if global_lock is not None:
                parser.add_value("cn=config", "nsslapd-global-backend-lock", global_lock)

            parser.parse()

    shutil.copy2(ldif_outfile, self.filename)

def to_ldif(self):
    out = StringIO()
    ldif_out = ldif.LDIFWriter(out)
    newdata = {}
    if hasattr(self, 'attrs'):
        newdata.update(self.attrs)
    ldif_out.unparse(self.dn, newdata)
    return out.getvalue()

def to_ldif(self): """ Get an LDIF representation of this record. to_ldif()->string """ out = StringIO() ldif_out = ldif.LDIFWriter(out) ldif_out.unparse(self.dn, self.attrs) return out.getvalue()
def __init__(self, input_file, output_file):
    """
    :param input_file: an LDIF file
    :param output_file: an LDIF file
    """
    ldif.LDIFParser.__init__(self, input_file)
    self.writer = ldif.LDIFWriter(output_file)
    self.dn_updated = set()
    self.modifications = {}  # keep modify operations in original order

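# Minimal sketch, not the project's actual ModifyLDIF implementation: it shows
# the usual way an ldif.LDIFParser subclass is paired with an ldif.LDIFWriter.
# parse() invokes handle() once per record read from input_file, and each record
# is written back out (possibly rewritten) via unparse().
class CopyThroughLDIF(ldif.LDIFParser):
    def __init__(self, input_file, output_file):
        ldif.LDIFParser.__init__(self, input_file)
        self.writer = ldif.LDIFWriter(output_file)

    def handle(self, dn, entry):
        # A filtering subclass would modify or skip `entry` here.
        self.writer.unparse(dn, entry)
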
def _backup_dn_recursive(l, dn):
    if isinstance(l, LDIFObject):
        return

    backup_file = os.path.join(BACKUP_DIR, str(time.time()))
    ud.debug(ud.LISTENER, ud.PROCESS, 'replication: dump %s to %s' % (dn, backup_file))
    with open(backup_file, 'w+') as fd:
        os.fchmod(fd.fileno(), 0o600)
        ldif_writer = ldifparser.LDIFWriter(fd)
        for dn, entry in l.search_s(dn, ldap.SCOPE_SUBTREE, '(objectClass=*)', attrlist=['*', '+']):
            ldif_writer.unparse(dn, entry)

async def ldifDump(dn: str) -> quart.Response:
    'Dump an entry as LDIF'
    out = io.StringIO()
    writer = ldif.LDIFWriter(out)
    async for dn, attrs in result(request.ldap.search(dn, ldap.SCOPE_SUBTREE)):
        writer.unparse(dn, attrs)
    resp = quart.Response(out.getvalue(), content_type='text/plain')
    resp.headers['Content-Disposition'] = \
        'attachment; filename="%s.ldif"' % dn.split(',')[0].split('=')[1]
    return resp

def __enable_ds_global_write_lock(self):
    ldif_outfile = "%s.modified.out" % self.filename
    with open(ldif_outfile, "wb") as out_file:
        ldif_writer = ldif.LDIFWriter(out_file)
        with open(self.filename, "rb") as in_file:
            parser = ModifyLDIF(in_file, ldif_writer)

            parser.remove_value("cn=config", "nsslapd-global-backend-lock")
            parser.add_value("cn=config", "nsslapd-global-backend-lock", "on")

            parser.parse()

    shutil.copy2(ldif_outfile, self.filename)

def _unparse_records(self, records):
    """
    Returns LDIF string with entry records from list `records`
    """
    ldif_file = StringIO()
    ldif_writer = ldif.LDIFWriter(ldif_file)
    if self.record_type == 'entry':
        for dn, entry in records:
            ldif_writer.unparse(dn, entry)
    elif self.record_type == 'change':
        for dn, modops, controls in records:
            ldif_writer.unparse(dn, modops)
    return ldif_file.getvalue()

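# Hedged illustration (not from the original module) of the two record shapes
# LDIFWriter.unparse() accepts: a dict yields an entry record, while a list of
# (mod_op, mod_type, mod_vals) tuples, as used with LDAPObject.modify(), yields
# a changetype: modify record. Values are shown as bytes, which python-ldap 3.x
# expects; the DN and attributes are made up for the example.
def _unparse_examples():
    import ldap
    out = StringIO()
    writer = ldif.LDIFWriter(out)
    # entry record
    writer.unparse('cn=demo,dc=example,dc=com', {'cn': [b'demo']})
    # change record (modify)
    writer.unparse('cn=demo,dc=example,dc=com',
                   [(ldap.MOD_REPLACE, 'description', [b'updated'])])
    return out.getvalue()
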
def __repr__(self):
    """Convert the Entry to its LDIF representation"""
    sio = cStringIO.StringIO()
    # what's all this then? the unparse method will currently only accept
    # a list or a dict, not a class derived from them. self.data is a
    # cidict, so unparse barfs on it. I've filed a bug against python-ldap,
    # but in the meantime, we have to convert to a plain old dict for printing.
    # I also don't want to see wrapping, so set the line width really high (1000)
    newdata = {}
    newdata.update(self.data)
    ldif.LDIFWriter(sio, Entry.base64_attrs, 1000).unparse(self.dn, newdata)
    return sio.getvalue()

def ldif2db(self, instance, backend, online=True):
    '''
    Restore an LDIF backup of the data in this instance.

    If executed online create a task and wait for it to complete.
    '''
    self.log.info('Restoring from %s in %s' % (backend, instance))

    now = time.localtime()
    cn = time.strftime('import_%Y_%m_%d_%H_%M_%S')
    dn = DN(('cn', cn), ('cn', 'import'), ('cn', 'tasks'), ('cn', 'config'))

    ldifdir = paths.SLAPD_INSTANCE_LDIF_DIR_TEMPLATE % instance
    ldifname = '%s-%s.ldif' % (instance, backend)
    ldiffile = os.path.join(ldifdir, ldifname)
    srcldiffile = os.path.join(self.dir, ldifname)

    if not os.path.exists(ldifdir):
        pent = pwd.getpwnam(DS_USER)
        os.mkdir(ldifdir, 0770)
        os.chown(ldifdir, pent.pw_uid, pent.pw_gid)

    ipautil.backup_file(ldiffile)
    with open(ldiffile, 'wb') as out_file:
        ldif_writer = ldif.LDIFWriter(out_file)
        with open(srcldiffile, 'rb') as in_file:
            ldif_parser = RemoveRUVParser(in_file, ldif_writer, self.log)
            ldif_parser.parse()

    if online:
        conn = self.get_connection()
        ent = conn.make_entry(
            dn,
            {
                'objectClass': ['top', 'extensibleObject'],
                'cn': [cn],
                'nsFilename': [ldiffile],
                'nsUseOneFile': ['true'],
            }
        )
        ent['nsInstance'] = [backend]

        try:
            conn.add_entry(ent)
        except Exception, e:
            self.log.error("Unable to bind to LDAP server: %s" % e)
            return

        self.log.info("Waiting for LDIF to finish")
        wait_for_task(conn, dn)

def get_ldif(self, mod_attrs=None):
    """ get ldif of ldap object """
    output = StringIO()
    if not mod_attrs:
        mod_attrs = self.attributes
    writer = ldif.LDIFWriter(output)
    writer.unparse(self.dn, mod_attrs)
    result = output.getvalue()
    output.close()
    return result

def to_ldif(self):
    # Get an LDIF representation of this record.
    # to_ldif() -> string
    out = StringIO()
    ldif_out = ldif.LDIFWriter(out)
    # what's all this then? the unparse method will currently only accept
    # a list or a dict, not a class derived from them. self.data is a
    # cidict, so unparse barfs on it. I've filed a bug against python-ldap,
    # but in the meantime, we have to convert to a plain old dict for printing
    newdata = {}
    if hasattr(self, 'attrs'):
        newdata.update(self.attrs)
    ldif_out.unparse(self.dn, newdata)
    return out.getvalue()

def get_ldif(value):
    """Converts a policy object to LDIF string.

    Args:
      value: Policy setting object

    Returns:
      LDIF string
    """
    output = StringIO.StringIO()
    writer = ldif.LDIFWriter(output)
    try:
        writer.unparse(value[0], value[1])
        text = output.getvalue()
    except (KeyError, TypeError):
        return ''
    output.close()
    return text

def __init__(self, l, writer_obj, headerStr='', footerStr=''):
    """
    Initialize a StreamResultHandler

    Parameters:
    l
        LDAPObject instance
    writer_obj
        Either a file-like object or a ldif.LDIFWriter instance used for output
    """
    import ldif
    if isinstance(writer_obj, ldif.LDIFWriter):
        self._ldif_writer = writer_obj
    else:
        self._ldif_writer = ldif.LDIFWriter(writer_obj)
    FileWriter.__init__(self, l, self._ldif_writer._output_file, headerStr, footerStr)

def create_ldif_from_master(lo, ldif_file, base, page_size):
    """ create ldif file from everything from lo """
    logging.info('Fetching LDIF ...')
    if ldif_file == '-':
        output = sys.stdout
    else:
        output = io.StringIO()

    lc = SimplePagedResultsControl(criticality=True, size=page_size, cookie='')
    page_ctrl_oid = lc.controlType
    writer = ldif.LDIFWriter(output, cols=10000)

    while True:
        msgid = lo.lo.search_ext(base, ldap.SCOPE_SUBTREE, '(objectclass=*)', ['+', '*'], serverctrls=[lc])
        rtype, rdata, rmsgid, serverctrls = lo.lo.result3(msgid)
        for dn, data in rdata:
            logging.debug('Processing %s ...', dn)
            for attr in replication.EXCLUDE_ATTRIBUTES:
                data.pop(attr, None)
            writer.unparse(dn, data)

        pctrls = [c for c in serverctrls if c.controlType == page_ctrl_oid]
        if pctrls:
            cookie = lc.cookie = pctrls[0].cookie
            if not cookie:
                break
        else:
            logging.warning("Server ignores RFC 2696 Simple Paged Results Control.")
            break

    if isinstance(output, io.StringIO):
        if os.path.isfile(ldif_file):
            os.unlink(ldif_file)
        with gzip.open(ldif_file, 'w') as fd:
            fd.write(output.getvalue().encode('UTF-8'))
        output.close()

def _move_ruv(ldif_file):
    """ Move RUV entry in an ldif file to the top """
    with open(ldif_file) as f:
        parser = ldif.LDIFRecordList(f)
        parser.parse()

        ldif_list = parser.all_records
        for dn in ldif_list:
            if dn[0].startswith('nsuniqueid=ffffffff-ffffffff-ffffffff-ffffffff'):
                ruv_index = ldif_list.index(dn)
                ldif_list.insert(0, ldif_list.pop(ruv_index))
                break

    with open(ldif_file, 'w') as f:
        ldif_writer = ldif.LDIFWriter(f)
        for dn, entry in ldif_list:
            ldif_writer.unparse(dn, entry)

def __disable_listeners(self):
    ldif_outfile = "%s.modified.out" % self.filename
    with open(ldif_outfile, "wb") as out_file:
        ldif_writer = ldif.LDIFWriter(out_file)
        with open(self.filename, "rb") as in_file:
            parser = ModifyLDIF(in_file, ldif_writer)

            parser.remove_value("cn=config", "nsslapd-port")
            parser.add_value("cn=config", "nsslapd-port", "0")

            parser.remove_value("cn=config", "nsslapd-security")
            parser.add_value("cn=config", "nsslapd-security", "off")

            parser.remove_value("cn=config", "nsslapd-ldapientrysearchbase")

            parser.parse()

    shutil.copy2(ldif_outfile, self.filename)

def write(self, fh):
    # Write result header comments
    fh.write('# result: %d\n' % self.type)
    fh.writelines('# control: %s\n' % ctrl for ctrl in self.ctrls)
    fh.write('#\n')

    # Write LDIF data
    writer = ldif.LDIFWriter(fh)
    for dn, attrs, ctrls in self.data:
        if self.type == ldap.RES_INTERMEDIATE:
            ctrl = LdapResponseControl(dn)
            ctrl.decodeControlValue(attrs)
            dn = ''
            record = {'control': [ctrl.to_ldif().encode()]}
        else:
            record = dict(attrs)
            for ctrl in ctrls:
                record.setdefault('control', [])
                record['control'].append(ctrl.to_ldif().encode())
        writer.unparse(dn, record)

def __str__(self):
    if self.is_origin():
        self.dn = 'idnsName={z},{b}'.format(z=zone.origin, b=base)
        self.ldap_obj['idnsName'] = [str(zone.origin)]
    else:
        self.dn = 'idnsName={n},idnsName={z},{b}'.format(n=name, z=zone.origin, b=base)
        self.ldap_obj['idnsName'] = [str(name)]

    self.ldap_obj.update(self.records)

    if self.ttl:
        self.ldap_obj['DNSTTL'] = [str(self.ttl)]

    buff = io.StringIO()
    ldifw = ldif.LDIFWriter(buff)
    ldifw.unparse(self.dn, convert(self.ldap_obj))
    return buff.getvalue()

def ldap_dump(self):
    try:
        self.ldap.simple_bind(self.config['OpenLDAP']['binddn'], self.config['OpenLDAP']['password'])
    except ldap.LDAPError as e:
        if self.debug:
            print(e)
        return None

    result = None
    try:
        results = self.ldap.search_s(self.config['OpenLDAP']['base'], ldap.SCOPE_SUBTREE, self.config['OpenLDAP']['filter'])
        ldif_writer = ldif.LDIFWriter(sys.stdout)
        for dn, entry in results:
            ldif_writer.unparse(dn, entry)
    except ldap.LDAPError as e:
        if self.debug:
            print(e)
        pass
    return result

def generate_ldif(self, subsystem, out_file):
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    try:
        subsystem.customize_file(TPS_VLV_PATH, tmp_file.name)

        parser = ldif.LDIFRecordList(open(tmp_file.name, 'rb'))
        parser.parse()

        with open(out_file, 'w') as outfile:
            writer = ldif.LDIFWriter(outfile)
            for dn, _ in reversed(parser.all_records):
                entry = {'changetype': ['delete']}
                writer.unparse(dn, entry)

        self.print_message('Output: %s' % out_file)
    finally:
        os.unlink(tmp_file.name)

def _remove_replication_data(ldif_file):
    """ Remove the replication data from an ldif file:
        db2ldif without -r includes some of the replica data like
          - nsUniqueId
          - keepalive entries
        This function filters the ldif file to remove these data
    """
    with open(ldif_file) as f:
        parser = ldif.LDIFRecordList(f)
        parser.parse()

        ldif_list = parser.all_records

        # Iterate on a copy of the ldif entry list
        for dn, entry in ldif_list[:]:
            if dn.startswith('cn=repl keep alive'):
                ldif_list.remove((dn, entry))
            else:
                entry.pop('nsUniqueId')

    with open(ldif_file, 'w') as f:
        ldif_writer = ldif.LDIFWriter(f)
        for dn, entry in ldif_list:
            ldif_writer.unparse(dn, entry)

def res2ldif(self, dn, res):
    data = io.StringIO()
    lwr = ldif.LDIFWriter(data, cols=80)
    lwr.unparse(dn, dict(res))
    data = data.getvalue()
    return data

def get_next_file_to_backup(self, savepkt):
    """
    Find out the next file that should be backed up
    """
    # When file_to_backup is not None we should return the LDIF.
    if self.file_to_backup:
        # Remove some attributes from entry before creating the LDIF.
        ignore_attribute = ["createTimestamp", "modifyTimestamp"]

        keys = self.entry.keys()
        for value in keys:
            if value in ignore_attribute:
                del self.entry[value]

        # Dump the content of the LDAP entry as LDIF text
        ldif_dump = StringIO()
        ldif_out = ldif.LDIFWriter(ldif_dump)
        try:
            ldif_out.unparse(self.dn, self.entry)
        except UnicodeDecodeError:
            ldif_out.unparse(self.dn.decode("utf-8"), self.entry)
        self.ldif = ldif_dump.getvalue()
        self.ldif_len = len(self.ldif)
        ldif_dump.close()

        statp = bareosfd.StatPacket()
        statp.st_mode = S_IRWXU | S_IFREG
        statp.st_size = self.ldif_len
        if self.unix_create_time:
            statp.st_ctime = self.unix_create_time
        if self.unix_modify_time:
            statp.st_mtime = self.unix_modify_time

        savepkt.statp = statp
        savepkt.type = bareosfd.bFileType["FT_REG"]
        savepkt.fname = self.file_to_backup + "/data.ldif"
        # Read the content of a file
        savepkt.no_read = False

        # On next run we need to get next entry from result set.
        self.file_to_backup = None
    else:
        # If we have no result set get what the LDAP search returned as resultset.
        if self.resultset is None:
            self.resultset = self.ld.allresults(self.msg_id)

        # Try to get the first result set from the query,
        # if there is nothing return an error.
        try:
            res_type, res_data, res_msgid, res_controls = self.resultset.next()
            self.ldap_entries = res_data
        except ldap.NO_SUCH_OBJECT:
            return bareosfd.bRC_Error
        except StopIteration:
            return bareosfd.bRC_Error

        # Get the next entry from the result set.
        if self.ldap_entries:
            self.dn, self.entry = self.ldap_entries.pop(0)

            if self.dn:
                # Extract the createTimestamp and modifyTimestamp and
                # convert it to an UNIX timestamp
                self.unix_create_time = None
                try:
                    createTimestamp = self.entry["createTimestamp"][0]
                except KeyError:
                    pass
                else:
                    self.unix_create_time = self.to_unix_timestamp(createTimestamp)

                self.unix_modify_time = None
                try:
                    modifyTimestamp = self.entry["modifyTimestamp"][0]
                except KeyError:
                    pass
                else:
                    self.unix_modify_time = self.to_unix_timestamp(modifyTimestamp)

                # Convert the DN into a PATH e.g. reverse the elements.
                dn_sliced = self.dn.split(",")
                self.file_to_backup = "@LDAP" + "".join(
                    ["/" + element for element in reversed(dn_sliced)])

                statp = bareosfd.StatPacket()
                statp.st_mode = S_IRWXU | S_IFDIR
                if self.unix_create_time:
                    statp.st_ctime = self.unix_create_time
                if self.unix_modify_time:
                    statp.st_mtime = self.unix_modify_time

                savepkt.statp = statp
                savepkt.type = bareosfd.bFileType["FT_DIREND"]
                savepkt.fname = self.file_to_backup
                # A directory has a link field which contains
                # the fname + a trailing '/'
                savepkt.link = self.file_to_backup + "/"
                # Don't read the content of a directory
                savepkt.no_read = True

                if "/" in self.dn:
                    bareosfd.JobMessage(
                        bareosfd.bJobMessageType["M_ERROR"],
                        "Slashes (/) in DN not supported. Skipping %s" % self.dn,
                    )
                    # set to None, so the object will not be picked up
                    self.file_to_backup = None

    return bareosfd.bRC_OK

def to_ldif(self):
    out = StringIO()
    ldif_out = ldif.LDIFWriter(out)
    ldif_out.unparse(self.dn, self.attrs)
    return out.getvalue()