Example #1
    def reindex_vlv(self, subsystem, bind_dn, bind_password):

        input_file = tempfile.NamedTemporaryFile(delete=False)
        subsystem.customize_file(TPS_VLV_TASKS_PATH, input_file.name)

        conn = subsystem.open_database(bind_dn=bind_dn,
                                       bind_password=bind_password)

        try:
            parser = ldif.LDIFRecordList(open(input_file.name, 'rb'))
            parser.parse()

            for dn, entry in parser.all_records:

                logger.info('Adding %s', dn)

                add_modlist = ldap.modlist.addModlist(entry)
                conn.ldap.add_s(dn, add_modlist)

                while True:
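                    # The server removes the task entry when the reindex task
                    # completes, so poll until the search below fails with
                    # NO_SUCH_OBJECT.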
                    time.sleep(1)

                    try:
                        logger.info('Checking %s', dn)

                        conn.ldap.search_s(dn, ldap.SCOPE_BASE)
                    except ldap.NO_SUCH_OBJECT:
                        break

        finally:
            os.unlink(input_file.name)
            conn.close()

        self.print_message('Reindex complete')
Example #2
    def reindex_vlv(self, subsystem, bind_dn, bind_password):

        ldif_file = tempfile.NamedTemporaryFile(delete=False)
        subsystem.customize_file(KRA_VLV_TASKS_PATH, ldif_file.name)

        conn = subsystem.open_database(bind_dn=bind_dn,
                                       bind_password=bind_password)

        print('Initiating KRA VLV reindex for ' + subsystem.instance.name)

        try:
            parser = ldif.LDIFRecordList(open(ldif_file.name, "rb"))
            parser.parse()

            for dn, entry in parser.all_records:

                logger.info('Adding %s', dn)

                add_modlist = ldap.modlist.addModlist(entry)
                conn.ldap.add_s(dn, add_modlist)

                while True:
                    time.sleep(1)

                    try:
                        logger.info('Checking %s', dn)

                        conn.ldap.search_s(dn, ldap.SCOPE_BASE)
                    except ldap.NO_SUCH_OBJECT:
                        break

        finally:
            os.unlink(ldif_file.name)
            conn.close()
Example #3
def parseLDIF(ldif_file):

    # Parse while the file is still open, then return the populated parser.
    with open(ldif_file, 'rb') as f:
        parser = ldif.LDIFRecordList(f)
        parser.parse()

    return parser
Example #4
def addADUsers():
    try:
        localLDAP = ldap.initialize(LDAP_URI)

        localLDAP.simple_bind_s(LDAP_USERNAME, LDAP_PASSWORD)
        localLDAP.protocol_version = ldap.VERSION3

        ldif_file = urllib.urlopen('import.ldif')
        parser = ldif.LDIFRecordList(ldif_file)
        parser.parse()

        for dn, entry in parser.all_records:

            attrs = {}

            if 'mail' in entry and 'uid' in entry:
                attrs['name'] = entry['uid']
                attrs['mail'] = entry['mail']
                attrs['givenName'] = entry['displayName']
                attrs['sn'] = ['']
                attrs['uid'] = entry['uid']
                addUser2Gitlab(attrs)

    except ldap.LDAPError as e:
        print("Adding development users failed: ")
        print('LDAPError: %s.' % e)
    finally:
        localLDAP.unbind_s()
Example #5
 def read(cls, fh):
     self = cls()
     # Read result header comments
     while True:
         line = fh.readline()
         m = cls.RE.fullmatch(line.rstrip())
         if not m:
             break
         if m['result']:
             self.type = int(m['result'])
         elif m['control']:
             ctrl = LdapResponseControl.from_ldif(m['control'])
             self.ctrls.append(ctrl)
     # Read LDIF data
     parser = ldif.LDIFRecordList(fh)
     parser.parse()
     for dn, entry in parser.all_records:
         ctrls = [
             LdapResponseControl.from_ldif(x.decode())
             for x in entry.pop('control', [])
         ]
         if self.type == ldap.RES_INTERMEDIATE:
             ctrl = ctrls.pop(0)
             dn = ctrl.controlType
             entry = ctrl.encodedControlValue
         self.data.append((dn, entry, ctrls))
     return self
Example #6
def main():
    if len(sys.argv) < 3:
        print("Usage: {} <ldif_dump.ldif> <outfile>".format(sys.argv[0]))
        sys.exit(1)

    ldif_file = sys.argv[1]
    outf = sys.argv[2]

    out = []
    with open(ldif_file, mode='r') as ldiff:
        parser = ldif.LDIFRecordList(ldiff)
        parser.parse()

    for user in parser.all_records:
        out.append({k: user[1].get(k, None) for k in ['userPassword', 'cn']})

    with open(outf, 'w') as outfile:
        for each in out:
            try:
                info = each['cn'][0].decode() + ":" + each['userPassword'][0].decode()
                print(info)
                outfile.write(info + "\n")
            except Exception:
                # Skip records that lack cn or userPassword values
                pass
Example #7
    def add_vlv(self, subsystem, bind_dn, bind_password):

        input_file = tempfile.NamedTemporaryFile(delete=False)

        try:
            subsystem.customize_file(TPS_VLV_PATH, input_file.name)

            conn = subsystem.open_database(bind_dn=bind_dn,
                                           bind_password=bind_password)

            try:
                parser = ldif.LDIFRecordList(open(input_file.name, 'rb'))
                parser.parse()

                for dn, entry in parser.all_records:

                    logger.info('Adding %s', dn)

                    add_modlist = ldap.modlist.addModlist(entry)
                    conn.ldap.add_s(dn, add_modlist)

            finally:
                conn.close()

        finally:
            os.unlink(input_file.name)

        self.print_message('VLVs added')
Example #8
    def restore_entry(self, context):
        # Restore the entry
        if self.ldif:
            # Parse the LDIF back to an attribute list
            ldif_dump = StringIO(str(self.ldif))
            ldif_parser = ldif.LDIFRecordList(ldif_dump, max_entries=1)
            ldif_parser.parse()
            dn, entry = ldif_parser.all_records[0]
            ldif_dump.close()

            if self.dn != dn:
                bareosfd.JobMessage(
                    context,
                    bJobMessageType["M_INFO"],
                    "Restoring original DN %s as %s\n" % (dn, self.dn),
                )

            if dn:
                if self.ld:
                    # Try adding the entry
                    add_ldif = ldap.modlist.addModlist(entry)
                    try:
                        self.ld.add_s(self.dn, add_ldif)
                    except ldap.LDAPError as e:
                        # Delete the original DN
                        try:
                            self.ld.delete_s(self.dn)
                            self.ld.add_s(self.dn, add_ldif)
                        except ldap.LDAPError as e:
                            if type(e.message) == dict and "desc" in e.message:
                                bareosfd.JobMessage(
                                    context,
                                    bJobMessageType["M_ERROR"],
                                    "Failed to restore LDAP DN %s: %s\n" %
                                    (self.dn, e.message["desc"]),
                                )
                            else:
                                bareosfd.JobMessage(
                                    context,
                                    bJobMessageType["M_ERROR"],
                                    "Failed to restore LDAP DN %s: %s\n" %
                                    (self.dn, e),
                                )
                            self.ldif = None
                            return bRCs["bRC_Error"]
                else:
                    bareosfd.JobMessage(
                        context,
                        bJobMessageType["M_ERROR"],
                        "Failed to restore LDAP DN %s no writable binding to LDAP exists\n"
                        % (self.dn),
                    )
                    self.ldif = None
                    return bRCs["bRC_Error"]

            # Processed ldif
            self.ldif = None

        return bRCs["bRC_OK"]
Example #9
 def test_subschema_file(self):
     for test_file in TEST_SUBSCHEMA_FILES:
         # Read and parse LDIF file
         ldif_file = open(test_file, 'rb')
         ldif_parser = ldif.LDIFRecordList(ldif_file,max_entries=1)
         ldif_parser.parse()
         _, subschema_subentry = ldif_parser.all_records[0]
         sub_schema = ldap.schema.SubSchema(subschema_subentry)
Example #10
def urlfetch(uri, trace_level=0):
    """
  Fetches a parsed schema entry by uri.

  If uri is a LDAP URL the LDAP server is queried directly.
  Otherwise uri is assumed to point to a LDIF file which
  is loaded with urllib.
  """
    uri = uri.strip()
    if uri.startswith('ldap:') or uri.startswith('ldaps:') or uri.startswith(
            'ldapi:'):
        import ldapurl
        ldap_url = ldapurl.LDAPUrl(uri)

        # This is an internal function; don't enable bytes_mode.
        l = ldap.initialize(ldap_url.initializeUrl(),
                            trace_level,
                            bytes_mode=False)
        l.protocol_version = ldap.VERSION3
        l.simple_bind_s(ldap_url.who or '', ldap_url.cred or '')
        subschemasubentry_dn = l.search_subschemasubentry_s(ldap_url.dn)
        if subschemasubentry_dn is None:
            s_temp = None
        else:
            if ldap_url.attrs is None:
                schema_attrs = SCHEMA_ATTRS
            else:
                schema_attrs = ldap_url.attrs
            s_temp = l.read_subschemasubentry_s(subschemasubentry_dn,
                                                attrs=schema_attrs)
        l.unbind_s()
        del l
    else:
        import ldif
        from ldap.compat import urlopen
        ldif_file = urlopen(uri)
        ldif_parser = ldif.LDIFRecordList(ldif_file, max_entries=1)
        ldif_parser.parse()
        subschemasubentry_dn, s_temp = ldif_parser.all_records[0]
    # Work-around for mixed-cased attribute names
    subschemasubentry_entry = ldap.cidict.cidict()
    s_temp = s_temp or {}
    for at, av in s_temp.items():
        if at in SCHEMA_CLASS_MAPPING:
            try:
                subschemasubentry_entry[at].extend(av)
            except KeyError:
                subschemasubentry_entry[at] = av
    # Finally parse the schema
    if subschemasubentry_dn != None:
        parsed_sub_schema = ldap.schema.SubSchema(subschemasubentry_entry)
    else:
        parsed_sub_schema = None
    return subschemasubentry_dn, parsed_sub_schema
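A rough usage sketch for the urlfetch() helper above, with placeholder URIs: an ldap:/ldaps:/ldapi: URL is queried directly, while any other URI is fetched with urlopen() and parsed as a single-record LDIF file; either way the result is the subschema subentry DN and a parsed ldap.schema.SubSchema (or None if no subentry was found).

# Placeholder URIs for illustration only.
dn, schema = urlfetch('ldap://ldap.example.com/dc=example,dc=com')
dn, schema = urlfetch('file:///tmp/subschema.ldif')
if schema is not None:
    print('%d object classes in %s' % (len(schema.listall(ldap.schema.ObjectClass)), dn))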
Example #11
def test_init_ldap_server():
    """
    Create some resources in the LDAP instance
    """
    with open(os.path.join(os.getenv("VAULT_CONFIG"), "ldap-config.ldif"),
              'rb') as fd:
        ldif_parser = ldif.LDIFRecordList(fd)
        ldif_parser.parse()
        for dn, entry in ldif_parser.all_records:
            add_modlist = modlist.addModlist(entry)
            LDAP.add_s(dn, add_modlist)
Example #12
def ldapadd(conn, ldiffile):
    lfd = open(ldiffile)
    ldrl = ldif.LDIFRecordList(lfd)
    ldrl.parse()
    lfd.close()
    for dn, entry in ldrl.all_records:
        mylist = []
        for attr, vals in entry.iteritems():
            mylist.append((attr, vals))
        try:
            conn.add_s(dn, mylist)
        except (ldap.ALREADY_EXISTS, ldap.UNDEFINED_TYPE):
            pass
Example #13
 def file_to_ldap(self, filename):
     """convert the given schema file name to its python-ldap format
     suitable for passing to ldap.schema.SubSchema()
     @param filename - the full path and filename of a schema file in ldif format"""
     import urllib, ldif
     ldif_file = urllib.urlopen(filename)
     ldif_parser = ldif.LDIFRecordList(ldif_file, max_entries=1)
     if not ldif_parser:
         return None
     ldif_parser.parse()
     if not ldif_parser.all_records:
         return None
     return ldif_parser.all_records[0][1]
Example #14
    def enable_autofs_schema(self, basedn):
        """ Enable autofs schema

            :param str basedn: base dn of the ldap server
            :return: None
            :Exceptions: None
        """

        autofs_schema = ("""
dn: ou=automount,%s
ou: automount
objectClass: organizationalUnit

dn: CN=auto.master,OU=automount,%s
objectClass: top
objectClass: nisMap
cn: auto.master
nisMapName: auto.master

dn: cn=/-,cn=auto.master,ou=automount,%s
objectClass: nisObject
objectClass: top
cn: /-
nisMapEntry: auto.direct
nisMapName: auto.master

dn: cn=/home,cn=auto.master,ou=automount,%s
objectClass: nisObject
objectClass: top
cn: /home
nisMapEntry: auto.home
nisMapName: auto.master

dn: cn=auto.direct,ou=automount,%s
objectClass: nisMap
objectClass: top
cn: auto.direct
nisMapName: auto.direct

dn: cn=auto.home,ou=automount,%s
objectClass: nisMap
objectClass: top
nisMapName: auto.home""") % (basedn, basedn, basedn, basedn, basedn, basedn)
        ldif_file = StringIO(autofs_schema)
        parser = ldif.LDIFRecordList(ldif_file)
        parser.parse()

        for ldap_dn, entry in parser.all_records:
            self.add_entry(entry, ldap_dn)
Example #15
    def write(self, dn, old_data, new_data):
        log.debug('write DN: %s' % dn)
        old_dict = ldif.LDIFRecordList(io.StringIO(old_data.decode('utf-8')))
        old_dict.parse()
        log.debug("dict_old: %s" % old_dict.all_records)
        new_dict = ldif.LDIFRecordList(io.StringIO(new_data.decode('utf-8')))
        new_dict.parse()
        log.debug("dict_new: %s" % new_dict.all_records)
        _ldif = modlist.modifyModlist(old_dict.all_records[0][1],
                                      new_dict.all_records[0][1])
        if (1, 'objectCategory', None) in _ldif:
            (_, o) = old_dict.all_records[0]
            (_, n) = new_dict.all_records[0]
            cat_old = o['objectCategory'][0].decode(
                'utf-8')[:-len(self.ldap.schemaDN) - 1]
            cat_new = n['objectCategory'][0].decode(
                'utf-8')[:-len(self.ldap.schemaDN) - 1]
            log.debug('objectCategory changed for %s (%s -> %s)' %
                      (dn, cat_old, cat_new))
            return cat_new

        log.debug("_ldif: %s" % _ldif)
        self.ldap.apply_diff(dn, _ldif)
        return None
Example #16
 def _parsemessage(self, datagram):
     payload = ''.join(datagram.splitlines(True)[3:])
     ldifdata = ldif.LDIFRecordList(StringIO.StringIO(payload))
     ldifdata.parse()
     ldifdn, ldifattr = ldifdata.all_records[0]
     #self.loghandle.debug("ldif data dn = %s,attrs= %s", ldifdn, ldifattr)
     reqdn = ldifattr['reqDN'][0]
     reqtype = ldifattr['reqType'][0]
     if reqtype != "delete":
         reqmod = ldifattr['reqMod']
     else:
         reqmod = None
     reqresult = ldifattr['reqResult'][0]
     req = (reqtype, reqdn, reqmod, reqresult)
     self.callback(req)
Example #17
    def file_to_ldap(self, filename):
        """Convert the given schema file name to its python-ldap format
        suitable for passing to ldap.schema.SubSchema()

        :param filename: the full path and filename of a schema file in ldif format
        :type filename: str
        """

        with open(filename, 'r') as f:
            ldif_parser = ldif.LDIFRecordList(f, max_entries=1)
            ldif_parser.parse()
        if not ldif_parser:
            return None
        if not ldif_parser.all_records:
            return None
        return ldif_parser.all_records[0][1]
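A rough usage sketch for file_to_ldap() above (the path and owning object are hypothetical): the returned entry dict is what ldap.schema.SubSchema() expects.

# 'schema_helper' and the path are placeholders for illustration only.
entry = schema_helper.file_to_ldap('/path/to/schema.ldif')
if entry is not None:
    subschema = ldap.schema.SubSchema(entry)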
Example #18
    def fn(fname, host='localhost'):

        fh = open(os.path.join(os.path.dirname(os.path.realpath(__file__)), fname), 'rb')
        ctrl = ldif.LDIFRecordList(fh)
        ctrl.parse()

        directory = dict(ctrl.all_records)

        mockldap = MockLdap(directory)

        mockldap.start()
        mockldap['ldap://{}/'.format(host)]

        conn = ldap.initialize('ldap://{}/'.format(host))

        return conn
Example #19
    def test_subschema_file(self):
        for test_file in TEST_SUBSCHEMA_FILES:
            # Read and parse LDIF file
            with open(test_file, 'rb') as ldif_file:
                ldif_parser = ldif.LDIFRecordList(ldif_file,max_entries=1)
                ldif_parser.parse()
            _, subschema_subentry = ldif_parser.all_records[0]
            sub_schema = ldap.schema.SubSchema(subschema_subentry)

            # Smoke-check for listall() and attribute_types()
            for objclass in sub_schema.listall(ObjectClass):
                must, may = sub_schema.attribute_types([objclass])

                for oid, attributetype in must.items():
                    self.assertEqual(attributetype.oid, oid)
                for oid, attributetype in may.items():
                    self.assertEqual(attributetype.oid, oid)
Example #20
    def _read_defaults(self):
        spath = self._get_defaults_loc(DEFAULTS_PATH)
        self._config = configparser.ConfigParser()
        self._config.read([spath])
        if self._is_container:
            # Load some values over the top that are container specific
            self._config.set(SECTION, "pid_file",
                             "/data/run/slapd-localhost.pid")
            self._config.set(SECTION, "ldapi",
                             "/data/run/slapd-localhost.socket")
        self._defaults_cached = True

        # Now check the dse.ldif (if present) to see if custom paths were set
        if self._serverid:
            # Get the dse.ldif from the instance name
            prefix = os.environ.get('PREFIX', ""),
            if self._serverid.startswith("slapd-"):
                self._serverid = self._serverid.replace("slapd-", "", 1)
            dsepath = "{}/etc/dirsrv/slapd-{}/dse.ldif".format(
                prefix[0], self._serverid)
        elif self._instance is not None:
            ds_paths = Paths(self._instance.serverid, None)
            dsepath = os.path.join(ds_paths.config_dir, 'dse.ldif')
        else:
            # Nothing else to do but return
            return

        try:
            from lib389.utils import ensure_str  # prevents circular import errors
            with open(dsepath, 'r') as file_dse:
                dse_parser = ldif.LDIFRecordList(file_dse, max_entries=2)
                if dse_parser is None:
                    return
                dse_parser.parse()
                if dse_parser.all_records is None:
                    return
                # We have the config, start processing the DSE_MAP
                config = dse_parser.all_records[1]  # cn=config
                attrs = config[1]
                for attr in DSE_MAP.keys():
                    if attr in attrs.keys():
                        self._config.set(SECTION, DSE_MAP[attr],
                                         ensure_str(attrs[attr][0]))
        except:
            # No dse.ldif or can't read it, no problem just skip it
            pass
Example #21
    def add_vlv(self, subsystem, bind_dn, bind_password):

        ldif_file = tempfile.NamedTemporaryFile(delete=False)
        subsystem.customize_file(KRA_VLV_PATH, ldif_file.name)

        conn = subsystem.open_database(bind_dn=bind_dn,
                                       bind_password=bind_password)

        try:
            parser = ldif.LDIFRecordList(open(ldif_file.name, "rb"))
            parser.parse()
            for dn, entry in parser.all_records:
                add_modlist = ldap.modlist.addModlist(entry)
                conn.ldap.add_s(dn, add_modlist)

        finally:
            os.unlink(ldif_file.name)
            conn.close()
Example #22
def _move_ruv(ldif_file):
    """ Move RUV entry in an ldif file to the top"""

    with open(ldif_file) as f:
        parser = ldif.LDIFRecordList(f)
        parser.parse()

        ldif_list = parser.all_records
        for dn in ldif_list:
            if dn[0].startswith('nsuniqueid=ffffffff-ffffffff-ffffffff-ffffffff'):
                ruv_index = ldif_list.index(dn)
                ldif_list.insert(0, ldif_list.pop(ruv_index))
                break

    with open(ldif_file, 'w') as f:
        ldif_writer = ldif.LDIFWriter(f)
        for dn, entry in ldif_list:
            ldif_writer.unparse(dn, entry)
Example #23
    def modifications_from_ldif(self, ldif_file):
        """
        Parse ldif file. Default operation is add, only changetypes "add"
        and "modify" are supported.
        :param ldif_file: an opened file for read
        :raises: ValueError
        """
        parser = ldif.LDIFRecordList(ldif_file)
        parser.parse()

        last_dn = None
        for dn, entry in parser.all_records:
            if dn is None:
                # ldif parser return None, if records belong to previous DN
                dn = last_dn
            else:
                last_dn = dn

            if "replace" in entry:
                for attr in entry["replace"]:
                    attr = attr.decode('utf-8')
                    try:
                        self.replace_value(dn, attr, entry[attr])
                    except KeyError:
                        raise ValueError("replace: {dn}, {attr}: values are "
                                         "missing".format(dn=dn, attr=attr))
            elif "delete" in entry:
                for attr in entry["delete"]:
                    attr = attr.decode('utf-8')
                    self.remove_value(dn, attr, entry.get(attr, None))
            elif "add" in entry:
                for attr in entry["add"]:
                    attr = attr.decode('utf-8')
                    try:
                        self.replace_value(dn, attr, entry[attr])
                    except KeyError:
                        raise ValueError("add: {dn}, {attr}: values are "
                                         "missing".format(dn=dn, attr=attr))
            else:
                logger.error(
                    "Ignoring entry: %s : only modifications "
                    "are allowed (missing \"changetype: "
                    "modify\")", dn)
Example #24
def urlfetch(uri, trace_level=0):
    """
  Fetches a parsed schema entry by uri.

  If uri is a LDAP URL the LDAP server is queried directly.
  Otherwise uri is assumed to point to a LDIF file which
  is loaded with urllib.
  """
    uri = uri.strip()
    if uri.startswith('ldap:') or uri.startswith('ldaps:') or uri.startswith(
            'ldapi:'):
        import ldapurl
        ldap_url = ldapurl.LDAPUrl(uri)
        l = ldap.initialize(ldap_url.initializeUrl(), trace_level)
        l.protocol_version = ldap.VERSION3
        l.simple_bind_s(ldap_url.who or '', ldap_url.cred or '')
        subschemasubentry_dn = l.search_subschemasubentry_s(
            ldap_url.dn).decode()
        if subschemasubentry_dn is None:
            subschemasubentry_entry = None
        else:
            if ldap_url.attrs is None:
                schema_attrs = SCHEMA_ATTRS
            else:
                schema_attrs = ldap_url.attrs
            subschemasubentry_entry = l.read_subschemasubentry_s(
                subschemasubentry_dn, attrs=schema_attrs)
        subschemasubentry_entry = By2Str(subschemasubentry_entry)
        l.unbind_s()
        del l
    else:
        import urllib.request, ldif
        ldif_file = urllib.request.urlopen(uri)
        ldif_parser = ldif.LDIFRecordList(ldif_file, max_entries=1)
        ldif_parser.parse()
        subschemasubentry_dn, subschemasubentry_entry = ldif_parser.all_records[
            0]
    if subschemasubentry_dn != None:
        parsed_sub_schema = ldap.schema.SubSchema(subschemasubentry_entry)
    else:
        parsed_sub_schema = None
    return subschemasubentry_dn, parsed_sub_schema
Example #25
 def _parse_records(
     self,
     ldif_string,
     ignored_attr_types=None,
     max_entries=0,
 ):
     """
     Parse LDIF data in `ldif_string' into list of records
     """
     ldif_file = StringIO(ldif_string)
     ldif_parser = ldif.LDIFRecordList(
         ldif_file,
         ignored_attr_types=ignored_attr_types,
         max_entries=max_entries,
     )
     parser_method = getattr(ldif_parser,
                             'parse_%s_records' % self.record_type)
     parser_method()
     if self.record_type == 'entry':
         return ldif_parser.all_records
     elif self.record_type == 'change':
         return ldif_parser.all_modify_changes
Example #26
    def generate_ldif(self, subsystem, out_file):

        tmp_file = tempfile.NamedTemporaryFile(delete=False)

        try:
            subsystem.customize_file(TPS_VLV_PATH, tmp_file.name)

            parser = ldif.LDIFRecordList(open(tmp_file.name, 'rb'))
            parser.parse()

            with open(out_file, 'w') as outfile:

                writer = ldif.LDIFWriter(outfile)

                for dn, _ in reversed(parser.all_records):
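                    # Entries come out in reverse add order so that child
                    # entries are deleted before their parents.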
                    entry = {'changetype': ['delete']}
                    writer.unparse(dn, entry)

            self.print_message('Output: %s' % out_file)

        finally:
            os.unlink(tmp_file.name)
Example #27
def parseAudit(fname):
    f = open(fname)
    lrl = ldif.LDIFRecordList(f)
    lrl.parse()
    f.close()
    modlist = []
    savets = None
    savedn = None
    for dn, ent in lrl.all_records:
        ts, optype, data = parseRec(ldap.cidict.cidict(ent))
        if optype == ldap.REQ_MODIFY:
            if data[1]:  # do not add stripped mods
                if dn:
                    if modlist:
                        addAuditOp(ModReq(savedn, auditts=savets,
                                          mods=modlist))
                    modlist, savets, savedn = ([data], ts, dn)
                else:  # continuation
                    modlist.append(data)
            elif dn and not savedn:  # but save ts and dn in case the first mod is stripped
                modlist, savets, savedn = ([], ts, dn)
        else:
            if modlist:
                addAuditOp(ModReq(dn, auditts=savets, mods=modlist))
            modlist, savets, savedn = ([], None, None)
            if optype == ldap.REQ_ADD:
                req = AddReq(dn, auditts=ts, ent=data)
            elif optype == ldap.REQ_MODRDN:
                req = MdnReq(dn,
                             auditts=ts,
                             newrdn=data.get('newrdn', None),
                             deleteoldrdn=data.get('deleteoldrdn', None),
                             newsuperior=data.get('newsuperior', None))
            elif optype == ldap.REQ_DELETE:
                req = DelReq(dn, auditts=ts)
            addAuditOp(req)
    if modlist and savedn and savets:
        addAuditOp(ModReq(savedn, auditts=savets, mods=modlist))
Example #28
def _remove_replication_data(ldif_file):
    """ Remove the replication data from ldif file:
        db2lif without -r includes some of the replica data like
        - nsUniqueId
        - keepalive entries
        This function filters the ldif fil to remove these data
    """

    with open(ldif_file) as f:
        parser = ldif.LDIFRecordList(f)
        parser.parse()

        ldif_list = parser.all_records
        # Iterate on a copy of the ldif entry list
        for dn, entry in ldif_list[:]:
            if dn.startswith('cn=repl keep alive'):
                ldif_list.remove((dn, entry))
            else:
                entry.pop('nsUniqueId')
    with open(ldif_file, 'w') as f:
        ldif_writer = ldif.LDIFWriter(f)
        for dn, entry in ldif_list:
            ldif_writer.unparse(dn, entry)
Example #29
      'leadingspace':[' this strings contains a leading space'],
      'trailingspace':['this strings contains a trailing space '],
      'emptyvalue':['',''],
      'singlespace':[' '],
    },
  )
]

test_entry_ldif = """dn:
emptyvalue:
emptyvalue: 
emptyvalue:  

"""

ldif_parser = ldif.LDIFRecordList(StringIO.StringIO(test_entry_ldif))
ldif_parser.parse()
test_entry_records.extend(ldif_parser.all_records)

for test_dn,test_entry in test_entry_records:
  ldif_lines = ldif.CreateLDIF(
    test_dn,test_entry,['bin']
  )
  sys.stdout.write(ldif_lines)
  ldif_parser = ldif.LDIFRecordList(StringIO.StringIO(ldif_lines))
  ldif_parser.parse()
  result_entry = ldif_parser.all_records[0][1]
#  print test_entry
  for a in test_entry.keys():
    test_entry[a].sort();result_entry[a].sort()
    if test_entry[a]!=result_entry[a]:
Example #30
            for value in self.attrs[attrname]:
                replaced.add(re.sub(pattern, replacement, value))
            self.attrs[attrname] = replaced
            # static
        for attrname, values in config.items("static"):
            if attrname not in self.attrs:
                self.attrs[attrname] = set()
            values = values.split(", ")
            for value in values:
                self.attrs[attrname].add(value)

    def __str__(self):
        return "%s\n%s\n" % (self.dn.encode("utf-8"), self.attrs)

    def __hash__(self):
        return self.dnhash

    def get_dn_and_entry(self):
        return self.dn, self.attrs


sourceparser = ldif.LDIFRecordList(open(source))
sourceparser.parse()
people = set()
for dn, entry in sourceparser.all_records:
    people.add(Target(entry))

writer = ldif.LDIFWriter(open(target, 'w'))
for person in people:
    writer.unparse(*person.get_dn_and_entry())
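The pattern shared by the examples above, reduced to a minimal sketch (the file name is a placeholder): wrap an open file object in ldif.LDIFRecordList, call parse(), then read the (dn, entry) pairs from all_records, where each entry maps attribute names to lists of values.

import ldif

# Minimal sketch of the common pattern; 'data.ldif' is a placeholder.
with open('data.ldif', 'rb') as f:
    parser = ldif.LDIFRecordList(f)
    parser.parse()

for dn, entry in parser.all_records:
    print(dn, sorted(entry.keys()))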