Esempio n. 1
0
def generate_ldif_from_list(action, array):
    """Generate an LDIF string from a list of entry dicts.

    Parameters:
        action: unused here; kept for interface compatibility with callers.
        array: a list of dicts, each describing a user or group entry and
            expected to contain a 'dn' key.

    Returns:
        The LDIF text for all entries that had a 'dn', or None when *array*
        is not a list (matching the original implicit behavior).
    """
    if isinstance(array, list):
        output = StringIO()
        w = LDIFWriter(output)
        for a in array:
            # 'dn' in a replaces the Python-2-only dict.has_key().
            if 'dn' in a:
                dn = a.pop('dn')
                # LDIFWriter expects every attribute value to be a list.
                for k, v in a.items():
                    if not isinstance(v, list):
                        a[k] = [v]

                w.unparse(dn, a)
            else:
                logger.error('the element of ldif does not have "dn": %s', a)

        # getvalue() works on all StringIO implementations and avoids the
        # reset()/read() dance (reset() does not exist on io.StringIO).
        r = output.getvalue()
        output.close()

        return r
Esempio n. 2
0
def _entries_to_ldif(entries):
    """Render *entries* (LDAP entry objects) as one LDIF string."""
    buf = StringIO()
    ldif_out = LDIFWriter(buf)
    for item in entries:
        # Each entry exposes its dn and a raw attribute mapping.
        ldif_out.unparse(str(item.dn), dict(item.raw))
    return buf.getvalue()
Esempio n. 3
0
class FixLDIF(LDIFParser):
    """LDIF filter that rebases DNs and strips unwanted attributes/classes."""

    def __init__(self, input, output):
        LDIFParser.__init__(self, input)
        self.writer = LDIFWriter(output)

    def handle(self, dn, entry):
        """Rewrite the DN, clean the entry, then emit it."""
        rebased = self.fix_dn(dn)
        self.fix_entry(entry)
        self.fix_objectclass(entry['objectclass'])
        self.writer.unparse(rebased, entry)

    def fix_dn(self, dn):
        """Keep only the leading RDN and graft it onto the module basedn."""
        rdn = dn.split(',', 1)[0]
        return '%s,%s' % (rdn, basedn)

    def fix_entry(self, entry):
        """Drop ignored attributes, then mirror source -> target pairs."""
        for name in ignore_attribute:
            if name in entry:
                del entry[name]
        for target, source in copy_attribute:
            entry[target] = entry[source]

    def fix_objectclass(self, objectclass):
        """Remove object classes listed in ignore_objectclass."""
        for name in ignore_objectclass:
            if name in objectclass:
                objectclass.remove(name)
Esempio n. 4
0
def _entries_to_ldif(entries):
    """Format LDAP entries as LDIF text and return it as a string."""
    sink = StringIO()
    writer = LDIFWriter(sink)
    for e in entries:
        writer.unparse(str(e.dn), dict(e.raw))
    return sink.getvalue()
Esempio n. 5
0
    def to_ldif(self, output_file=sys.stdout):
        """Write this LDAP entry to *output_file* in LDIF format.

        :param output_file: Any file-like object. Default is stdout.

        """
        # NOTE: the default binds sys.stdout at definition time, as the
        # original did; callers relying on that behavior are unaffected.
        writer = LDIFWriter(output_file)
        writer.unparse(self._dn, dict(self._attrs))
class ActiveDirectoryToOpenLdapLDIFConvertor(LDIFParser):
    """Rewrite Active Directory LDIF entries so OpenLDAP can import them."""

    # Object classes appended when the DN contains the given fragment.
    objectclassAddsBasedOnDN = {
        'CN=ExchangeActiveSyncDevices': 'exchangeActiveSyncDevices',
    }

    # Per-DN-fragment objectClass renames (source class -> replacement).
    objectclassChangesBasedOnDN = {
        'CN=_Template ': {'user': '******'},
        'CN=_Template_': {'user': '******'},
        'CN=_Template\, ': {'user': '******'},
    }

    # Straight objectClass renames applied to every entry.
    objectclassMappings = {
        'top': 'mstop',
        'user': 'customActiveDirectoryUser',
        'group': 'customActiveDirectoryGroup',
        'contact': 'customActiveDirectoryContact',
    }

    # Attribute types whose duplicate values should be collapsed.
    attributetypesValuesDuplicates = ['dSCorePropagationData']

    def __init__(self, input, output):
        LDIFParser.__init__(self, input)
        self.writer = LDIFWriter(output)

    def addObjectclassesBasedOnDN(self, dn, entry):
        """Append extra objectClass values for entries whose DN matches."""
        for objAdd in self.objectclassAddsBasedOnDN:
            if objAdd.lower() in dn.lower():  # case insensitive match
                if 'objectClass' not in entry:
                    entry['objectClass'] = []
                entry['objectClass'].append(self.objectclassAddsBasedOnDN[objAdd])

    def changeObjectclassesBasedOnDN(self, dn, entry):
        """Replace objectClass values for entries whose DN matches."""
        if 'objectClass' not in entry:
            return
        for objChange in self.objectclassChangesBasedOnDN:
            if objChange.lower() in dn.lower():  # case insensitive match
                changes = self.objectclassChangesBasedOnDN[objChange]
                for objSource in changes:
                    # enumerate() replaces the hand-maintained index counter.
                    for index, objTarget in enumerate(entry['objectClass']):
                        if objSource == objTarget:
                            entry['objectClass'][index] = changes[objSource]

    def changeObjectclasses(self, dn, entry):
        """Apply the global objectClass rename table in place."""
        if 'objectClass' in entry:
            for index, objectclass in enumerate(entry['objectClass']):
                # Dict lookup replaces the linear scan over the mapping keys.
                if objectclass in self.objectclassMappings:
                    entry['objectClass'][index] = self.objectclassMappings[objectclass]

    def removeDuplicateAttributeValues(self, dn, entry):
        """Collapse duplicate values for attribute types known to repeat."""
        for attributetype in self.attributetypesValuesDuplicates:
            if attributetype in entry:
                entry[attributetype] = list(set(entry[attributetype]))

    def handle(self, dn, entry):
        """Apply every transformation, then write the entry through."""
        self.addObjectclassesBasedOnDN(dn, entry)
        self.changeObjectclassesBasedOnDN(dn, entry)
        self.changeObjectclasses(dn, entry)
        self.removeDuplicateAttributeValues(dn, entry)
        self.writer.unparse(dn, entry)
Esempio n. 7
0
class StrLdif(object):
    """Collect LDIF output in memory and expose it as a string."""

    def __init__(self):
        # The writer streams into an in-memory buffer instead of a file.
        self.sio = StringIO()
        self.ldif_writer = LDIFWriter(self.sio)

    def unparse(self, dn, attrs):
        """Delegate straight to the underlying LDIFWriter."""
        self.ldif_writer.unparse(dn, attrs)

    def ldif(self):
        """Return everything written so far."""
        return self.sio.getvalue()
Esempio n. 8
0
 def __init__(self, diff_fil):
     """
     Set up the incremental-ldif writer.

     `diff_fil`: the file-like object into which the incremental
     ldifs will be written.
     """
     # Keep a direct handle on the file as well: LDIFWriter appears to
     # offer no way to emit a bare delete record for a dn, so
     # handle_delete writes to the file directly.
     self.diff_fil = diff_fil
     self.writer = LDIFWriter(diff_fil)
Esempio n. 9
0
 def ldap(self, command, attrs):
     # Minimal slapd back-end protocol handler: answers SEARCH commands by
     # mapping Django user objects onto inetOrgPerson LDIF entries and
     # printing the protocol response on stdout.
     # NOTE(review): Python 2 code (print statements, iteritems, unicode).
     self.logger.debug('received command %s %s', command, attrs)
     if command == 'SEARCH':
         out = StringIO()
         ldif_writer = LDIFWriter(out)
         qs = get_user_model().objects.all()
         if attrs['filter'] != '(objectClass=*)':
             # Only simple single-attribute filters of the form
             # (attr=value) are supported.
             m = re.match(r'\((\w*)=(.*)\)', attrs['filter'])
             if not m:
                 print 'RESULT'
                 print 'code: 1'
                 print 'info: invalid filter'
                 print
                 return
             # Reverse-map the LDAP attribute name to a user-model field;
             # the for/else falls through when no mapping matches.
             for user_attribute, ldap_attribute in MAPPING.iteritems():
                 if ldap_attribute == m.group(1):
                     break
             else:
                 print 'RESULT'
                 print 'code: 1'
                 print 'info: unknown attribute in filter'
                 print
                 return
             value = m.group(2)
             # Translate LDAP wildcard positions into Django ORM lookups.
             if value.endswith('*') and value.startswith('*'):
                 user_attribute += '__icontains'
                 value = value[1:-1]
             elif value.endswith('*'):
                 user_attribute += '__istartswith'
                 value = value[:-1]
             elif value.startswith('*'):
                 user_attribute += '__iendswith'
                 value = value[1:]
             else:
                 user_attribute += '__iexact'
             value = unescape_filter_chars(value)
             qs = qs.filter(**{user_attribute: value.decode('utf-8')})
         for user in qs:
             # Build the LDIF attribute dict; values must be byte strings.
             o = {}
             for user_attribute, ldap_attribute in MAPPING.iteritems():
                 o[ldap_attribute] = [
                     unicode(getattr(user, user_attribute)).encode('utf-8')
                 ]
             o['objectClass'] = ['inetOrgPerson']
             dn = 'uid=%s,%s' % (escape_dn_chars(
                 o['uid'][0]), attrs['suffix'])
             self.logger.debug(u'sending entry %s %s', dn, o)
             ldif_writer.unparse(dn, o)
         # Trailing comma suppresses the extra newline (Python 2 print).
         print out.getvalue(),
         out.close()
     print 'RESULT'
     print 'code: 0'
     print 'info: RockNRoll'
     print
class StrLdif(object):
    """In-memory LDIF sink: unparse() accumulates, ldif() returns the text."""

    def __init__(self):
        self.sio = StringIO()
        self.ldif_writer = LDIFWriter(self.sio)

    def unparse(self, dn, attrs):
        """Serialize one entry into the internal buffer."""
        self.ldif_writer.unparse(dn, attrs)

    def ldif(self):
        """Return the accumulated LDIF text."""
        return self.sio.getvalue()
Esempio n. 11
0
    def take_action(self, args):
        """Search for a group by cn under the given subtree and print LDIF."""
        connection = self.app.conn
        suffix = self.app.options.b

        # Compose the search base from the subtree and the configured suffix.
        search_base = '%s,%s' % (args.subtree, suffix)
        ldap_filter = '(cn=%s)' % args.groupname  # avoids shadowing filter()

        writer = LDIFWriter(sys.stdout)
        for dn, attrs in connection.search_s(search_base, ldap.SCOPE_SUBTREE,
                                             ldap_filter):
            writer.unparse(dn, attrs)
Esempio n. 12
0
class DiffWriter(object):
    """
    A moderately intelligent bridge that interprets adds, changes, and
    deletes between two ldif files and writes them as incremental
    ldifs to an output file.

    Not intended for use outside this module.
    """

    def __init__(self, diff_fil):
        """
        `diff_fil`: the file-like object into which the incremental
        ldifs will be written.
        """
        self.writer = LDIFWriter(diff_fil)
        # Unfortunately we have to maintain this separately from the
        # LDIFWriter since the writer appears to offer no way to
        # delete a full dn.  See handle_delete.
        self.diff_fil = diff_fil

    def handle_add(self, dn_entry):
        """
        Write an incremental ldif to add the supplied dn_entry.
        """
        addition = modlist.addModlist(dn_entry.entry)
        self.writer.unparse(dn_entry.dn, addition)

    def handle_change(self, old_dn_entry, new_dn_entry):
        """
        Write an incremental ldif to modify the old entry into the new
        entry.

        If old_dn_entry and new_dn_entry are identical, acts as a
        no-op.

        Raises NonMatchingDnException if the old and new entries don't
        have the same dn.
        """
        if old_dn_entry.dn != new_dn_entry.dn:
            # Message typo fixed (was "dn'ss").
            raise NonMatchingDnException("Old and new dns must be the same.")
        changes = modlist.modifyModlist(old_dn_entry.entry, new_dn_entry.entry)
        if changes:
            self.writer.unparse(old_dn_entry.dn, changes)

    def handle_delete(self, dn_entry):
        """
        Write the incremental ldif to delete the dn of the supplied
        entry.
        """
        # LDIFWriter cannot emit a bare delete record, so write it by hand.
        self.diff_fil.write("dn: %s\n" % dn_entry.dn)
        self.diff_fil.write('changetype: delete\n')
        self.diff_fil.write('\n')
Esempio n. 13
0
 def ldap(self, command, attrs):
     # slapd back-end protocol handler: serves SEARCH commands by mapping
     # Django users onto inetOrgPerson LDIF entries printed on stdout.
     # NOTE(review): Python 2 code (print statements, iteritems, unicode).
     self.logger.debug('received command %s %s', command, attrs)
     if command == 'SEARCH':
         out = StringIO()
         ldif_writer = LDIFWriter(out)
         qs = get_user_model().objects.all()
         if attrs['filter'] != '(objectClass=*)':
             # Only simple single-attribute (attr=value) filters supported.
             m = re.match(r'\((\w*)=(.*)\)', attrs['filter'])
             if not m:
                 print 'RESULT'
                 print 'code: 1'
                 print 'info: invalid filter'
                 print
                 return
             # Reverse-map the LDAP attribute to a user-model field;
             # for/else triggers when no mapping matches.
             for user_attribute, ldap_attribute in MAPPING.iteritems():
                 if ldap_attribute == m.group(1):
                     break
             else:
                 print 'RESULT'
                 print 'code: 1'
                 print 'info: unknown attribute in filter'
                 print
                 return
             value = m.group(2)
             # Translate LDAP wildcard positions into Django ORM lookups.
             if value.endswith('*') and value.startswith('*'):
                 user_attribute += '__icontains'
                 value = value[1:-1]
             elif value.endswith('*'):
                 user_attribute += '__istartswith'
                 value = value[:-1]
             elif value.startswith('*'):
                 user_attribute += '__iendswith'
                 value = value[1:]
             else:
                 user_attribute += '__iexact'
             value = unescape_filter_chars(value)
             qs = qs.filter(**{user_attribute: value.decode('utf-8')})
         for user in qs:
             # Build the LDIF attribute dict; values must be byte strings.
             o = {}
             for user_attribute, ldap_attribute in MAPPING.iteritems():
                 o[ldap_attribute] = [unicode(getattr(user, user_attribute)).encode('utf-8')]
             o['objectClass'] = ['inetOrgPerson']
             dn = 'uid=%s,%s' % (escape_dn_chars(o['uid'][0]), attrs['suffix'])
             self.logger.debug(u'sending entry %s %s', dn, o)
             ldif_writer.unparse(dn, o)
         # Trailing comma suppresses the extra newline (Python 2 print).
         print out.getvalue(),
         out.close()
     print 'RESULT'
     print 'code: 0'
     print 'info: RockNRoll'
     print
Esempio n. 14
0
class globals:
    """Accumulate attributes per DN, then dump them all as LDIF to stdout.

    NOTE(review): the class name shadows the builtin globals(); kept for
    interface compatibility with existing callers.
    """

    def __init__(self):
        self.global_objs = {}
        self.ldif = LDIFWriter(sys.stdout)

    def add_attr(self, dn, attname, vals):
        """Record attribute *attname* with values *vals* for entry *dn*."""
        # Consistent 4-space indentation (original mixed 3- and 4-space).
        if dn not in self.global_objs:
            self.global_objs[dn] = {}
        self.global_objs[dn][attname] = vals

    def print_all(self):
        """Emit every collected entry as LDIF, then reset the store."""
        for dn, obj in self.global_objs.items():
            # The original ended the loop body with a no-op `continue`.
            self.ldif.unparse(dn, obj)
        self.global_objs = {}
Esempio n. 15
0
class globals:
    """Collect per-DN attribute dicts and flush them to stdout as LDIF.

    NOTE(review): the class name shadows the builtin globals(); kept
    unchanged for caller compatibility.
    """

    def __init__(self):
        self.global_objs = {}
        self.ldif = LDIFWriter(sys.stdout)

    def add_attr(self, dn, attname, vals):
        """Store values *vals* under attribute *attname* for entry *dn*."""
        if dn not in self.global_objs:
            self.global_objs[dn] = {}
        self.global_objs[dn][attname] = vals

    def print_all(self):
        """Write all collected entries as LDIF and clear the store."""
        for dn, obj in self.global_objs.items():
            # Removed the redundant trailing `continue` from the original.
            self.ldif.unparse(dn, obj)
        self.global_objs = {}
Esempio n. 16
0
 def __init__(self,input,output,root,ns1,ns2,outputdir='.'):
    LDIFParser.__init__(self,input)
    rantoday = open('.rantoday', 'r')
    try:
        serial = rantoday.readline()
        print serial
        if serial != '': 
            serial = int(serial) 
        else: 
            serial = 0
        serialNum = serial + 1
        rantoday.close()
        rantoday = open('.rantoday', 'w+')
        rantoday.write(str(serialNum))
        print serialNum
        rantoday.close()
    except IOError:
        exit("couldn't read local directory, to create a .rantoday file to help counting the reruns to get the serial to increase")
    self.serial = serialNum       
    self.writer = LDIFWriter(output)
    self.megaArray = {}
    self.cnamed = []
    self.valueInEntries = []
    self.megaTree = {}
    self.subDomainRecords = {}
    self.root = root
    self.ns1 = ns1
    self.ns2 = ns2
    self.zoneSubDirectory = outputdir
    self.megaWeirdArray= {}
    self.zoneArray = {}
    self.zoneArray = {}
    self.managedZones = {}
    self.exempted = {2: ['co.uk', 'org.ua', 'com.ar']}
Esempio n. 17
0
 def ldif_writer(self):
     # Lazily create (and cache) the LDIFWriter used for zone backups.
     if not self._ldif_writer:
         logger.info(
             'Original zones will be saved in LDIF format in '
             '%s file', self.backup_path)
         # NOTE(review): the backup file handle is never closed explicitly;
         # it lives for the lifetime of this object.
         self._ldif_writer = LDIFWriter(open(self.backup_path, 'w'))
     return self._ldif_writer
Esempio n. 18
0
class ActiveDirectoryDefaultUserSetup(LDIFParser):
    """LDIF filter that assigns a default password to every user entry."""

    password = ""

    def __init__(self, input, output, password):
        LDIFParser.__init__(self, input)
        self.writer = LDIFWriter(output)
        self.password = password

    def setUserDefaultPassword(self, dn, entry):
        """Set userPassword on entries whose objectClass includes 'user'."""
        # Membership test on the dict directly instead of entry.keys().
        if 'objectClass' not in entry:
            return
        if 'user' in entry['objectClass']:
            entry['userPassword'] = [self.password]

    def handle(self, dn, entry):
        """Apply the password default, then write the entry through."""
        self.setUserDefaultPassword(dn, entry)
        self.writer.unparse(dn, entry)
class ActiveDirectoryDefaultUserSetup(LDIFParser):
    """Pass-through LDIF filter that gives 'user' entries a default password."""

    password = ""

    def __init__(self, input, output, password):
        LDIFParser.__init__(self, input)
        self.writer = LDIFWriter(output)
        self.password = password

    def setUserDefaultPassword(self, dn, entry):
        """Assign the configured password to entries of objectClass 'user'."""
        # Direct membership test replaces the `not in entry.keys()` idiom.
        if 'objectClass' not in entry:
            return
        if 'user' in entry['objectClass']:
            entry['userPassword'] = [self.password]

    def handle(self, dn, entry):
        """Process one parsed entry and emit it."""
        self.setUserDefaultPassword(dn, entry)
        self.writer.unparse(dn, entry)
Esempio n. 20
0
 def to_ldif(cls, data):
     """Convert a list of (dn, attrs) entries to LDIF.

     See https://github.com/atterdag/ansible-filters-ldif for how to
     format the input dictionaries.  (The original had this as a second,
     dead string statement; merged into the docstring.)

     Raises AnsibleFilterError when *data* is not a list or an entry
     cannot be serialized.
     """
     # Guard clause replaces the trailing else branch.
     if not isinstance(data, list):
         raise AnsibleFilterError(
             'Input data to_ldif filter is not a list(%s)' % data)
     try:
         ldif_data = StringIO()
         ldif_writer = LDIFWriter(ldif_data)
         for entry in data:
             ldif_writer.unparse(str(entry[0]),
                                 cls.encode_values(dict(entry[1])))
         return ldif_data.getvalue()
     except Exception:
         raise AnsibleFilterError(
             'Invalid input data for to_ldif filter (%s)' % data)
Esempio n. 21
0
def generate_ldif(action, use_template=False, sync_source='MDM', **kwargs):
    """generate ldif string by kwargs or from template.

    Parameters:
        action: key into the `templates` table when use_template is True.
        use_template: boolean, 'CREATESESSION' and 'CLOSESESSION' should
            use template, others False.
        sync_source: 'MDM' or 'Directory'
        **kwargs: all items of dict; must include 'dn' when not using a
            template.

    Returns:
        The generated LDIF string, or "" when the template or 'dn' is
        missing.
    """
    output = StringIO()
    w = LDIFWriter(output)
    if use_template:
        # 'in' replaces the Python-2-only dict.has_key(); also close the
        # buffer on this early return (the original leaked it).
        if action not in templates:
            output.close()
            return ""

        for record in templates[action]:
            w.unparse(*record)

        # getvalue() replaces the reset()/read() dance.
        r = output.getvalue()
        output.close()

        if sync_source:
            r = r.format(sync_source=sync_source,
                         current_time=datetime.today().strftime("%Y%m%d%H%M%SZ"),
                         **kwargs)
        else:
            r = r.format(**kwargs)

        return r
    else:
        if 'dn' not in kwargs:
            output.close()
            return ""
        dn = kwargs.pop('dn')
        # LDIFWriter expects each attribute value to be a list.
        for k, v in kwargs.items():
            if not isinstance(v, list):
                kwargs[k] = [v]

        w.unparse(dn, kwargs)

        r = output.getvalue()
        output.close()

        return r
Esempio n. 22
0
class ImportTransformer(LDIFParser):
    """Rewrite an exported LDIF so it can be imported into the target server."""

    def __init__(self, f_import, f_outport):
        self.f_outport = f_outport
        self.writer = LDIFWriter(self.f_outport)
        super().__init__(f_import)

    def handle(self, dn, entry):
        """Transform one parsed entry and write it to the output."""
        attrs = entry.keys()
        # We don't know what form the keys/attrs are in
        # so we have to establish our own map of our
        # idea of these to the attrs idea.
        amap = dict([(x.lower(), x) for x in attrs])

        # This has to exist ....
        oc_a = amap['objectclass']

        # If memberOf is present, extend the objectClass list.
        # (Membership test replaces the original bare except.)
        if 'memberof' in amap:
            entry[oc_a] += [b'nsMemberOf']

        # strip entryCSN -- narrowed from a bare except: that could hide
        # unrelated errors.
        try:
            entry.pop(amap['entrycsn'])
        except KeyError:
            # No ecsn, skip
            pass

        # strip structuralObjectClass
        try:
            entry.pop(amap['structuralobjectclass'])
        except KeyError:
            # No sco, skip
            pass

        # Write it out
        self.writer.unparse(dn, entry)
Esempio n. 23
0
def dump_tree_sorted(ldap_server, base_domain, out_fil):
    """Dump the subtree rooted at *base_domain* into *out_fil* as sorted LDIF."""
    writer = SortedLdifWriter(LDIFWriter(out_fil))
    try:
        results = ldap_server.search_s(base_domain, ldap.SCOPE_SUBTREE)
        for dn, attrs in results:
            writer.unparse(dn, attrs)
    except ldap.NO_SUCH_OBJECT:
        # An empty tree raises NO_SUCH_OBJECT; an empty ldif is exactly
        # what we want in that case, so swallow it.
        pass

    writer.commit()
Esempio n. 24
0
def ldap_export():
    """Export every entry under search_base into a uniquely named LDIF file.

    Exits the process with status 1 on any failure.
    """
    # Plain string: the original used an f-string with no placeholders.
    print("Exporting LDAP environment")
    try:
        all_entries = conn.search_s(search_base, search_scope)
        ldif_file = f"soca_export_{uuid.uuid4()}.ldif"
        for entry in all_entries:
            print(f"Export {entry[0]}")

        print(f"Creating LDIF: {ldif_file}")
        # Context manager closes the output file (the original leaked the
        # handle); the intermediate copy list was redundant.
        with open(ldif_file, "wb") as fh:
            ldif_writer = LDIFWriter(fh)
            for dn, record in all_entries:
                ldif_writer.unparse(dn, record)
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(f"Unable to export LDAP environment due to {err}. {exc_type}, {fname}, {exc_tb.tb_lineno}")
        sys.exit(1)
Esempio n. 25
0
    class MyLDIF(LDIFParser):
        """LDIF filter implementing per-dn backup rules."""

        def __init__(self, input, output):
            LDIFParser.__init__(self, input)
            self.writer = LDIFWriter(output)

        # Encode special dn-specific backup logic here.
        def handle(self, dn, entry):
            if dn in make_modify:
                if 'memberUid' not in entry:
                    # No members in this group, discard
                    return
                members = entry['memberUid']
                # Emit a modify record replacing the membership list.
                self.writer.unparse(dn, [(ldap.MOD_REPLACE, 'memberUid', members)])
            elif dn in remove:
                return
            elif dn is None:  # identity check for None (was '== None')
                return
            else:
                self.writer.unparse(dn, entry)
Esempio n. 26
0
    class MyLDIF(LDIFParser):
        """LDIF filter implementing per-dn backup rules."""

        def __init__(self, input, output):
            LDIFParser.__init__(self, input)
            self.writer = LDIFWriter(output)

        # Encode special dn-specific backup logic here.
        def handle(self, dn, entry):
            if dn in make_modify:
                if 'memberUid' not in entry:
                    # No members in this group, discard
                    return
                members = entry['memberUid']
                # Emit a modify record replacing the membership list.
                self.writer.unparse(dn, [(ldap.MOD_REPLACE, 'memberUid', members)])
            elif dn in remove:
                return
            elif dn is None:  # identity check for None (was '== None')
                return
            else:
                self.writer.unparse(dn, entry)
Esempio n. 27
0
class FixLDIF(LDIFParser):
    """Rebase DNs onto the configured base and scrub unwanted data."""

    def __init__(self, input, output):
        LDIFParser.__init__(self, input)
        self.writer = LDIFWriter(output)

    def handle(self, dn, entry):
        """Clean one parsed entry and write it to the output LDIF."""
        new_dn = self.fix_dn(dn)
        self.fix_entry(entry)
        self.fix_objectclass(entry['objectclass'])
        self.writer.unparse(new_dn, entry)

    def fix_dn(self, dn):
        """Re-root the DN: keep the first RDN, append the global basedn."""
        leading = dn.split(',', 1)[0]
        return '{0},{1}'.format(leading, basedn)

    def fix_entry(self, entry):
        """Delete ignored attributes and copy configured attribute pairs."""
        for attr in ignore_attribute:
            if attr in entry:
                del entry[attr]
        for target, source in copy_attribute:
            entry[target] = entry[source]

    def fix_objectclass(self, objectclass):
        """Strip object classes listed in ignore_objectclass."""
        for oc in ignore_objectclass:
            if oc in objectclass:
                objectclass.remove(oc)
Esempio n. 28
0
class ImportTransformer(LDIFParser):
    """Rewrite an exported LDIF for import, dropping excluded attributes."""

    def __init__(self, f_import, f_outport, exclude_attributes_set):
        self.exclude_attributes_set = exclude_attributes_set
        self.f_outport = f_outport
        self.writer = LDIFWriter(self.f_outport)
        super().__init__(f_import)

    def handle(self, dn, entry):
        """Transform one parsed entry and write it to the output."""
        attrs = entry.keys()
        # We don't know what form the keys/attrs are in
        # so we have to establish our own map of our
        # idea of these to the attrs idea.
        amap = dict([(x.lower(), x) for x in attrs])

        # This has to exist ....
        oc_a = amap['objectclass']

        # If memberOf is present, extend the objectClass list to keep the
        # entry valid. (Membership test replaces the original bare except.)
        if 'memberof' in amap:
            entry[oc_a] += [b'nsMemberOf']

        # Strip anything in the exclude set -- narrowed from a bare
        # except: that could hide unrelated errors.
        for attr in self.exclude_attributes_set:
            try:
                entry.pop(amap[attr])
            except KeyError:
                # Not found, move on.
                pass

        # Write it out
        self.writer.unparse(dn, entry)
Esempio n. 29
0
def generate_ldif_from_list(action, array):
    """Generate an LDIF string from a list of entry dicts.

    Parameters:
        action: unused here; kept for interface compatibility with callers.
        array: a list of dicts, each describing a user or group entry and
            expected to contain a 'dn' key.

    Returns:
        The LDIF text for all entries that had a 'dn', or None when *array*
        is not a list (matching the original implicit behavior).
    """
    if isinstance(array, list):
        output = StringIO()
        w = LDIFWriter(output)
        for a in array:
            # 'dn' in a replaces the Python-2-only dict.has_key().
            if 'dn' in a:
                dn = a.pop('dn')
                # LDIFWriter expects every attribute value to be a list.
                for k, v in a.items():
                    if not isinstance(v, list):
                        a[k] = [v]

                w.unparse(dn, a)
            else:
                logger.error('the element of ldif does not have "dn": %s', a)

        # getvalue() works on all StringIO implementations and avoids the
        # reset()/read() dance (reset() does not exist on io.StringIO).
        r = output.getvalue()
        output.close()

        return r
Esempio n. 30
0
    def fixLdapBindDN(self):
        """Rewrite bindDN fields in the backed-up appliance and oxTrust
        config LDIFs to 'cn=Directory Manager'.

        Each LDIF is parsed, patched into a temp file, then copied back
        over the original with `cp` and the temp file removed.
        """
        applience_fn = os.path.join(self.backupDir, 'ldif/appliance.ldif')
        parser = MyLDIF(open(applience_fn, 'rb'), None, True)
        parser.parse()
        tmp_fn = '/tmp/appliance.ldif'
        processed_fp = open(tmp_fn, 'w')
        ldif_writer = LDIFWriter(processed_fp)

        for dn, entry in parser.dn_entry:
            if 'oxIDPAuthentication' in entry:
                # Patch both the top-level bindDN and the one nested in the
                # JSON-encoded 'config' field.
                tmp_json = json.loads(entry['oxIDPAuthentication'][0])
                tmp_json['bindDN'] = 'cn=Directory Manager'
                tmp_config = json.loads(tmp_json['config'])
                tmp_config['bindDN'] = 'cn=Directory Manager'
                tmp_json['config'] = json.dumps(tmp_config)
                entry['oxIDPAuthentication'] = [json.dumps(tmp_json)]

            ldif_writer.unparse(dn, entry)

        processed_fp.close()
        os.system('cp {0} {1}'.format(tmp_fn, applience_fn))
        os.remove(tmp_fn)

        oxtrust_config_fn = os.path.join(self.backupDir,
                                         'ldif/oxtrust_config.ldif')
        parser = MyLDIF(open(oxtrust_config_fn, 'rb'), None, True)
        parser.parse()
        tmp_fn = '/tmp/oxtrust_config.ldif'
        processed_fp = open(tmp_fn, 'w')
        ldif_writer = LDIFWriter(processed_fp)

        for dn, entry in parser.dn_entry:
            if 'oxTrustConfCacheRefresh' in entry:
                # Only the nested inumConfig bindDN needs patching here.
                tmp_json = json.loads(entry['oxTrustConfCacheRefresh'][0])
                tmp_json['inumConfig']['bindDN'] = 'cn=Directory Manager'
                entry['oxTrustConfCacheRefresh'] = [json.dumps(tmp_json)]

            ldif_writer.unparse(dn, entry)

        processed_fp.close()
        os.system('cp {0} {1}'.format(tmp_fn, oxtrust_config_fn))
        os.remove(tmp_fn)
Esempio n. 31
0
def generate_ldif(action, use_template=False, sync_source='MDM', **kwargs):
    """generate ldif string by kwargs or from template.

    Parameters:
        action: key into the `templates` table when use_template is True.
        use_template: boolean, 'CREATESESSION' and 'CLOSESESSION' should
            use template, others False.
        sync_source: 'MDM' or 'Directory'
        **kwargs: all items of dict; must include 'dn' when not using a
            template.

    Returns:
        The generated LDIF string, or "" when the template or 'dn' is
        missing.
    """
    output = StringIO()
    w = LDIFWriter(output)
    if use_template:
        # 'in' replaces the Python-2-only dict.has_key(); also close the
        # buffer on this early return (the original leaked it).
        if action not in templates:
            output.close()
            return ""

        for record in templates[action]:
            w.unparse(*record)

        # getvalue() replaces the reset()/read() dance.
        r = output.getvalue()
        output.close()

        if sync_source:
            r = r.format(
                sync_source=sync_source,
                current_time=datetime.today().strftime("%Y%m%d%H%M%SZ"),
                **kwargs)
        else:
            r = r.format(**kwargs)

        return r
    else:
        if 'dn' not in kwargs:
            output.close()
            return ""
        dn = kwargs.pop('dn')
        # LDIFWriter expects each attribute value to be a list.
        for k, v in kwargs.items():
            if not isinstance(v, list):
                kwargs[k] = [v]

        w.unparse(dn, kwargs)

        r = output.getvalue()
        output.close()

        return r
Esempio n. 32
0
def action_dump(conn, basedn, shorten=True, rewrite_dn=True):
    """Dump the subtree at *basedn* to stdout as LDIF.

    conn: bound python-ldap connection.
    basedn: search base; exits with status 1 when it does not exist.
    shorten: abbreviate long attribute values via abbrev_value.
    rewrite_dn: replace basedn inside each dn with a fixed placeholder
        base so dumps from different directories compare equal.

    NOTE(review): Python 2 era code (iteritems, bytes dns from search_s).
    """
    writer = LDIFWriter(sys.stdout)
    try:
        for dn, attrs in conn.search_s(basedn, ldap.SCOPE_SUBTREE):
            if rewrite_dn:
                # Normalize the dn via a decode/replace/encode round-trip.
                dn = (dn.decode("utf-8").replace(
                    basedn, "dc=unified,dc=base,dc=dn").encode("utf-8"))
            if shorten:
                attrs = {
                    k: [abbrev_value(v) for v in vals]
                    for k, vals in attrs.iteritems()
                }
            try:
                writer.unparse(dn, attrs)
            except UnicodeDecodeError:
                # Some dns fail as bytes; retry with the decoded text form.
                writer.unparse(dn.decode("utf-8"), attrs)
    except ldap.NO_SUCH_OBJECT:
        print("No object '%s' in directory." % basedn, file=sys.stderr)
        sys.exit(1)
    def fixLdapBindDN(self):
        """Rewrite bindDN fields in the backed-up appliance and oxTrust
        config LDIFs to 'cn=Directory Manager'.

        Each LDIF is parsed, patched into a temp file, then copied back
        over the original with `cp` and the temp file removed.
        """
        applience_fn = os.path.join(self.backupDir, 'ldif/appliance.ldif')
        parser = MyLDIF(open(applience_fn, 'rb'), None, True)
        parser.parse()
        tmp_fn = '/tmp/appliance.ldif'
        processed_fp = open(tmp_fn, 'w')
        ldif_writer = LDIFWriter(processed_fp)

        for dn, entry in parser.dn_entry:
            if 'oxIDPAuthentication' in entry:
                # Patch the bindDN nested inside the JSON-encoded 'config'.
                tmp_json = json.loads(entry['oxIDPAuthentication'][0])
                tmp_config = json.loads(tmp_json['config'])
                tmp_config['bindDN'] = 'cn=Directory Manager'
                tmp_json['config'] = json.dumps(tmp_config)
                entry['oxIDPAuthentication'] = [json.dumps(tmp_json)]

            ldif_writer.unparse(dn, entry)

        processed_fp.close()
        os.system('cp {0} {1}'.format(tmp_fn, applience_fn))
        os.remove(tmp_fn)

        oxtrust_config_fn = os.path.join(self.backupDir, 'ldif/oxtrust_config.ldif')
        parser = MyLDIF(open(oxtrust_config_fn, 'rb'), None, True)
        parser.parse()
        tmp_fn = '/tmp/oxtrust_config.ldif'
        processed_fp = open(tmp_fn, 'w')
        ldif_writer = LDIFWriter(processed_fp)

        for dn, entry in parser.dn_entry:
            if 'oxTrustConfCacheRefresh' in entry:
                # Only the nested inumConfig bindDN needs patching here.
                tmp_json = json.loads(entry['oxTrustConfCacheRefresh'][0])
                tmp_json['inumConfig']['bindDN'] = 'cn=Directory Manager'
                entry['oxTrustConfCacheRefresh'] = [json.dumps(tmp_json)]

            ldif_writer.unparse(dn, entry)

        processed_fp.close()
        os.system('cp {0} {1}'.format(tmp_fn, oxtrust_config_fn))
        os.remove(tmp_fn)
class ActiveDirectoryToOpenLdapLDIFConvertor(LDIFParser):
    """Stream-convert Active Directory LDIF entries to an OpenLDAP-friendly form.

    For each parsed entry: add extra objectClass values for matching DNs,
    substitute objectClass values for template DNs, apply the global
    objectClass rename table, de-duplicate known-noisy attributes, and emit
    the result through an LDIFWriter.
    """

    objectclassAddsBasedOnDN = {
        'CN=ExchangeActiveSyncDevices': 'exchangeActiveSyncDevices'
    }

    objectclassChangesBasedOnDN = {
        'CN=_Template ': {
            'user': '******'
        },
        'CN=_Template_': {
            'user': '******'
        },
        'CN=_Template\, ': {
            'user': '******'
        }
    }

    objectclassMappings = {
        'top': 'mstop',
        'user': '******',
        'group': 'customActiveDirectoryGroup',
        'contact': 'customActiveDirectoryContact'
    }

    attributetypesValuesDuplicates = ['dSCorePropagationData']

    def __init__(self, input, output):
        """Parse LDIF from *input*, write converted records to *output*."""
        LDIFParser.__init__(self, input)
        self.writer = LDIFWriter(output)

    def addObjectclassesBasedOnDN(self, dn, entry):
        """Append configured objectClass values when the DN matches a marker."""
        lowered_dn = dn.lower()
        for marker in self.objectclassAddsBasedOnDN:
            if marker.lower() in lowered_dn:  # case insensitive match
                entry.setdefault('objectClass', []).append(
                    self.objectclassAddsBasedOnDN[marker])

    def changeObjectclassesBasedOnDN(self, dn, entry):
        """Replace objectClass values for DNs that match a configured marker."""
        if 'objectClass' not in entry:
            return
        lowered_dn = dn.lower()
        for marker in self.objectclassChangesBasedOnDN:
            if marker.lower() not in lowered_dn:  # case insensitive match
                continue
            replacements = self.objectclassChangesBasedOnDN[marker]
            for source in replacements:
                for idx, current in enumerate(entry['objectClass']):
                    if current == source:
                        entry['objectClass'][idx] = replacements[source]

    def changeObjectclasses(self, dn, entry):
        """Apply the global objectClass rename table to every value in place."""
        if 'objectClass' in entry:
            classes = entry['objectClass']
            for idx, value in enumerate(classes):
                if value in self.objectclassMappings:
                    classes[idx] = self.objectclassMappings[value]

    def removeDuplicateAttributeValues(self, dn, entry):
        """Drop repeated values from attributes known to carry duplicates."""
        for name in self.attributetypesValuesDuplicates:
            if name in entry:
                entry[name] = list(set(entry[name]))

    def handle(self, dn, entry):
        """LDIFParser callback: transform one entry and emit it."""
        self.addObjectclassesBasedOnDN(dn, entry)
        self.changeObjectclassesBasedOnDN(dn, entry)
        self.changeObjectclasses(dn, entry)
        self.removeDuplicateAttributeValues(dn, entry)
        self.writer.unparse(dn, entry)
Esempio n. 35
0
 def __init__(self):
     """Prepare an empty object map and an LDIF writer bound to stdout."""
     self.global_objs = {}
     self.ldif = LDIFWriter(sys.stdout)
Esempio n. 36
0
 def __init__(self, input, output):
     """Read LDIF records from *input*; write transformed records to *output*."""
     LDIFParser.__init__(self, input)
     self.writer = LDIFWriter(output)
Esempio n. 37
0
        return True

    @adminConnectLdap
    def fullElementDNtoText(self, relDN="", ldapFilter='(objectclass=*)'):
        """Return all entries under the given DN as LDIF text.

        relDN is resolved relative to self.baseDN; ldapFilter narrows the
        subtree search. Returns the LDIF dump as a string, or False if the
        LDAP search fails. (Python 2 code: note the `except LDAPError, e`
        syntax.)
        """
        DN = self.addDN(relDN, self.baseDN)
        listDN=[]  # NOTE(review): never used — candidate for removal
        try:
            dnList = self.conLdap.search_s(DN,
                                            SCOPE_SUBTREE,
                                            ldapFilter,None)
        except LDAPError, e:
            self.printERROR("fullElementDN: "+e[0]['desc'])
            return False
        # Serialize the (dn, attrs) pairs through LDIFWriter into an
        # in-memory buffer, then return the whole buffer contents.
        FDOUT = StringIO.StringIO("")
        writer = LDIFWriter(FDOUT)
        for dn, f in dnList:
            writer.unparse(dn, f)
        FDOUT.seek(0)
        return FDOUT.read()

    @adminConnectLdap
    def fullElementSambaDNtoText(self, relDN=""):
        """Return all entries of the Samba branch as LDIF text."""
        return self.fullElementDNtoText(relDN,'(|(|(|(|(ou:dn:=Samba)\
(ou:dn:=Unix))(ou:dn:=LDAP))(!(ou:dn:=Services)))(ou=Services))')

    @adminConnectLdap
    def fullElementUnixDNtoText(self, relDN=""):
        """Выводит все внутренние элементы ветки Unix в виде текста"""
        return self.fullElementDNtoText(relDN,'(|(|(|(ou:dn:=Unix)\
    def processBackupData(self):
        """Merge the backup LDIF data with the current installation's data.

        Steps:
          1. Recover primaryKey/localPrimaryKey from the backup appliance.
          2. Overlay old attribute values onto current entries (single-valued
             JSON attributes are deep-merged; ignoreList attributes skipped).
          3. Append backup-only entries, expanding JSON-list multivalue
             attributes and applying 3.1.3-specific schema tweaks.
          4. Rewrite renamed attributes/objectClasses line by line into
             self.o_gluu, and assemble self.o_site from the site LDIF data.
        """
        logging.info('Processing the LDIF data.')

        # DN of the attribute entry that receives the extra oxAuthClaimName
        # value during the 3.1.3 migration below.
        attrib_dn = "inum={0}!0005!D2E0,ou=attributes,o={0},o=gluu".format(self.inumOrg)

        processed_fp = open(self.processTempFile, 'w')

        ldif_writer = LDIFWriter(processed_fp)

        # Determine current primary key

        appliences = MyLDIF(open(os.path.join(self.backupDir, 'ldif','appliance.ldif'), 'rb'), None)
        appliences.parse()

        for entry in appliences.entries:
            if 'oxIDPAuthentication' in entry:
                oxIDPAuthentication = json.loads(entry['oxIDPAuthentication'][0])
                idp_config = json.loads(oxIDPAuthentication['config'])
                primaryKey = idp_config['primaryKey']
                localPrimaryKey = idp_config['localPrimaryKey']

        currentDNs = self.getDns(self.currentData)
        old_dn_map = self.getOldEntryMap()

        # Attributes never carried over from the backup.
        ignoreList = ['objectClass', 'ou', 'oxIDPAuthentication',
                      'gluuFreeMemory', 'gluuSystemUptime',
                      'oxLogViewerConfig', 'gluuLastUpdate']
        # Attributes whose single JSON-list value must be split into
        # one LDAP value per element.
        multivalueAttrs = ['oxTrustEmail', 'oxTrustPhoneValue', 'oxTrustImsValue',
                           'oxTrustPhotos', 'oxTrustAddresses', 'oxTrustRole',
                           'oxTrustEntitlements', 'oxTrustx509Certificate']

        if self.oxIDPAuthentication == 1:
            ignoreList.remove('oxIDPAuthentication')

        # Rewriting all the new DNs in the new installation to ldif file
        nodn = len(currentDNs)
        for cnt, dn in enumerate(currentDNs):
            progress_bar(cnt, nodn, 'Rewriting DNs')
            new_entry = self.getEntry(self.currentData, dn)

            if 'ou=appliances' in dn:
                if 'oxIDPAuthentication' in new_entry:
                    # Carry the backup's primary keys over into the new
                    # appliance's IDP authentication config.
                    oxIDPAuthentication = json.loads(new_entry['oxIDPAuthentication'][0])
                    idp_config = json.loads(oxIDPAuthentication['config'])
                    idp_config['primaryKey'] = primaryKey
                    idp_config['localPrimaryKey'] = localPrimaryKey
                    oxIDPAuthentication['config'] = json.dumps(idp_config)
                    new_entry['oxIDPAuthentication'] = [ json.dumps(oxIDPAuthentication) ]

            if "o=site" in dn:
                continue  # skip all the o=site DNs
            if dn not in old_dn_map.keys():
                #  Write to the file if there is no matching old DN data
                ldif_writer.unparse(dn, new_entry)
                continue

            old_entry = self.getEntry(os.path.join(self.ldifDir, old_dn_map[dn]), dn)

            for attr in old_entry.keys():
                if attr in ignoreList:
                    continue

                if attr not in new_entry:
                    new_entry[attr] = old_entry[attr]
                elif old_entry[attr] != new_entry[attr]:
                    if len(old_entry[attr]) == 1:
                        try:
                            # Single-valued JSON attributes are deep-merged;
                            # values from the new installation win.
                            old_json = json.loads(old_entry[attr][0])
                            new_json = json.loads(new_entry[attr][0])
                            new_json = merge(new_json, old_json)
                            new_entry[attr] = [json.dumps(new_json)]
                        except:
                            if attr == 'oxScript':
                                new_entry[attr] = new_entry[attr]
                                logging.debug("Keeping new value for %s", attr)
                            else:
                                new_entry[attr] = old_entry[attr]
                                logging.debug("Keeping old value for %s", attr)
                    else:
                        new_entry[attr] = old_entry[attr]
                        logging.debug("Keep multiple old values for %s", attr)

            ldif_writer.unparse(dn, new_entry)

        progress_bar(0, 0, 'Rewriting DNs', True)

        # Pick all the left out DNs from the old DN map and write them to the LDIF
        nodn = len(old_dn_map)

        ldif_shelve_dict = {}

        sector_identifiers = 'ou=sector_identifiers,o={},o=gluu'.format(self.inumOrg)

        for cnt, dn in enumerate(sorted(old_dn_map, key=len)):
            progress_bar(cnt, nodn, 'Perapring DNs for ' + self.oxVersion)
            if "o=site" in dn:
                continue  # skip all the o=site DNs
            if dn in currentDNs:
                continue  # Already processed

            cur_ldif_file = old_dn_map[dn]

            # Parse each backup LDIF file only once; cache its shelve db.
            if not cur_ldif_file in ldif_shelve_dict:
                sdb = DBLDIF(os.path.join(self.ldifDir, cur_ldif_file))
                sdb.parse()
                ldif_shelve_dict[cur_ldif_file] = sdb.sdb

            entry = ldif_shelve_dict[cur_ldif_file][str(dn)]

            for attr in entry.keys():
                if attr not in multivalueAttrs:
                    continue  # skip conversion

                # Old data may hold a JSON list inside a single value; expand
                # it into one LDAP value per list element.
                attr_values = []
                for val in entry[attr]:
                    json_value = None
                    try:
                        json_value = json.loads(val)
                        if type(json_value) is list:
                            attr_values.extend([json.dumps(v) for v in json_value])
                    except:
                        logging.debug('Cannot parse multival %s in DN %s', attr, dn)
                        attr_values.append(val)
                entry[attr] = attr_values

            if '3.1.3' in self.oxVersion:

                if dn == attrib_dn:
                    if 'oxAuthClaimName' in entry and not 'member_off' in entry['oxAuthClaimName']:
                        entry['oxAuthClaimName'].append('member_off')
                    else:
                        entry['oxAuthClaimName'] = ['member_off']

                if sector_identifiers in dn:
                    if dn.startswith('inum'):
                        # 3.1.3 renames the RDN attribute inum -> oxId.
                        dn = dn.replace('inum=', 'oxId=')
                        oxId = entry['inum'][:]
                        entry['oxId'] = oxId
                        del entry['inum']

                if 'ou=clients' in dn:
                    # BUGFIX: was `or`, which is true whenever either spelling
                    # is absent, so existing grant types were clobbered with
                    # the default. Set the default only when NEITHER casing of
                    # the attribute is present.
                    if ('oxAuthGrantType' not in entry) and ('oxauthgranttype' not in entry):
                        entry['oxAuthGrantType'] = ['authorization_code']

            ldif_writer.unparse(dn, entry)

        # Finally
        processed_fp.close()

        progress_bar(0, 0, 'Perapring DNs for ' + self.oxVersion, True)

        # Count the lines once for the progress bar (with-block closes the
        # handle; it used to be leaked).
        with open(self.processTempFile) as count_fp:
            nodn = sum(1 for line in count_fp)

        # Update the Schema change for lastModifiedTime
        with open(self.processTempFile, 'r') as infile:
            with open(self.o_gluu, 'w') as outfile:
                for cnt, line in enumerate(infile):
                    progress_bar(cnt, nodn, 'converting Dns')
                    line = line.replace("lastModifiedTime", "oxLastAccessTime")
                    line = line.replace('oxAuthUmaResourceSet', 'oxUmaResource')
                    if line.startswith('oxAuthenticationMode'):
                         line = 'oxAuthenticationMode: auth_ldap_server\n'
                    elif line.startswith('oxTrustAuthenticationMode'):
                         line = 'oxTrustAuthenticationMode: auth_ldap_server\n'
                    if 'oxAuthAuthenticationTime' in line:
                        line = self.convertTimeStamp(line)
                    if line.startswith('oxMemcachedConfiguration:') or line.startswith('oxCacheConfiguration:'):
                        line = 'oxCacheConfiguration: {"cacheProviderType":"IN_MEMORY","memcachedConfiguration":{"servers":"localhost:11211","maxOperationQueueLength":100000,"bufferSize":32768,"defaultPutExpiration":60,"connectionFactoryType":"DEFAULT"},"inMemoryConfiguration":{"defaultPutExpiration":60},"redisConfiguration":{"redisProviderType":"STANDALONE","servers":"localhost:6379","defaultPutExpiration":60}}'

                    # Map leftover custom ox- objectClasses onto gluuCustomPerson.
                    if ("objectClass:" in line and line.split("objectClass: ")[1][:3] == 'ox-'):
                        line = line.replace(line, 'objectClass: gluuCustomPerson' + '\n')
                    if 'oxType' not in line and 'gluuVdsCacheRefreshLastUpdate' not in line and 'objectClass: person' not in line and 'objectClass: organizationalPerson' not in line and 'objectClass: inetOrgPerson' not in line:
                        outfile.write(line)

        progress_bar(0, 0, 'converting Dns', True)

        # Strip the LDIF header/footer lines from the backup site data, then
        # concatenate the static site LDIF with it into self.o_site.
        # NOTE(review): "wb" with str data assumes Python 2; under Python 3
        # this write would need mode "w" or bytes — confirm target runtime.
        data = "".join(open(os.path.join(self.backupDir, 'ldif','site.ldif')).readlines()[4:-1])
        open(os.path.join(self.backupDir, 'ldif','site.ldif'), "wb").write(data)
        filenames = [self.o_site_static, os.path.join(self.backupDir, 'ldif','site.ldif')]
        with open(self.o_site, 'w') as outfile:
            for fname in filenames:
                with open(fname) as infile:
                    for line in infile:
                        outfile.write(line)
 def print_nodes(self, stream):
     """Write every stored node to *stream* as LDIF records; return self."""
     writer = LDIFWriter(stream, cols=512)
     for record in self.nodes:
         writer.unparse(record["dn"], record["entries"])
     return self
Esempio n. 40
0
    def execute(self, **options):
        ldap = self.api.Backend.ldap2
        # check LDAP if forwardzones already uses new semantics
        dns_container_dn = DN(self.api.env.container_dns, self.api.env.basedn)
        try:
            container_entry = ldap.get_entry(dns_container_dn)
        except errors.NotFound:
            # DNS container not found, nothing to upgrade
            return False, []

        for config_option in container_entry.get("ipaConfigString", []):
            matched = re.match("^DNSVersion\s+(?P<version>\d+)$",
                               config_option, flags=re.I)
            if matched and int(matched.group("version")) >= 1:
                # forwardzones already uses new semantics,
                # no upgrade is required
                return False, []

        self.log.debug('Updating forward zones')
        # update the DNSVersion, following upgrade can be executed only once
        container_entry.setdefault(
            'ipaConfigString', []).append(u"DNSVersion 1")
        ldap.update_entry(container_entry)

        # Updater in IPA version from 4.0 to 4.1.2 doesn't work well, this
        # should detect if update in past has been executed, and set proper
        # DNSVersion into LDAP
        try:
            fwzones = self.api.Command.dnsforwardzone_find()['result']
        except errors.NotFound:
            # No forwardzones found, update probably has not been executed yet
            pass
        else:
            if fwzones:
                # fwzones exist, do not execute upgrade again
                return False, []

        zones = []
        try:
            # raw values are required to store into ldif
            zones = self.api.Command.dnszone_find(all=True,
                                             raw=True,
                                             sizelimit=0)['result']
        except errors.NotFound:
            pass

        if not zones:
            self.log.debug('No DNS zone to update found')
            return False, []

        zones_to_transform = []

        for zone in zones:
            if (
                zone.get('idnsforwardpolicy', [u'first'])[0] == u'none' or
                zone.get('idnsforwarders', []) == []
            ):
                continue  # don't update zone

            zones_to_transform.append(zone)

        if zones_to_transform:
            # add time to filename
            self.backup_path = time.strftime(self.backup_path)

            # DNs of privileges which contain dns managed permissions
            privileges_to_ldif = set()  # store priviledges only once
            zone_to_privileges = {}  # zone: [privileges cn]

            self.log.info('Zones with specified forwarders with policy different'
                          ' than none will be transformed to forward zones.')
            self.log.info('Original zones will be saved in LDIF format in '
                          '%s file' % self.backup_path)
            try:

                with open(self.backup_path, 'w') as f:
                    writer = LDIFWriter(f)
                    for zone in zones_to_transform:
                        # save backup to ldif
                        try:

                            dn = str(zone['dn'])
                            del zone['dn']  # dn shouldn't be as attribute in ldif
                            writer.unparse(dn, zone)

                            if 'managedBy' in zone:
                                entry = ldap.get_entry(DN(zone['managedBy'][0]))
                                for privilege_member_dn in entry.get('member', []):
                                    privileges_to_ldif.add(privilege_member_dn)
                                writer.unparse(str(entry.dn), dict(entry.raw))

                                # privileges where permission is used
                                if entry.get('member'):
                                    zone_to_privileges[zone['idnsname'][0]] = entry['member']

                            # raw values are required to store into ldif
                            records = self.api.Command['dnsrecord_find'](
                                        zone['idnsname'][0],
                                        all=True,
                                        raw=True,
                                        sizelimit=0)['result']
                            for record in records:
                                if record['idnsname'][0] == u'@':
                                    # zone record was saved before
                                    continue
                                dn = str(record['dn'])
                                del record['dn']
                                writer.unparse(dn, record)

                        except Exception, e:
                            self.log.error('Unable to backup zone %s' %
                                           zone['idnsname'][0])
                            self.log.error(traceback.format_exc())
                            return False, []

                    for privilege_dn in privileges_to_ldif:
                        try:
                            entry = ldap.get_entry(privilege_dn)
                            writer.unparse(str(entry.dn), dict(entry.raw))
                        except Exception, e:
                            self.log.error('Unable to backup privilege %s' %
                                           privilege_dn)
                            self.log.error(traceback.format_exc())
                            return False, []

                    f.close()
Esempio n. 41
0
 def print_nodes(self, stream):
     """Write every stored node to *stream* as LDIF records; return self."""
     writer = LDIFWriter(stream, cols=512)
     for record in self.nodes:
         writer.unparse(record["dn"], record["entries"])
     return self
Esempio n. 42
0
    def processBackupData(self):
        """Merge the backup LDIF data with the current installation's data.

        Overlays old attribute values onto the current installation's entries
        (single-valued JSON attributes are deep-merged, ignoreList attributes
        skipped), appends backup-only entries with JSON-list multivalue
        expansion, then rewrites renamed attributes line by line into
        self.o_gluu and assembles self.o_site from the site LDIF data.
        """
        logging.info('Processing the LDIF data.')

        processed_fp = open(self.processTempFile, 'w')
        ldif_writer = LDIFWriter(processed_fp)

        currentDNs = self.getDns(self.currentData)
        old_dn_map = self.getOldEntryMap()

        # Attributes never carried over from the backup.
        ignoreList = [
            'objectClass', 'ou', 'oxIDPAuthentication', 'gluuFreeMemory',
            'gluuSystemUptime', 'oxLogViewerConfig', 'gluuLastUpdate'
        ]
        # Attributes whose single JSON-list value must be split into
        # one LDAP value per element.
        multivalueAttrs = [
            'oxTrustEmail', 'oxTrustPhoneValue', 'oxTrustImsValue',
            'oxTrustPhotos', 'oxTrustAddresses', 'oxTrustRole',
            'oxTrustEntitlements', 'oxTrustx509Certificate'
        ]

        if self.oxIDPAuthentication == 1:
            ignoreList.remove('oxIDPAuthentication')

        # Rewriting all the new DNs in the new installation to ldif file
        for dn in currentDNs:
            new_entry = self.getEntry(self.currentData, dn)
            if "o=site" in dn:
                continue  # skip all the o=site DNs
            if dn not in old_dn_map.keys():
                #  Write to the file if there is no matching old DN data
                ldif_writer.unparse(dn, new_entry)
                continue

            old_entry = self.getEntry(
                os.path.join(self.ldifDir, old_dn_map[dn]), dn)
            for attr in old_entry.keys():
                if attr in ignoreList:
                    continue

                if attr not in new_entry:
                    new_entry[attr] = old_entry[attr]
                elif old_entry[attr] != new_entry[attr]:
                    if len(old_entry[attr]) == 1:
                        try:
                            # Single-valued JSON attributes are deep-merged;
                            # values from the new installation win.
                            old_json = json.loads(old_entry[attr][0])
                            new_json = json.loads(new_entry[attr][0])
                            new_json = merge(new_json, old_json)
                            new_entry[attr] = [json.dumps(new_json)]
                        except:
                            if attr == 'oxScript':
                                new_entry[attr] = new_entry[attr]
                                logging.debug("Keeping new value for %s", attr)
                            else:
                                new_entry[attr] = old_entry[attr]
                                logging.debug("Keeping old value for %s", attr)
                    else:
                        new_entry[attr] = old_entry[attr]
                        logging.debug("Keep multiple old values for %s", attr)
            ldif_writer.unparse(dn, new_entry)

        # Pick all the left out DNs from the old DN map and write them to the LDIF
        for dn in sorted(old_dn_map, key=len):
            if "o=site" in dn:
                continue  # skip all the o=site DNs
            if dn in currentDNs:
                continue  # Already processed

            entry = self.getEntry(os.path.join(self.ldifDir, old_dn_map[dn]),
                                  dn)

            for attr in entry.keys():
                if attr not in multivalueAttrs:
                    continue  # skip conversion

                # Old data may hold a JSON list inside a single value; expand
                # it into one LDAP value per list element.
                attr_values = []
                for val in entry[attr]:
                    json_value = None
                    try:
                        json_value = json.loads(val)
                        if type(json_value) is list:
                            attr_values.extend(
                                [json.dumps(v) for v in json_value])
                    except:
                        logging.debug('Cannot parse multival %s in DN %s',
                                      attr, dn)
                        attr_values.append(val)
                entry[attr] = attr_values

            ldif_writer.unparse(dn, entry)

        # Finally
        processed_fp.close()

        # Update the Schema change for lastModifiedTime
        with open(self.processTempFile, 'r') as infile:
            with open(self.o_gluu, 'w') as outfile:
                for line in infile:
                    line = line.replace("lastModifiedTime", "oxLastAccessTime")
                    line = line.replace('oxAuthUmaResourceSet',
                                        'oxUmaResource')
                    if 'oxTrustAuthenticationMode' in line:
                        line = line.replace('internal', 'auth_ldap_server')
                    if 'oxAuthAuthenticationTime' in line:
                        line = self.convertTimeStamp(line)
                    # Map leftover custom ox- objectClasses onto gluuCustomPerson.
                    if ("objectClass:" in line
                            and line.split("objectClass: ")[1][:3] == 'ox-'):
                        line = line.replace(
                            line, 'objectClass: gluuCustomPerson' + '\n')
                    if 'oxType' not in line and 'gluuVdsCacheRefreshLastUpdate' not in line and 'objectClass: person' not in line and 'objectClass: organizationalPerson' not in line and 'objectClass: inetOrgPerson' not in line:
                        outfile.write(line)
                    # parser = MyLDIF(open(self.currentData, 'rb'), sys.stdout)
                    # atr = parser.parse()
                    # NOTE(review): base64Types is never used — leftover from
                    # the commented-out block below; candidate for removal.
                    base64Types = [""]
                    # for idx, val in enumerate(parser.entries):
                    # if 'displayName' in val:
                    #     if val['displayName'][0] == 'SCIM Resource Set':
                    #         out = CreateLDIF(parser.getDNs()[idx], val,
                    #                          base64_attrs=base64Types)
                    #         f = open(self.o_gluu, "a")
                    #         f.write('\n')
                    #         f.write(out)
        # Strip the LDIF header/footer lines from the backup site data, then
        # concatenate the static site LDIF with it into self.o_site.
        # NOTE(review): "wb" with str data assumes Python 2 — confirm runtime.
        data = "".join(
            open(os.path.join(self.backupDir, 'ldif',
                              'site.ldif')).readlines()[4:-1])
        open(os.path.join(self.backupDir, 'ldif', 'site.ldif'),
             "wb").write(data)
        filenames = [
            self.o_site_static,
            os.path.join(self.backupDir, 'ldif', 'site.ldif')
        ]
        with open(self.o_site, 'w') as outfile:
            for fname in filenames:
                with open(fname) as infile:
                    for line in infile:
                        outfile.write(line)
Esempio n. 43
0
    def processBackupData(self):
        """Merge the backup LDIF data with the current installation's data.

        Overlays old attribute values onto the current installation's entries
        (single-valued JSON attributes are deep-merged, ignoreList attributes
        skipped), appends backup-only entries with JSON-list multivalue
        expansion, then rewrites renamed attribute/DN strings line by line
        into self.o_gluu.
        """
        logging.info('Processing the LDIF data.')

        processed_fp = open(self.processTempFile, 'w')
        ldif_writer = LDIFWriter(processed_fp)

        currentDNs = self.getDns(self.currentData)
        old_dn_map = self.getOldEntryMap()

        # Attributes never carried over from the backup.
        ignoreList = ['objectClass', 'ou', 'oxAuthJwks', 'oxAuthConfWebKeys']
        # Attributes whose single JSON-list value must be split into
        # one LDAP value per element.
        multivalueAttrs = ['oxTrustEmail', 'oxTrustPhoneValue', 'oxTrustImsValue',
                           'oxTrustPhotos', 'oxTrustAddresses', 'oxTrustRole',
                           'oxTrustEntitlements', 'oxTrustx509Certificate']

        # Rewriting all the new DNs in the new installation to ldif file
        for dn in currentDNs:
            new_entry = self.getEntry(self.currentData, dn)
            if "o=site" in dn:
                continue  # skip all the o=site DNs
            elif dn not in old_dn_map.keys():
                #  Write to the file if there is no matching old DN data
                ldif_writer.unparse(dn, new_entry)
                continue

            old_entry = self.getEntry(os.path.join(self.ldifDir, old_dn_map[dn]), dn)
            for attr in old_entry.keys():
                if attr in ignoreList:
                    continue

                if attr not in new_entry:
                    new_entry[attr] = old_entry[attr]
                elif old_entry[attr] != new_entry[attr]:
                    if len(old_entry[attr]) == 1:
                        try:
                            # Single-valued JSON attributes are deep-merged;
                            # values from the new installation win.
                            old_json = json.loads(old_entry[attr][0])
                            new_json = json.loads(new_entry[attr][0])
                            new_json = merge(new_json, old_json)
                            new_entry[attr] = [json.dumps(new_json)]
                        except:
                            new_entry[attr] = old_entry[attr]
                            logging.debug("Keeping old value for %s", attr)
                    else:
                        new_entry[attr] = old_entry[attr]
                        logging.debug("Keep multiple old values for %s", attr)
            ldif_writer.unparse(dn, new_entry)

        # Pick all the left out DNs from the old DN map and write them to the LDIF
        for dn in sorted(old_dn_map, key=len):
            if dn in currentDNs:
                continue  # Already processed

            entry = self.getEntry(os.path.join(self.ldifDir, old_dn_map[dn]), dn)

            for attr in entry.keys():
                if attr not in multivalueAttrs:
                    continue  # skip conversion

                # Old data may hold a JSON list inside a single value; expand
                # it into one LDAP value per list element.
                attr_values = []
                for val in entry[attr]:
                    json_value = None
                    try:
                        json_value = json.loads(val)
                        if type(json_value) is list:
                            attr_values.extend([json.dumps(v) for v in json_value])
                    except:
                        logging.debug('Cannot parse multival %s in DN %s', attr, dn)
                        attr_values.append(val)
                entry[attr] = attr_values

            ldif_writer.unparse(dn, entry)

        # Finally
        processed_fp.close()

        # Update the Schema change for lastModifiedTime
        with open(self.processTempFile, 'r') as infile:
            with open(self.o_gluu, 'w') as outfile:
                for line in infile:
                    # BUGFIX: str.replace returns a new string; the results
                    # were previously discarded, so neither rename ever
                    # reached the output file. Bind the results back to line.
                    line = line.replace("lastModifiedTime", "oxLastAccessTime")
                    line = line.replace("cn=directory manager", "cn=directory manager,o=gluu")
                    outfile.write(line)
Esempio n. 44
0
# Simple script to pull differences between LDIF data. Used mostly as a sanity check for data.

import sys
from ldif import LDIFParser, LDIFWriter, LDIFRecordList

file1 = "idp1.ldif"
file2 = "idp2.ldif"


def _load_records(path):
    """Parse an LDIF file and return its records as a list of (dn, entry)."""
    # BUGFIX: the original called ldif.LDIFRecordList.parse(file1) with the
    # bare module name never imported, and parse() is an instance method that
    # reads from an open file object — it does not accept a filename.
    with open(path) as fp:
        parser = LDIFRecordList(fp)
        parser.parse()
    return parser.all_records


ld1 = _load_records(file1)
ld2 = _load_records(file2)

# Emit every record present in file1 but missing from file2.
# BUGFIX: LDIFWriter.unparse was called unbound with a single argument;
# it needs a writer instance and a (dn, entry) pair.
writer = LDIFWriter(sys.stdout)
for dn, entry in ld1:
    if (dn, entry) not in ld2:
        writer.unparse(dn, entry)
 def __init__(self):
     """Create an in-memory text buffer and an LDIF writer targeting it."""
     buffer = StringIO()
     # Keep a handle on the buffer so callers can read back what was written.
     self.sio = buffer
     self.ldif_writer = LDIFWriter(buffer)
Esempio n. 46
0
 def as_ldif(self):
     """Serialize this entry's DN and attributes to a single LDIF string."""
     buf = StringIO()
     writer = LDIFWriter(buf)
     writer.unparse(self.dn, self.props)
     return buf.getvalue()
Esempio n. 47
0
 def __init__(self, f_import, f_outport, exclude_attributes_set):
     """Remember the attributes to drop, open an LDIF writer on the output
     stream, then initialise the underlying parser on the input stream."""
     self.f_outport = f_outport
     self.writer = LDIFWriter(f_outport)
     self.exclude_attributes_set = exclude_attributes_set
     super().__init__(f_import)
    def processBackupData(self):
        """Merge entries from the backup LDIF into the new installation's LDIF.

        Writes the merged entries to ``self.processTempFile`` and then
        rewrites that file into ``self.o_gluu``, applying the attribute and
        DN renames required by the new schema.
        """
        logging.info('Processing the LDIF data.')

        currentDNs = self.getDns(self.currentData)
        old_dn_map = self.getOldEntryMap()

        # Attributes that must never be copied over from the old entry.
        ignoreList = [
            'objectClass', 'ou', 'oxIDPAuthentication', 'gluuFreeMemory',
            'gluuSystemUptime', 'oxLogViewerConfig', 'gluuLastUpdate'
        ]
        # Attributes whose single JSON-array value must be split into one
        # JSON value per LDAP attribute value.
        multivalueAttrs = [
            'oxTrustEmail', 'oxTrustPhoneValue', 'oxTrustImsValue',
            'oxTrustPhotos', 'oxTrustAddresses', 'oxTrustRole',
            'oxTrustEntitlements', 'oxTrustx509Certificate'
        ]

        # 'with' guarantees the temp file is closed even if processing fails.
        with open(self.processTempFile, 'w') as processed_fp:
            ldif_writer = LDIFWriter(processed_fp)

            # Rewrite all DNs present in the new installation, merging in
            # attribute values from the matching old entry when one exists.
            for dn in currentDNs:
                new_entry = self.getEntry(self.currentData, dn)
                if "o=site" in dn:
                    continue  # skip all the o=site DNs
                if dn not in old_dn_map:
                    # No matching old DN data; write the new entry unchanged.
                    ldif_writer.unparse(dn, new_entry)
                    continue

                old_entry = self.getEntry(
                    os.path.join(self.ldifDir, old_dn_map[dn]), dn)
                for attr in old_entry.keys():
                    if attr in ignoreList:
                        continue

                    if attr not in new_entry:
                        new_entry[attr] = old_entry[attr]
                    elif old_entry[attr] != new_entry[attr]:
                        if len(old_entry[attr]) == 1:
                            try:
                                old_json = json.loads(old_entry[attr][0])
                                new_json = json.loads(new_entry[attr][0])
                                new_json = merge(new_json, old_json)
                                new_entry[attr] = [json.dumps(new_json)]
                            except Exception:
                                # Not JSON (or merge failed): keep the old
                                # value rather than aborting the migration.
                                new_entry[attr] = old_entry[attr]
                                logging.debug("Keeping old value for %s", attr)
                        else:
                            new_entry[attr] = old_entry[attr]
                            logging.debug("Keep multiple old values for %s", attr)
                ldif_writer.unparse(dn, new_entry)

            # Write out the old DNs that have no counterpart in the new data.
            for dn in sorted(old_dn_map, key=len):
                if dn in currentDNs:
                    continue  # Already processed above

                entry = self.getEntry(os.path.join(self.ldifDir, old_dn_map[dn]), dn)

                for attr in entry.keys():
                    if attr not in multivalueAttrs:
                        continue  # skip conversion

                    attr_values = []
                    for val in entry[attr]:
                        try:
                            json_value = json.loads(val)
                        except Exception:
                            logging.debug('Cannot parse multival %s in DN %s', attr, dn)
                            attr_values.append(val)
                        else:
                            if isinstance(json_value, list):
                                attr_values.extend(
                                    [json.dumps(v) for v in json_value])
                            else:
                                # Parsed JSON that is not a list: keep the raw
                                # value instead of silently dropping it.
                                attr_values.append(val)
                    entry[attr] = attr_values

                ldif_writer.unparse(dn, entry)

        # Rewrite the processed file, applying schema and DN renames.
        with open(self.processTempFile, 'r') as infile:
            with open(self.o_gluu, 'w') as outfile:
                for line in infile:
                    line = line.replace("lastModifiedTime", "oxLastAccessTime")
                    if "cn=directory manager" in line and "cn=directory manager,o=gluu" not in line:
                        line = line.replace("cn=directory manager",
                                            "cn=directory manager,o=gluu")
                    if 'oxTrustAuthenticationMode' in line:
                        line = line.replace('internal', 'auth_ldap_server')
                    if 'oxAuthAuthenticationTime' in line:
                        line = self.convertTimeStamp(line)
                    outfile.write(line)
Esempio n. 49
0
	def __init__(self, output):
		"""Store an LDIFWriter that serialises records to *output*."""
		self.writer = LDIFWriter(output)
 def __init__(self, input, output):
     """Initialise the underlying LDIFParser on *input* and keep an
     LDIFWriter that emits processed records to *output*."""
     LDIFParser.__init__(self, input)
     self.writer = LDIFWriter(output)
Esempio n. 51
0
class Unparser:
	"""Thin wrapper that serialises DN/entry pairs through an LDIFWriter."""

	def __init__(self, output):
		"""Create the underlying LDIFWriter on *output*."""
		self.writer = LDIFWriter(output)

	def write(self, dn, entry_or_modlist):
		"""Emit one LDIF record for *dn* from the given entry or modlist."""
		self.writer.unparse(dn, entry_or_modlist)
Esempio n. 52
0
 def __init__(self):
     """Start with no collected objects; stream LDIF output to stdout."""
     self.global_objs = dict()
     self.ldif = LDIFWriter(sys.stdout)
                self.inumOrg = dne[0][0][1]

            if not self.inumApllience and 'gluuAppliance' in entry['objectClass']:
                self.inumApllience_dn = dn
                dne = str2dn(dn)
                self.inumApllience = dne[0][0][1]


# Parse the source LDIF once; MyLDIF records the org/appliance inums
# while parsing (see its handler above).
ldif_parser = MyLDIF(open('gluu.ldif'))
ldif_parser.parse()

# RDN fragments built from the inums captured during parsing; these are
# used by the rewriting loop below.
inumOrg_ou = 'o=' + ldif_parser.inumOrg
inumApllience_inum = 'inum='+ ldif_parser.inumApllience

# Output file for the rewritten (inum-free) entries.
processed_fp = open('gluu_noinum.ldif','w')
ldif_writer = LDIFWriter(processed_fp)


def checkIfAsimbaEntry(new_entry):
    """Return True if *new_entry* is an Asimba-related entry, else False.

    An entry counts as Asimba-related when its ``objectClass`` values
    contain any of the oxAsimba* object classes.
    """
    asimba_classes = ('oxAsimbaConfiguration', 'oxAsimbaIDP',
                      'oxAsimbaRequestorPool', 'oxAsimbaSPRequestor',
                      'oxAsimbaSelector')
    # any() returns an explicit bool; the original fell through and
    # returned None for non-Asimba entries.
    return any(objCls in new_entry['objectClass'] for objCls in asimba_classes)

for dn in ldif_parser.DNs:
    
    dne = explode_dn(dn)

    new_entry = ldif_parser.entries[dn]
def processLDIF(backupFolder, newFolder):
    """Merge the backed-up LDIF entries into the new installation's LDIF.

    Reads ``current.ldif`` from *newFolder*, merges in attribute values
    from the per-entry backup files in *backupFolder*, and writes the
    result to ``processed.ldif`` inside *newFolder*.
    """
    logging.info('Processing the LDIF data')
    current_ldif = os.path.join(newFolder, 'current.ldif')
    currentDNs = getDns(current_ldif)

    # Attributes that must never be copied over from the backup.
    ignoreList = ['objectClass', 'ou', 'oxAuthJwks', 'oxAuthConfWebKeys']
    old_dn_map = getOldEntryMap(backupFolder)

    # Attributes stored as one JSON array that must be rewritten as one
    # JSON value per LDAP attribute value.
    multivalueAttrs = ['oxTrustEmail', 'oxTrustPhoneValue', 'oxTrustImsValue',
                       'oxTrustPhotos', 'oxTrustAddresses', 'oxTrustRole',
                       'oxTrustEntitlements', 'oxTrustx509Certificate']

    # 'with' guarantees the output file is closed even if processing fails.
    with open(os.path.join(newFolder, 'processed.ldif'), 'w') as processed_ldif:
        ldif_writer = LDIFWriter(processed_ldif)

        # Rewrite all the new DNs, merging old attribute values whenever a
        # matching backup entry exists.
        for dn in currentDNs:
            new_entry = getEntry(current_ldif, dn)
            if dn not in old_dn_map:
                # No matching old DN data; write the new entry unchanged.
                ldif_writer.unparse(dn, new_entry)
                continue

            old_entry = getEntry(os.path.join(backupFolder, old_dn_map[dn]), dn)
            for attr in old_entry.keys():
                if attr in ignoreList:
                    continue

                if attr not in new_entry:
                    new_entry[attr] = old_entry[attr]
                elif old_entry[attr] != new_entry[attr]:
                    if len(old_entry[attr]) == 1:
                        try:
                            old_json = json.loads(old_entry[attr][0])
                            new_json = json.loads(new_entry[attr][0])
                            new_json = merge(new_json, old_json)
                            new_entry[attr] = [json.dumps(new_json)]
                        except Exception:
                            # Not JSON (or merge failed): keep the old value
                            # rather than aborting the migration.
                            new_entry[attr] = old_entry[attr]
                            logging.debug("Keeping old value for %s", attr)
                    else:
                        new_entry[attr] = old_entry[attr]
                        logging.debug("Keep multiple old values for %s", attr)
            ldif_writer.unparse(dn, new_entry)

        # Write out the old DNs that have no counterpart in the new data.
        for dn in sorted(old_dn_map, key=len):
            if dn in currentDNs:
                continue  # Already processed above

            entry = getEntry(os.path.join(backupFolder, old_dn_map[dn]), dn)

            for attr in entry.keys():
                if attr not in multivalueAttrs:
                    continue  # skip conversion

                attr_values = []
                for val in entry[attr]:
                    try:
                        json_value = json.loads(val)
                    except Exception:
                        # BUG FIX: the original called the misspelled name
                        # 'loggin', raising NameError on unparseable input.
                        logging.debug('Cannot parse multival %s in DN %s',
                                      attr, dn)
                        attr_values.append(val)
                    else:
                        if isinstance(json_value, list):
                            attr_values.extend(
                                [json.dumps(v) for v in json_value])
                        else:
                            # Parsed JSON that is not a list: keep the raw
                            # value instead of silently dropping it.
                            attr_values.append(val)
                entry[attr] = attr_values

            ldif_writer.unparse(dn, entry)
 def __init__(self, input, output, password):
     """Initialise the parser on *input*; keep a writer on *output* and
     the password used while rewriting entries."""
     LDIFParser.__init__(self, input)
     self.password = password
     self.writer = LDIFWriter(output)
Esempio n. 56
0
def processLDIF(backupFolder, newFolder):
    """Merge the backed-up LDIF entries into the new installation's LDIF.

    Reads ``current.ldif`` from *newFolder*, merges in attribute values
    from the per-entry backup files in *backupFolder*, and writes the
    result to ``processed.ldif`` inside *newFolder*.
    """
    logging.info('Processing the LDIF data')
    current_ldif = os.path.join(newFolder, 'current.ldif')
    currentDNs = getDns(current_ldif)

    # Attributes that must never be copied over from the backup.
    ignoreList = ['objectClass', 'ou', 'oxAuthJwks', 'oxAuthConfWebKeys']
    old_dn_map = getOldEntryMap(backupFolder)

    # Attributes stored as one JSON array that must be rewritten as one
    # JSON value per LDAP attribute value.
    multivalueAttrs = [
        'oxTrustEmail', 'oxTrustPhoneValue', 'oxTrustImsValue',
        'oxTrustPhotos', 'oxTrustAddresses', 'oxTrustRole',
        'oxTrustEntitlements', 'oxTrustx509Certificate'
    ]

    # 'with' guarantees the output file is closed even if processing fails.
    with open(os.path.join(newFolder, 'processed.ldif'), 'w') as processed_ldif:
        ldif_writer = LDIFWriter(processed_ldif)

        # Rewrite all the new DNs, merging old attribute values whenever a
        # matching backup entry exists.
        for dn in currentDNs:
            new_entry = getEntry(current_ldif, dn)
            if dn not in old_dn_map:
                # No matching old DN data; write the new entry unchanged.
                ldif_writer.unparse(dn, new_entry)
                continue

            old_entry = getEntry(os.path.join(backupFolder, old_dn_map[dn]), dn)
            for attr in old_entry.keys():
                if attr in ignoreList:
                    continue

                if attr not in new_entry:
                    new_entry[attr] = old_entry[attr]
                elif old_entry[attr] != new_entry[attr]:
                    if len(old_entry[attr]) == 1:
                        try:
                            old_json = json.loads(old_entry[attr][0])
                            new_json = json.loads(new_entry[attr][0])
                            new_json = merge(new_json, old_json)
                            new_entry[attr] = [json.dumps(new_json)]
                        except Exception:
                            # Not JSON (or merge failed): keep the old value
                            # rather than aborting the migration.
                            new_entry[attr] = old_entry[attr]
                            logging.debug("Keeping old value for %s", attr)
                    else:
                        new_entry[attr] = old_entry[attr]
                        logging.debug("Keep multiple old values for %s", attr)
            ldif_writer.unparse(dn, new_entry)

        # Write out the old DNs that have no counterpart in the new data.
        for dn in sorted(old_dn_map, key=len):
            if dn in currentDNs:
                continue  # Already processed above

            entry = getEntry(os.path.join(backupFolder, old_dn_map[dn]), dn)

            for attr in entry.keys():
                if attr not in multivalueAttrs:
                    continue  # skip conversion

                attr_values = []
                for val in entry[attr]:
                    try:
                        json_value = json.loads(val)
                    except Exception:
                        # BUG FIX: the original called the misspelled name
                        # 'loggin', raising NameError on unparseable input.
                        logging.debug('Cannot parse multival %s in DN %s',
                                      attr, dn)
                        attr_values.append(val)
                    else:
                        if isinstance(json_value, list):
                            attr_values.extend(
                                [json.dumps(v) for v in json_value])
                        else:
                            # Parsed JSON that is not a list: keep the raw
                            # value instead of silently dropping it.
                            attr_values.append(val)
                entry[attr] = attr_values

            ldif_writer.unparse(dn, entry)