def __init__(self, input, output, root, ns1, ns2, outputdir='.'):
    LDIFParser.__init__(self, input)
    rantoday = open('.rantoday', 'r')
    try:
        serial = rantoday.readline()
        print serial
        if serial != '':
            serial = int(serial)
        else:
            serial = 0
        serialNum = serial + 1
        rantoday.close()
        rantoday = open('.rantoday', 'w+')
        rantoday.write(str(serialNum))
        print serialNum
        rantoday.close()
    except IOError:
        exit("couldn't open the .rantoday file in the local directory; "
             "it is used to count reruns so that the zone serial increases")
    self.serial = serialNum
    self.writer = LDIFWriter(output)
    self.megaArray = {}
    self.cnamed = []
    self.valueInEntries = []
    self.megaTree = {}
    self.subDomainRecords = {}
    self.root = root
    self.ns1 = ns1
    self.ns2 = ns2
    self.zoneSubDirectory = outputdir
    self.megaWeirdArray = {}
    self.zoneArray = {}
    self.managedZones = {}
    self.exempted = {2: ['co.uk', 'org.ua', 'com.ar']}
def ldif_writer(self):
    if not self._ldif_writer:
        logger.info('Original zones will be saved in LDIF format in '
                    '%s file', self.backup_path)
        self._ldif_writer = LDIFWriter(open(self.backup_path, 'w'))
    return self._ldif_writer
def __init__(self, diff_fil):
    """
    `diff_fil`: the file-like object into which the incremental
    ldifs will be written.
    """
    self.writer = LDIFWriter(diff_fil)
    # Unfortunately we have to maintain this separately from the
    # LDIFWriter since the writer appears to offer no way to
    # delete a full dn. See handle_delete.
    self.diff_fil = diff_fil
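# A hedged sketch of the handle_delete that the comment above alludes to
# (an assumed companion method, not the original code): since LDIFWriter
# offers no way to emit a plain delete record, the raw change record is
# written to diff_fil directly. The class name is hypothetical.
class DiffWriter(object):
    def __init__(self, diff_fil):
        self.writer = LDIFWriter(diff_fil)
        self.diff_fil = diff_fil

    def handle_delete(self, dn):
        # emit an LDIF change record that removes the whole entry
        self.diff_fil.write("dn: %s\n" % dn)
        self.diff_fil.write("changetype: delete\n\n")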
def ldap(self, command, attrs):
    self.logger.debug('received command %s %s', command, attrs)
    if command == 'SEARCH':
        out = StringIO()
        ldif_writer = LDIFWriter(out)
        qs = get_user_model().objects.all()
        if attrs['filter'] != '(objectClass=*)':
            m = re.match(r'\((\w*)=(.*)\)', attrs['filter'])
            if not m:
                print 'RESULT'
                print 'code: 1'
                print 'info: invalid filter'
                print
                return
            for user_attribute, ldap_attribute in MAPPING.iteritems():
                if ldap_attribute == m.group(1):
                    break
            else:
                print 'RESULT'
                print 'code: 1'
                print 'info: unknown attribute in filter'
                print
                return
            value = m.group(2)
            if value.endswith('*') and value.startswith('*'):
                user_attribute += '__icontains'
                value = value[1:-1]
            elif value.endswith('*'):
                user_attribute += '__istartswith'
                value = value[:-1]
            elif value.startswith('*'):
                user_attribute += '__iendswith'
                value = value[1:]
            else:
                user_attribute += '__iexact'
                value = unescape_filter_chars(value)
            qs = qs.filter(**{user_attribute: value.decode('utf-8')})
        for user in qs:
            o = {}
            for user_attribute, ldap_attribute in MAPPING.iteritems():
                o[ldap_attribute] = [
                    unicode(getattr(user, user_attribute)).encode('utf-8')
                ]
            o['objectClass'] = ['inetOrgPerson']
            dn = 'uid=%s,%s' % (escape_dn_chars(o['uid'][0]), attrs['suffix'])
            self.logger.debug(u'sending entry %s %s', dn, o)
            ldif_writer.unparse(dn, o)
        print out.getvalue(),
        out.close()
        print 'RESULT'
        print 'code: 0'
        print 'info: RockNRoll'
        print
def generate_ldif(action, use_template=False, sync_source='MDM', **kwargs):
    """Generate an LDIF string from kwargs or from a template.

    Parameters:
        use_template: boolean; 'CREATESESSION' and 'CLOSESESSION' should use
            a template, other actions False.
        sync_source: 'MDM' or 'Directory'
        **kwargs: all items of the entry dict.
    """
    output = StringIO()
    w = LDIFWriter(output)
    if use_template:
        if not templates.has_key(action):
            return ""
        d = templates[action]
        for i in d:
            w.unparse(*i)
        output.reset()
        r = output.read()
        output.close()
        if sync_source:
            r = r.format(
                sync_source=sync_source,
                current_time=datetime.today().strftime("%Y%m%d%H%M%SZ"),
                **kwargs)
        else:
            r = r.format(**kwargs)
        return r
    else:
        if not kwargs.has_key('dn'):
            output.close()
            return ""
        dn = kwargs.pop('dn')
        for k, v in kwargs.iteritems():
            if not isinstance(v, list):
                kwargs[k] = [v]
        w.unparse(dn, kwargs)
        output.reset()
        r = output.read()
        output.close()
        return r
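# A minimal usage sketch for generate_ldif above. It assumes the module's
# `templates` dict has a 'CREATESESSION' entry; the action names, DN and
# attribute values are illustrative only.
session_ldif = generate_ldif('CREATESESSION', use_template=True,
                             sync_source='MDM', session_id='42')
entry_ldif = generate_ldif('ADD',
                           dn='uid=jdoe,ou=people,dc=example,dc=com',
                           objectClass=['inetOrgPerson'],
                           uid='jdoe',
                           cn='John Doe')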
def action_dump(conn, basedn, shorten=True, rewrite_dn=True):
    writer = LDIFWriter(sys.stdout)
    try:
        for dn, attrs in conn.search_s(basedn, ldap.SCOPE_SUBTREE):
            if rewrite_dn:
                dn = (dn.decode("utf-8").replace(
                    basedn, "dc=unified,dc=base,dc=dn").encode("utf-8"))
            if shorten:
                attrs = {
                    k: [abbrev_value(v) for v in vals]
                    for k, vals in attrs.iteritems()
                }
            try:
                writer.unparse(dn, attrs)
            except UnicodeDecodeError:
                writer.unparse(dn.decode("utf-8"), attrs)
    except ldap.NO_SUCH_OBJECT:
        print("No object '%s' in directory." % basedn, file=sys.stderr)
        sys.exit(1)
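# A hedged usage sketch for action_dump above: the server URI, bind
# credentials and base DN are placeholders, and abbrev_value() is assumed
# to be defined elsewhere in the same module.
conn = ldap.initialize("ldap://localhost")
conn.simple_bind_s("cn=admin,dc=example,dc=com", "secret")
action_dump(conn, "dc=example,dc=com", shorten=True, rewrite_dn=True)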
def ldap_export():
    print(f"Exporting LDAP environment")
    try:
        all_entries = conn.search_s(search_base, search_scope)
        ldif_file = f"soca_export_{uuid.uuid4()}.ldif"
        ldif_content = []
        for entry in all_entries:
            print(f"Export {entry[0]}")
            ldif_content.append(entry)
        print(f"Creating LDIF: {ldif_file}")
        ldif_writer = LDIFWriter(open(ldif_file, "wb"))
        for content in ldif_content:
            dn = content[0]
            record = content[1]
            ldif_writer.unparse(dn, record)
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(f"Unable to export LDAP environment due to {err}. "
              f"{exc_type}, {fname}, {exc_tb.tb_lineno}")
        sys.exit(1)
def generate_ldif_from_list(action, array):
    """Generate an LDIF string from a list.

    Parameters:
        array: a list of dicts, each containing user or group info.
    """
    if isinstance(array, list):
        output = StringIO()
        w = LDIFWriter(output)
        for a in array:
            if a.has_key('dn'):
                dn = a.pop('dn')
                for k, v in a.iteritems():
                    if not isinstance(v, list):
                        a[k] = [v]
                w.unparse(dn, a)
            else:
                logger.error('the element of ldif does not have "dn": %s', a)
        output.reset()
        r = output.read()
        output.close()
        return r
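# An illustrative call to generate_ldif_from_list above; the entries are
# made-up sample data. Elements without a 'dn' key are logged and skipped.
people = [
    {'dn': 'uid=alice,ou=people,dc=example,dc=com',
     'objectClass': ['inetOrgPerson'], 'uid': 'alice', 'cn': 'Alice Example'},
    {'dn': 'uid=bob,ou=people,dc=example,dc=com',
     'objectClass': ['inetOrgPerson'], 'uid': 'bob', 'cn': 'Bob Example'},
]
ldif_text = generate_ldif_from_list('ADD', people)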
def processBackupData(self):
    logging.info('Processing the LDIF data.')
    processed_fp = open(self.processTempFile, 'w')
    ldif_writer = LDIFWriter(processed_fp)

    currentDNs = self.getDns(self.currentData)
    old_dn_map = self.getOldEntryMap()

    ignoreList = [
        'objectClass', 'ou', 'oxIDPAuthentication', 'gluuFreeMemory',
        'gluuSystemUptime', 'oxLogViewerConfig', 'gluuLastUpdate'
    ]
    multivalueAttrs = [
        'oxTrustEmail', 'oxTrustPhoneValue', 'oxTrustImsValue',
        'oxTrustPhotos', 'oxTrustAddresses', 'oxTrustRole',
        'oxTrustEntitlements', 'oxTrustx509Certificate'
    ]

    if self.oxIDPAuthentication == 1:
        ignoreList.remove('oxIDPAuthentication')

    # Rewriting all the new DNs in the new installation to ldif file
    for dn in currentDNs:
        new_entry = self.getEntry(self.currentData, dn)
        if "o=site" in dn:
            continue  # skip all the o=site DNs
        if dn not in old_dn_map.keys():
            # Write to the file if there is no matching old DN data
            ldif_writer.unparse(dn, new_entry)
            continue

        old_entry = self.getEntry(
            os.path.join(self.ldifDir, old_dn_map[dn]), dn)
        for attr in old_entry.keys():
            if attr in ignoreList:
                continue

            if attr not in new_entry:
                new_entry[attr] = old_entry[attr]
            elif old_entry[attr] != new_entry[attr]:
                if len(old_entry[attr]) == 1:
                    try:
                        old_json = json.loads(old_entry[attr][0])
                        new_json = json.loads(new_entry[attr][0])
                        new_json = merge(new_json, old_json)
                        new_entry[attr] = [json.dumps(new_json)]
                    except:
                        if attr == 'oxScript':
                            new_entry[attr] = new_entry[attr]
                            logging.debug("Keeping new value for %s", attr)
                        else:
                            new_entry[attr] = old_entry[attr]
                            logging.debug("Keeping old value for %s", attr)
                else:
                    new_entry[attr] = old_entry[attr]
                    logging.debug("Keep multiple old values for %s", attr)
        ldif_writer.unparse(dn, new_entry)

    # Pick all the left out DNs from the old DN map and write them to the LDIF
    for dn in sorted(old_dn_map, key=len):
        if "o=site" in dn:
            continue  # skip all the o=site DNs
        if dn in currentDNs:
            continue  # Already processed

        entry = self.getEntry(os.path.join(self.ldifDir, old_dn_map[dn]), dn)

        for attr in entry.keys():
            if attr not in multivalueAttrs:
                continue  # skip conversion

            attr_values = []
            for val in entry[attr]:
                json_value = None
                try:
                    json_value = json.loads(val)
                    if type(json_value) is list:
                        attr_values.extend(
                            [json.dumps(v) for v in json_value])
                except:
                    logging.debug('Cannot parse multival %s in DN %s', attr, dn)
                    attr_values.append(val)
            entry[attr] = attr_values

        ldif_writer.unparse(dn, entry)

    # Finally
    processed_fp.close()

    # Update the Schema change for lastModifiedTime
    with open(self.processTempFile, 'r') as infile:
        with open(self.o_gluu, 'w') as outfile:
            for line in infile:
                line = line.replace("lastModifiedTime", "oxLastAccessTime")
                line = line.replace('oxAuthUmaResourceSet', 'oxUmaResource')
                if 'oxTrustAuthenticationMode' in line:
                    line = line.replace('internal', 'auth_ldap_server')
                if 'oxAuthAuthenticationTime' in line:
                    line = self.convertTimeStamp(line)
                if ("objectClass:" in line and
                        line.split("objectClass: ")[1][:3] == 'ox-'):
                    line = line.replace(
                        line, 'objectClass: gluuCustomPerson' + '\n')
                if ('oxType' not in line and
                        'gluuVdsCacheRefreshLastUpdate' not in line and
                        'objectClass: person' not in line and
                        'objectClass: organizationalPerson' not in line and
                        'objectClass: inetOrgPerson' not in line):
                    outfile.write(line)

    # parser = MyLDIF(open(self.currentData, 'rb'), sys.stdout)
    # atr = parser.parse()

    base64Types = [""]

    # for idx, val in enumerate(parser.entries):
    #     if 'displayName' in val:
    #         if val['displayName'][0] == 'SCIM Resource Set':
    #             out = CreateLDIF(parser.getDNs()[idx], val,
    #                              base64_attrs=base64Types)
    #             f = open(self.o_gluu, "a")
    #             f.write('\n')
    #             f.write(out)

    data = "".join(open(os.path.join(
        self.backupDir, 'ldif', 'site.ldif')).readlines()[4:-1])
    open(os.path.join(self.backupDir, 'ldif', 'site.ldif'), "wb").write(data)

    filenames = [
        self.o_site_static,
        os.path.join(self.backupDir, 'ldif', 'site.ldif')
    ]
    with open(self.o_site, 'w') as outfile:
        for fname in filenames:
            with open(fname) as infile:
                for line in infile:
                    outfile.write(line)
def main():
    def pretty_print(data):
        """
        This function prints the output
        :param data:
        :return:
        """
        object_count: int = 0

        def log(text: str, level=1):
            if (level <= VERBOSITY):
                sys.stderr.write(str(text) + "\n")

        def central_print(text, level=1, space="-",
                          size=max(20, int(os.popen('stty size', 'r').read().split()[1]))):
            log(space * ((size - len(text) - 2) // 2) + " " + text + " " +
                space * ((size - len(text) - 2) // 2), level)

        def run_watchdog(reverse, regexp, value, attribute):
            log("watchdog debug: Reverse: {}, Regexp: {}, Value: {}, Attribute: {}"
                .format(reverse, regexp, value, attribute), 9)
            result = True
            try:
                if type(value).__name__ == 'list':
                    for item in value:
                        result = result and run_watchdog(reverse, regexp, item, attribute)
                else:
                    if regexp.match(value.decode()):
                        log("Matched the regexp '{}' in attribute {}!".format(
                            regexp.pattern, value), 7)
                        if (reverse):
                            log("WATCHDOG SPOTTED: {}".format(value))
                            return False
                    else:
                        if (not (reverse)):
                            log("WATCHDOG NOT MATCHING: {} on {}".format(value, regexp), 3)
                            return False
            except TypeError:
                log("Unsupported watched object type on attribute {}, got {}".format(
                    attribute, type(value).__name__))
            return result

        def output(cn, content, object_type="Object"):
            central_print("Object " + " '" + cn.split(",")[0].split("=")[1] + "'", 3)
            to_be_removed = []
            for banned in FILTERED_CLASSES:
                pattern = re.compile("^" + str(banned) + "$")
                for value in content["objectClass"]:
                    if pattern.match(str(value)) is not None:
                        to_be_removed.append(value)
                    else:
                        log("{} not matched for removal with {}".format(
                            value, pattern.pattern), 9)
            for remove in to_be_removed:
                content["objectClass"].remove(remove)
                log("Removing objectClass: {}".format(remove), 4)

            to_be_removed = []
            watchdog_result = True
            for key in content.keys():
                if len(ATTRIBUTES_LIST) and key not in ATTRIBUTES_LIST:
                    log("Based on attributes list removing attribute {} ".format(key), 3)
                    to_be_removed.append(key)
                for banned in FILTERED_ATTRIBUTES:
                    pattern = re.compile("^" + str(banned) + "$")
                    if pattern.match(key) is not None:
                        to_be_removed.append(key)
                for (attribute, old_value, new_value) in SUBSTITUTE:
                    if (key == attribute):
                        log("Found matching attribute {} for substitution".format(key), 9)
                        if type(content[key]).__name__ == 'list':
                            _to_be_removed = []
                            _to_be_added = []
                            for item in content[key]:
                                if item.decode() == old_value:
                                    _to_be_removed.append(item)
                                    _to_be_added.append(new_value.encode())
                                    log("Replacing on key {} value {} for {} ".format(
                                        key, old_value, new_value), 5)
                                else:
                                    log("Array replace: {} is not {}".format(
                                        item, old_value), 9)
                            for remove in _to_be_removed:
                                content[key].remove(remove)
                            for append in _to_be_added:
                                content[key].append(append)
                        elif content[key] == old_value:
                            content[key] = new_value
                            log("Replacing on key {} value {} for {} ".format(
                                key, old_value, new_value), 5)
                        else:
                            log("Replace condition {}:{} not met key {}".format(
                                attribute, old_value, key), 9)
                for (reverse, attribute, regexp) in WATCHER:
                    if key != attribute:
                        continue
                    else:
                        watchdog_result = run_watchdog(reverse, regexp,
                                                       content[key], attribute)
                        if watchdog_result:
                            log("Removing {} due to watchdog results".format(cn), 1)

            for remove in to_be_removed:
                try:
                    content.pop(remove)
                    log("Removing attribute: {}".format(remove), 4)
                except KeyError:
                    log("Failed to remove {}, already removed?".format(remove), 9)

            if watchdog_result and WATCHDOG_STRICT:
                log("Watchdog blocked")
                return

            for (attribute, value) in NEW_ATTRIBUTES:
                value = value.encode()
                log("Adding attribute {} with value {}".format(attribute, value), 4)
                try:
                    content.get(attribute)
                    if (type(content[attribute]).__name__ == 'list'):
                        content[attribute].append(value)
                    else:
                        content[attribute] = value
                except KeyError:
                    raise Exception(
                        "Attribute {} is already present! For rewriting attributes use another function."
                        .format(attribute))

            if not len(content.keys()):
                if REMOVE_WHEN_ZERO_ATTRS:
                    log('Removing {} because it has no attribute left. Rest in piece(s)'
                        .format(cn), 6)
                    return
                log('Please notice, that {} has no attributes!'.format(cn), 2)

            log(content, 9)
            OUTPUT_FILE.unparse(cn, content)
            log("Written the object {}".format(cn), 6)

        for (cn, content) in data:
            if cn.endswith(RESTRICT):
                object_count += 1
                output(cn, content)
            else:
                log("{} restricted from output".format(cn), 10)
        log("", 2)
        central_print("FINISHED", 2)
        log("Object count: {}".format(object_count), 2)

    from ldif import LDIFRecordList, LDIFWriter
    import argparse
    import re
    import sys
    import os

    parser = argparse.ArgumentParser(
        description='LDIF manipulation program. Please check full in')
    parser.add_argument('-i', '--input', metavar='input', type=str,
                        required=True, help='Ldif file to be processed')
    parser.add_argument('-o', '--output', metavar='output', type=str,
                        required=False, default=None,
                        help='Ldif file to be written')
    parser.add_argument(
        '-r', '--restrict', metavar='restrict', type=str, default="",
        help='Restrict the output only to CN ending with this string. \n'
             'If you want the object itself to be omitted, prepend with ","')
    parser.add_argument(
        '-s', '--substitute', metavar='substitute', type=str, nargs="+",
        required=False, default=[],
        help='Substitutes matched attribute:key values for desired value. '
             'Syntax "attribute:old_value|new_value"')
    parser.add_argument(
        '-f', '--filter', metavar='filter', type=str, nargs="+",
        required=False, default=[],
        help='ObjectClass filter which will be removed from the user')
    parser.add_argument(
        '-a', '--attributes', metavar='attributes', type=str, nargs="+",
        required=False, default=[],
        help='Attributes filter which will be removed from the user')
    parser.add_argument(
        '-n', '--new', metavar='new', type=str, nargs="+",
        required=False, default=[],
        help='Attributes which need to be added, appends to existing lists. '
             'Syntax "attribute:value"')
    parser.add_argument(
        '-v', '--verbosity', metavar='verbosity', type=int,
        required=False, default=1,
        help='Configure the verbosity level, 0 is nothing, 10 is a lot')
    parser.add_argument(
        '-w', '--watch', metavar='watch', type=str, nargs="+",
        required=False, default=[],
        help='Watch attributes with values matching regexp. Prepending the '
             'expression with \'^\' checks if the string does not match.\n'
             'Usage: \nuserClass:[Tt]roll\n^userPassword:{CRYPT}')
    parser.add_argument('-ww', metavar="watchdog_strict", type=bool,
                        required=False, default=None,
                        help="Delete after watchdog match? True/False")
    parser.add_argument(
        '-l', '--list', metavar='list', type=str, nargs="+",
        required=False, default=[],
        help='List of allowed attributes. Other attributes will be therefore removed')
    parser.add_argument('-z', '--zero', metavar='zero', type=bool,
                        required=False, default=False,
                        help='Remove object with zero attributes?')

    args = parser.parse_args()

    input_file = open(args.input, "r")
    if args.output is not None:
        OUTPUT_FILE = LDIFWriter(open(args.output, "w"))
    else:
        OUTPUT_FILE = LDIFWriter(sys.stdout)
    VERBOSITY = args.verbosity
    RESTRICT = args.restrict
    FILTERED_CLASSES = [x.encode() for x in args.filter]
    FILTERED_ATTRIBUTES = args.attributes
    WATCHER = []
    WATCHDOG_STRICT = args.ww
    for watcher in args.watch:
        try:
            if watcher.startswith("^"):
                reverse = True
                watcher = watcher[1:]
            else:
                reverse = False
            attribute = watcher.split(":")[0]
            regexp = ":".join(watcher.split(":")[1:])
            WATCHER.append((reverse, attribute, re.compile("^" + regexp + "$")))
        except KeyError:
            log("Watcher string not matching our expectations! Check the help -h!", 0)

    SUBSTITUTE = []
    for substitute in args.substitute:
        try:
            attribute = substitute.split(":")[0]
            old_value, new_value = ":".join(substitute.split(":")[1:]).split("|")
            SUBSTITUTE.append((attribute, old_value, new_value))
        except KeyError:
            log("Substitute string not matching expectations! Check the help -h", 0)

    NEW_ATTRIBUTES = []
    try:
        NEW_ATTRIBUTES = [item.split(":", 1) for item in args.new]
    except KeyError:
        log("Attribute creation error! Check the help -h", 0)

    try:
        ATTRIBUTES_LIST = args.list
    except KeyError:
        log("Error when parsing allowed attributes list, check your syntax")

    REMOVE_WHEN_ZERO_ATTRS = True

    record_list = LDIFRecordList(input_file, max_entries=0)
    record_list.parse()
    pretty_print(record_list.all_records)
def processBackupData(self):
    logging.info('Processing the LDIF data.')

    processed_fp = open(self.processTempFile, 'w')
    ldif_writer = LDIFWriter(processed_fp,
                             base64_attrs=['gluuProfileConfiguration'])

    currentDNs = self.getDns(self.currentData)
    old_dn_map = self.getOldEntryMap()

    ignoreList = [
        'objectClass', 'ou', 'oxIDPAuthentication', 'gluuFreeMemory',
        'gluuSystemUptime', 'oxLogViewerConfig', 'gluuLastUpdate'
    ]
    multivalueAttrs = [
        'oxTrustEmail', 'oxTrustPhoneValue', 'oxTrustImsValue',
        'oxTrustPhotos', 'oxTrustAddresses', 'oxTrustRole',
        'oxTrustEntitlements', 'oxTrustx509Certificate'
    ]

    if self.ldap_type == 'opendj':
        ignoreList.remove('oxIDPAuthentication')

    # Rewriting all the new DNs in the new installation to ldif file
    nodn = len(currentDNs)
    for cnt, dn in enumerate(currentDNs):
        progress_bar(cnt, nodn, 'Rewriting DNs')
        new_entry = self.getEntry(self.currentData, dn)
        if "o=site" in dn:
            continue  # skip all the o=site DNs
        if dn not in old_dn_map.keys():
            # Write to the file if there is no matching old DN data
            ldif_writer.unparse(dn, new_entry)
            continue

        old_entry = self.getEntry(
            os.path.join(self.ldifDir, old_dn_map[dn]), dn)
        for attr in old_entry.keys():
            if attr in ignoreList:
                continue

            if attr not in new_entry:
                new_entry[attr] = old_entry[attr]
            elif old_entry[attr] != new_entry[attr]:
                if len(old_entry[attr]) == 1:
                    try:
                        old_json = json.loads(old_entry[attr][0])
                        new_json = json.loads(new_entry[attr][0])
                        new_json = merge(new_json, old_json)
                        new_entry[attr] = [json.dumps(new_json)]
                    except:
                        if attr == 'oxScript':
                            new_entry[attr] = new_entry[attr]
                            logging.debug("Keeping new value for %s", attr)
                        else:
                            new_entry[attr] = old_entry[attr]
                            logging.debug("Keeping old value for %s", attr)
                else:
                    new_entry[attr] = old_entry[attr]
                    logging.debug("Keep multiple old values for %s", attr)
        ldif_writer.unparse(dn, new_entry)

    progress_bar(0, 0, 'Rewriting DNs', True)

    # Pick all the left out DNs from the old DN map and write them to the LDIF
    nodn = len(old_dn_map)
    ldif_shelve_dict = {}

    for cnt, dn in enumerate(sorted(old_dn_map, key=len)):
        progress_bar(cnt, nodn, 'Perapring DNs for 3.1.2')
        if "o=site" in dn:
            continue  # skip all the o=site DNs
        if dn in currentDNs:
            continue  # Already processed

        cur_ldif_file = old_dn_map[dn]

        if not cur_ldif_file in ldif_shelve_dict:
            sdb = DBLDIF(os.path.join(self.ldifDir, cur_ldif_file))
            sdb.parse()
            ldif_shelve_dict[cur_ldif_file] = sdb.sdb

        entry = ldif_shelve_dict[cur_ldif_file][dn]

        # MB: (1) TODO: instead of processing ldif twice, appy this method for (2)
        if 'ou=people' in dn:
            for o in entry['objectClass']:
                if o.startswith('ox-'):
                    entry['objectClass'].remove(o)
                    entry['objectClass'].append('gluuCustomPerson')
                    break

        if 'ou=trustRelationships' in dn:
            if 'gluuIsFederation' in entry:
                if entry['gluuIsFederation'][0] == 'true':
                    entry['gluuEntityType'] = ['Federation/Aggregate']
                else:
                    entry['gluuEntityType'] = ['Single SP']

            entry['gluuSpecificRelyingPartyConfig'] = ['true']
            LONGBASE64ENCODEDSTRING = '<rp:ProfileConfiguration xsi:type="saml:SAML2SSOProfile" \n\tincludeAttributeStatement="true"\n\tassertionLifetime="300000"\n\tassertionProxyCount="0"\n\tsignResponses="conditional"\n\tsignAssertions="never"\n\tsignRequests="conditional"\n\tencryptAssertions="conditional"\n\tencryptNameIds="never"\n/>'
            if not 'gluuProfileConfiguration' in entry:
                entry['gluuProfileConfiguration'] = [LONGBASE64ENCODEDSTRING]
            else:
                entry['gluuProfileConfiguration'].append(LONGBASE64ENCODEDSTRING)

        for attr in entry.keys():
            if attr not in multivalueAttrs:
                continue  # skip conversion

            attr_values = []
            for val in entry[attr]:
                json_value = None
                try:
                    json_value = json.loads(val)
                    if type(json_value) is list:
                        attr_values.extend(
                            [json.dumps(v) for v in json_value])
                    else:
                        attr_values.append(val)
                except:
                    logging.debug('Cannot parse multival %s in DN %s', attr, dn)
                    attr_values.append(val)
            entry[attr] = attr_values

        if 'oxAuthClientCustomAttributes' in entry['objectClass']:
            entry['objectClass'].remove('oxAuthClientCustomAttributes')

        ldif_writer.unparse(dn, entry)

    # Finally
    processed_fp.close()
    progress_bar(0, 0, 'Perapring DNs for 3.1.2', True)

    # MB: (2) replace the following with above method
    # Update the Schema change for lastModifiedTime
    nodn = sum(1 for line in open(self.processTempFile))
    with open(self.processTempFile, 'r') as infile:
        with open(self.o_gluu, 'w') as outfile:
            for cnt, line in enumerate(infile):
                progress_bar(cnt, nodn, 'converting Dns')
                line = line.replace("lastModifiedTime", "oxLastAccessTime")
                line = line.replace('oxAuthUmaResourceSet', 'oxUmaResource')
                if ("gluuAttributeOrigin:" in line and
                        line.split("gluuAttributeOrigin: ")[1][:3] == 'ox-'):
                    line = 'gluuAttributeOrigin: gluuCustomPerson' + '\n'
                if ("gluuAttributeOrigin:" in line and 'inetOrgPerson' in line):
                    line = 'gluuAttributeOrigin: gluuCustomPerson' + '\n'
                if 'oxAuthAuthenticationTime' in line:
                    line = self.convertTimeStamp(line)
                if 'oxAuthenticationMode' in line:
                    line = 'oxAuthenticationMode: auth_ldap_server' + '\n'
                if 'oxTrustAuthenticationMode' in line:
                    line = 'oxTrustAuthenticationMode: auth_ldap_server' + '\n'
                # MB: See (1) how we implement this
                # if ("objectClass:" in line and line.split("objectClass: ")[1][:3] == 'ox-'):
                #     line = line.replace(line, 'objectClass: gluuCustomPerson' + '\n')
                if ('oxType' not in line and
                        'gluuVdsCacheRefreshLastUpdate' not in line and
                        'objectClass: person' not in line and
                        'objectClass: organizationalPerson' not in line and
                        'objectClass: inetOrgPerson' not in line):
                    outfile.write(line)

    # parser = MyLDIF(open(self.currentData, 'rb'), sys.stdout)
    # atr = parser.parse()

    base64Types = [""]

    # for idx, val in enumerate(parser.entries):
    #     if 'displayName' in val:
    #         if val['displayName'][0] == 'SCIM Resource Set':
    #             out = CreateLDIF(parser.getDNs()[idx], val,
    #                              base64_attrs=base64Types)
    #             f = open(self.o_gluu, "a")
    #             f.write('\n')
    #             f.write(out)

    # data = "".join(open(os.path.join(self.backupDir, 'ldif', 'site.ldif')).readlines()[4:-1])
    # open(os.path.join(self.backupDir, 'ldif', 'sitetmp.ldif'), "wb").write(data)
    # filenames = [self.o_site_static, os.path.join(self.backupDir, 'ldif', 'sitetmp.ldif')]
    # with open(self.o_site, 'w') as outfile:
    #     for fname in filenames:
    #         with open(fname) as infile:
    #             for line in infile:
    #                 outfile.write(line)
    # os.remove(os.path.join(self.backupDir, 'ldif', 'sitetmp.ldif'))

    progress_bar(0, 0, 'converting Dns', True)
def execute(self, **options):
    ldap = self.api.Backend.ldap2
    # check LDAP if forwardzones already uses new semantics
    dns_container_dn = DN(self.api.env.container_dns, self.api.env.basedn)
    try:
        container_entry = ldap.get_entry(dns_container_dn)
    except errors.NotFound:
        # DNS container not found, nothing to upgrade
        return False, []

    for config_option in container_entry.get("ipaConfigString", []):
        matched = re.match("^DNSVersion\s+(?P<version>\d+)$",
                           config_option, flags=re.I)
        if matched and int(matched.group("version")) >= 1:
            # forwardzones already uses new semantics,
            # no upgrade is required
            return False, []

    self.log.info('Updating forward zones')
    # update the DNSVersion, following upgrade can be executed only once
    container_entry.setdefault('ipaConfigString', []).append(u"DNSVersion 1")
    ldap.update_entry(container_entry)

    # Updater in IPA version from 4.0 to 4.1.2 doesn't work well, this
    # should detect if update in past has been executed, and set proper
    # DNSVersion into LDAP
    try:
        fwzones = self.api.Command.dnsforwardzone_find()['result']
    except errors.NotFound:
        # No forwardzones found, update probably has not been executed yet
        pass
    else:
        if fwzones:
            # fwzones exist, do not execute upgrade again
            return False, []

    zones = []
    try:
        # raw values are required to store into ldif
        zones = self.api.Command.dnszone_find(all=True,
                                              raw=True,
                                              sizelimit=0)['result']
    except errors.NotFound:
        pass

    if not zones:
        self.log.info('No DNS zone to update found')
        return False, []

    zones_to_transform = []

    for zone in zones:
        if (zone.get('idnsforwardpolicy', [u'first'])[0] == u'none' or
                zone.get('idnsforwarders', []) == []):
            continue  # don't update zone

        zones_to_transform.append(zone)

    if zones_to_transform:
        # add time to filename
        self.backup_path = time.strftime(self.backup_path)

        # DNs of privileges which contain dns managed permissions
        privileges_to_ldif = set()  # store priviledges only once
        zone_to_privileges = {}  # zone: [privileges cn]

        self.log.info('Zones with specified forwarders with policy different'
                      ' than none will be transformed to forward zones.')
        self.log.info('Original zones will be saved in LDIF format in '
                      '%s file' % self.backup_path)
        try:
            with open(self.backup_path, 'w') as f:
                writer = LDIFWriter(f)
                for zone in zones_to_transform:
                    # save backup to ldif
                    try:
                        dn = str(zone['dn'])
                        del zone['dn']  # dn shouldn't be as attribute in ldif
                        writer.unparse(dn, zone)

                        if 'managedBy' in zone:
                            entry = ldap.get_entry(DN(zone['managedBy'][0]))
                            for privilege_member_dn in entry.get('member', []):
                                privileges_to_ldif.add(privilege_member_dn)
                            writer.unparse(str(entry.dn), dict(entry.raw))

                            # privileges where permission is used
                            if entry.get('member'):
                                zone_to_privileges[zone['idnsname'][0]] = entry['member']

                        # raw values are required to store into ldif
                        records = self.api.Command['dnsrecord_find'](
                            zone['idnsname'][0],
                            all=True,
                            raw=True,
                            sizelimit=0)['result']
                        for record in records:
                            if record['idnsname'][0] == u'@':
                                # zone record was saved before
                                continue
                            dn = str(record['dn'])
                            del record['dn']
                            writer.unparse(dn, record)
                    except Exception, e:
                        self.log.error('Unable to backup zone %s' %
                                       zone['idnsname'][0])
                        self.log.error(traceback.format_exc())
                        return False, []

                for privilege_dn in privileges_to_ldif:
                    try:
                        entry = ldap.get_entry(privilege_dn)
                        writer.unparse(str(entry.dn), dict(entry.raw))
                    except Exception, e:
                        self.log.error('Unable to backup privilege %s' %
                                       privilege_dn)
                        self.log.error(traceback.format_exc())
                        return False, []

                f.close()
def __init__(self):
    self.sio = StringIO()
    self.ldif_writer = LDIFWriter(self.sio)
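# A small sketch of the same in-memory pattern as the __init__ above
# (assuming Python 2 / python-ldap 2.x, matching most snippets in this
# listing): entries are unparse()d into a StringIO buffer and the
# accumulated LDIF text is read back with getvalue(). The DN and
# attributes are illustrative.
from StringIO import StringIO
from ldif import LDIFWriter

sio = StringIO()
writer = LDIFWriter(sio)
writer.unparse('uid=jdoe,ou=people,dc=example,dc=com',
               {'objectClass': ['inetOrgPerson'], 'uid': ['jdoe']})
ldif_text = sio.getvalue()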
def __init__(self, output):
    self.writer = LDIFWriter(output)
def __init__(self, f_import, f_outport):
    self.f_outport = f_outport
    self.writer = LDIFWriter(self.f_outport)
    super().__init__(f_import)
def __init__(self, input, output, password):
    LDIFParser.__init__(self, input)
    self.writer = LDIFWriter(output)
    self.password = password
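# A hedged sketch of how a parser/writer pair like the __init__ above is
# commonly completed. This is an assumed continuation, not the original
# class: the class name and the userPassword rewrite are illustrative.
class PasswordRewriter(LDIFParser):
    def __init__(self, input, output, password):
        LDIFParser.__init__(self, input)
        self.writer = LDIFWriter(output)
        self.password = password

    def handle(self, dn, entry):
        # overwrite any existing password value before re-emitting the entry
        entry['userPassword'] = [self.password]
        self.writer.unparse(dn, entry)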
            self.inumOrg = dne[0][0][1]

        if not self.inumApllience and 'gluuAppliance' in entry['objectClass']:
            self.inumApllience_dn = dn
            dne = str2dn(dn)
            self.inumApllience = dne[0][0][1]


ldif_parser = MyLDIF(open('gluu.ldif'))
ldif_parser.parse()

inumOrg_ou = 'o=' + ldif_parser.inumOrg
inumApllience_inum = 'inum=' + ldif_parser.inumApllience

processed_fp = open('gluu_noinum.ldif', 'w')
ldif_writer = LDIFWriter(processed_fp)


def checkIfAsimbaEntry(new_entry):
    for objCls in ('oxAsimbaConfiguration', 'oxAsimbaIDP',
                   'oxAsimbaRequestorPool', 'oxAsimbaSPRequestor',
                   'oxAsimbaSelector'):
        if objCls in new_entry['objectClass']:
            return True


for dn in ldif_parser.DNs:
    dne = explode_dn(dn)
    new_entry = ldif_parser.entries[dn]