def __init__(self, input, output, root, ns1, ns2, outputdir='.'):
    LDIFParser.__init__(self, input)
    try:
        rantoday = open('.rantoday', 'r')
        serial = rantoday.readline()
        print serial
        if serial != '':
            serial = int(serial)
        else:
            serial = 0
        serialNum = serial + 1
        rantoday.close()
        rantoday = open('.rantoday', 'w+')
        rantoday.write(str(serialNum))
        print serialNum
        rantoday.close()
    except IOError:
        exit("couldn't read or create a .rantoday file in the local directory; "
             "this file counts reruns so the zone serial keeps increasing")
    self.serial = serialNum
    self.writer = LDIFWriter(output)
    self.megaArray = {}
    self.cnamed = []
    self.valueInEntries = []
    self.megaTree = {}
    self.subDomainRecords = {}
    self.root = root
    self.ns1 = ns1
    self.ns2 = ns2
    self.zoneSubDirectory = outputdir
    self.megaWeirdArray = {}
    self.zoneArray = {}
    self.managedZones = {}
    self.exempted = {2: ['co.uk', 'org.ua', 'com.ar']}
def data_from_ldif(self, filename):
    with open(filename, "rb") as fd:
        parser = LDIFParser(fd)
        for dn, entry in parser.parse():
            doc_id = doc_id_from_dn(dn)

            oc = entry.get("objectClass") or entry.get("objectclass")
            if oc:
                if "top" in oc:
                    oc.remove("top")
                if len(oc) == 1 and oc[0].lower() in ("organizationalunit", "organization"):
                    continue

            table_name = oc[-1]

            if "objectClass" in entry:
                entry.pop("objectClass")
            elif "objectclass" in entry:
                entry.pop("objectclass")

            attr_mapping = OrderedDict({
                "doc_id": doc_id,
                "objectClass": table_name,
                "dn": dn,
            })
            for attr in entry:
                value = self.transform_value(attr, entry[attr])
                attr_mapping[attr] = value
            yield table_name, attr_mapping
def import_ldif(self, bucket_mappings):
    ctx = prepare_template_ctx(self.manager)
    attr_processor = AttrProcessor()

    for _, mapping in bucket_mappings.items():
        for file_ in mapping["files"]:
            logger.info(f"Importing {file_} file")
            src = f"/app/templates/{file_}"
            dst = f"/app/tmp/{file_}"
            os.makedirs(os.path.dirname(dst), exist_ok=True)

            render_ldif(src, dst, ctx)

            with open(dst, "rb") as fd:
                parser = LDIFParser(fd)
                for dn, entry in parser.parse():
                    if len(entry) <= 2:
                        continue

                    key = id_from_dn(dn)
                    entry["dn"] = [dn]
                    entry = transform_entry(entry, attr_processor)
                    data = json.dumps(entry)

                    # using INSERT will cause a duplication error, but the data is left intact
                    query = 'INSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (
                        mapping["bucket"], key, data,
                    )
                    req = self.client.exec_query(query)
                    if not req.ok:
                        logger.warning(
                            "Failed to execute query, reason={}".format(req.json())
                        )
def ldap_import(ldif_file):
    print(f"Importing LDAP environment from {ldif_file}")
    try:
        parser = LDIFParser(open(ldif_file, "rb"))
        for dn, record in parser.parse():
            print(f"Importing {dn}")
            # print(record)  # Uncomment this line to print the record if you want to debug
            tuple_list_as_bytes = []
            for attribute_name, attribute_values in record.items():
                value_as_bytes = []
                for value in attribute_values:
                    value_as_bytes.append(value.encode("utf-8"))  # str to bytes
                tuple_list_as_bytes.append((attribute_name, value_as_bytes))
            try:
                conn.add_s(dn, tuple_list_as_bytes)
                print("Import successful")
            except ldap.ALREADY_EXISTS:
                print("Entry already exists in your LDAP, ignoring ...")
            except Exception as err:
                print(f"Unable to import record due to {err}")
    except FileNotFoundError:
        print(f"Unable to locate {ldif_file}. Make sure the path is correct")
        sys.exit(1)
    except ValueError as err:
        print(f"Unable to read ldif. Make sure the LDIF was created with this utility. Error {err}")
        sys.exit(1)
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(f"Unable to import LDAP environment due to {err}. {exc_type}, {fname}, {exc_tb.tb_lineno}")
        sys.exit(1)
def __init__(self, input, output):
    LDIFParser.__init__(self, input)
    self.targetDN = None
    self.targetAttr = None
    self.targetEntry = None
    self.DNs = []
    self.lastDN = None
    self.lastEntry = None

def __init__(self, input_fd):
    LDIFParser.__init__(self, input_fd)
    self.DNs = []
    self.entries = {}
    self.inumOrg = None
    self.inumOrg_dn = None
    self.inumApllience = None
    self.inumApllience_dn = None

def __init__(self, ldif_file):
    LDIFParser.__init__(self, open(ldif_file, 'rb'))
    db_file = os.path.basename(ldif_file)
    sdb_file = os.path.join('/tmp', db_file + '.sdb')
    if os.path.exists(sdb_file):
        os.remove(sdb_file)
    #logging.info("\nDumping %s to shelve database" % ldif_file)
    self.sdb = shelve.open(sdb_file)

def __init__(self, input, vipGroups, debug=False):
    self.vipGroups = {}
    for g in vipGroups:
        self.vipGroups[g] = []
    self.debug = debug
    LDIFParser.__init__(self, input)

def __init__(self, infile=None, outfile=None):
    if infile:
        self._if = open(infile, "r")
    else:
        self._if = sys.stdin
    if outfile:
        self._of = open(outfile, "w")
    else:
        self._of = sys.stdout
    LDIFParser.__init__(self, self._if)

def __init__(self, ldif_fil, ldif_entries):
    """
    `ldif_fil`: the file-like object from which to read ldif entries.
    `ldif_entries`: the queue into which to dump the entries as DnEntry namedtuples.
    """
    LDIFParser.__init__(self, ldif_fil)
    self.ldif_entries = ldif_entries
    self.sort_enforcer = SortEnforcer()

def __init__(self, input, output, keep_dn=False):
    LDIFParser.__init__(self, input)
    self.keep_dn = keep_dn
    self.targetDN = None
    self.targetAttr = None
    self.targetEntry = None
    self.DNs = []
    self.lastDN = None
    self.lastEntry = None
    self.entries = []
    self.dn_entry = []

def __init__(self, input, output, settings, clients_ldif=None, editor=None):
    LDIFParser.__init__(self, input)
    self.domains = {}
    self.domains_lookup = clients_ldif.domains_lookup if clients_ldif else None
    self.editor = editor
    self.a_record_ips = set([])
    self.a_record_hosts = {}
    self.server = Server(settings.couchdb_uri)
    self.db = self.server.get_db(settings.couchdb_db)
def parse_ldif_file(ldif_file: str) -> list[tuple[str, dict[str, Any]]]:
    logger.info(f"### Parsing LDIF file {ldif_file}")
    ldif_fd = BytesIO()
    # Copy the dump, stopping at the "# search result" line so trailing
    # ldapsearch summary output is not fed to the parser.
    with open(ldif_file, "rb") as orig_ldif_fd:
        for line in orig_ldif_fd.readlines():
            if line.startswith(b"# search result"):
                break
            ldif_fd.write(line)
    ldif_fd.seek(0)
    parser = LDIFParser(ldif_fd)
    return list(parser.parse())
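# A minimal usage sketch for parse_ldif_file (not part of the original code).
# It assumes the ldif3-style LDIFParser used above, whose parse() yields
# (dn, entry) pairs with attribute values as lists; "export.ldif" is a
# hypothetical file name.
for dn, entry in parse_ldif_file("export.ldif"):
    print(dn, sorted(entry.keys()))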
def __init__(self, new_ldif_fil, old_ldif_entries, handler):
    """
    `new_ldif_fil`: the file-like object containing the new ldif entries.
    `old_ldif_entries`: a queue from which the old ldif entries can be pulled
        in lexical order by dn.
    `handler`: an object that can handle the additions, changes, and deletions
        that the LDiffer detects.
    """
    LDIFParser.__init__(self, new_ldif_fil)
    self.old_ldif_entries = old_ldif_entries
    self.handler = handler
    self.sort_enforcer = SortEnforcer()
    self.cur_old_dn_entry = None
    # prime the pump with the first entry from the old ldif.
    self._pull_old_dn_entry()
def import_ldif(self):
    optional_scopes = json.loads(self.manager.config.get("optional_scopes", "[]"))
    ldif_mappings = get_ldif_mappings(optional_scopes)

    # hybrid means only a subset of the ldif files is needed
    persistence_type = os.environ.get("CN_PERSISTENCE_TYPE", "ldap")
    ldap_mapping = os.environ.get("CN_PERSISTENCE_LDAP_MAPPING", "default")
    if persistence_type == "hybrid":
        mapping = ldap_mapping
        ldif_mappings = {mapping: ldif_mappings[mapping]}

        # # these mappings require `base.ldif`
        # opt_mappings = ("user", "token",)

        # `user` mapping requires `o=gluu` which is available in `base.ldif`
        # if mapping in opt_mappings and "base.ldif" not in ldif_mappings[mapping]:
        if "base.ldif" not in ldif_mappings[mapping]:
            ldif_mappings[mapping].insert(0, "base.ldif")

    ctx = prepare_template_ctx(self.manager)

    for mapping, files in ldif_mappings.items():
        self.check_indexes(mapping)

        for file_ in files:
            logger.info(f"Importing {file_} file")
            src = f"/app/templates/{file_}"
            dst = f"/app/tmp/{file_}"
            os.makedirs(os.path.dirname(dst), exist_ok=True)

            render_ldif(src, dst, ctx)

            with open(dst, "rb") as fd:
                parser = LDIFParser(fd)
                for dn, entry in parser.parse():
                    self.add_entry(dn, entry)
def __init__(self, input_name, replacement_table=None):
    replacement_table = replacement_table if replacement_table else {}

    # we must keep this one handy because the LDIFParser
    # will not access it immediately, and it must stay open.
    self.stringio = cStringIO.StringIO()

    with open('%s/schemas/%s.ldif' % ('/usr/share/licorn', input_name), 'rb') as ldapf:
        data = ldapf.read()
        for key, value in replacement_table.iteritems():
            data = data.replace(key, value)
        self.stringio.write(data)

    self.stringio.seek(0)
    LDIFParser.__init__(self, self.stringio)

    #print '%s/schemas/%s.ldif' % (
    #    os.path.dirname(__file__), input_name)

    self.ldap_data = []

def get_backup_data(self):
    print('Checking backup file {0}.'.format(self.args.backup_file))
    with gzip.open(self.args.backup_file, 'rb') as f:
        self.ldif_parser = LDIFParser(f)
        self.ldif_parser.handle = self.ldap_parser_handle
        self.ldif_parser.parse()
def __init__(self, input, database_writer):
    LDIFParser.__init__(self, input)
    self.writer = database_writer

def __init__(self, input, output, editor=None):
    LDIFParser.__init__(self, input)
    self.clients = {}
    self.domains_lookup = {}
    self.editor = editor

def __init__(self, input, output):
    LDIFParser.__init__(self, input)
    self.writer = OpenLdapSchemaWriter(output)

def __init__(self, descriptor):
    self.classes = {}
    LDIFParser.__init__(self, descriptor)

def __init__(self, input, output, password):
    LDIFParser.__init__(self, input)
    self.writer = LDIFWriter(output)
    self.password = password

def __init__(self, input_file, processing_object):
    LDIFParser.__init__(self, input_file)
    self.processing_object = processing_object

def __init__(self, input):
    LDIFParser.__init__(self, input)
    self.attributeDictionary = dict()

def __init__(self, input, attributeDictionary, output):
    LDIFParser.__init__(self, input)
    self.attributeDictionary = attributeDictionary
    self.defaultOutput = output

def __init__(self, input):
    LDIFParser.__init__(self, input)
    self.data = {}
    self.data['entries'] = []

def __init__(self, input):
    LDIFParser.__init__(self, input)
    self.objectclasses = {}
    self.attributes = {}

def __init__(self, input):
    LDIFParser.__init__(self, input)
def __init__(self, input_file, log):
    LDIFParser.__init__(self, input_file)
    self.log = log
    self.date = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S +0000")
    self.entries = []

def __init__(self, input, output):
    LDIFParser.__init__(self, input)
    self.writer = LDIFWriter(output)

def __init__(self, input, output):
    LDIFParser.__init__(self, input)
    self.writer = LDIFWriter(output)
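# Several of the copy-through parsers above only set up an LDIFWriter in
# __init__; a minimal sketch of the matching handle() override is shown below.
# It assumes the python-ldap style LDIFParser, whose parse() calls
# self.handle(dn, entry) for every record, and LDIFWriter.unparse(dn, entry)
# to emit one; the pass-through behaviour itself is illustrative, not taken
# from any of the original projects.
def handle(self, dn, entry):
    # write every record back out unchanged
    self.writer.unparse(dn, entry)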
def parseAttrTypeandValue(self):
    return LDIFParser._parseAttrTypeandValue(self)

def __init__(self, descriptor):
    self.users = {}
    LDIFParser.__init__(self, descriptor)

def __init__(self, input, output):
    LDIFParser.__init__(self, input)
    self.attrs = {}

def __init__(self, input):
    LDIFParser.__init__(self, input)

def __init__(self, input):
    LDIFParser.__init__(self, input)
    self.imp_data = []
in_file_ext = in_file_path.suffix
if in_file_ext not in valid_ext:
    fatal_error("Error: Invalid input file!")
if not in_file_path.exists() or in_file_path.is_dir():
    fatal_error("Error: Invalid input file path!")

with in_file_path.open('rb') as f:
    if in_file_ext == '.ml':
        ml_bytes = f.read()
        ml = CscaMasterList.load(ml_bytes)
        verify_and_extract_masterlist(
            ml,
            default_out_dir_csca.joinpath(get_ml_out_dir_name(ml))
        )
    else:
        parser = LDIFParser(f)
        print("Note: DSC and CRL won't be verified against issuing CSCA!")
        for dn, entry in parser.parse():
            # ML
            if 'CscaMasterListData' in entry:
                ml = entry['CscaMasterListData'][0]
                ml = CscaMasterList.load(ml)
                verify_and_extract_masterlist(
                    ml,
                    default_out_dir_csca.joinpath(get_ml_out_dir_name(ml))
                )
            # DSC
            elif 'userCertificate' in entry or 'userCertificate;binary' in entry:
                dn = parse_dn(dn)
                dsc = entry['userCertificate;binary'][0]
class MyRestore():
    def __init__(self, args=None, add_blacklist_attrs=None):
        self.args = args
        self.add_blacklist_attrs = add_blacklist_attrs
        # internal state is kept in underscore-prefixed attributes so the
        # read-only properties below do not shadow (and recurse into) themselves
        self._unique_member_of = set()
        self._backup_data = None
        self._ldap_data = None
        self.lo, self.position = univention.admin.uldap.getAdminConnection()
        self.co = univention.admin.config.config()
        self.ucr = univention.config_registry.ConfigRegistry()
        self.ucr.load()
        univention.admin.modules.update()
        self.operational_mark = [
            'directoryOperation',
            'dSAOperation',
            'distributedOperation',
        ]
        self.operational_attributes = set(['entryCSN', 'entrycsn'])
        self.get_operational_attributes()

    def get_backup_data(self):
        print('Checking backup file {0}.'.format(self.args.backup_file))
        with gzip.open(self.args.backup_file, 'rb') as f:
            self.ldif_parser = LDIFParser(f)
            self.ldif_parser.handle = self.ldap_parser_handle
            self.ldif_parser.parse()

    def identify_udm(self, entry):
        try:
            udm_type = entry.get('univentionObjectType', [None])[0]
            univention.admin.modules.update()
            udm = univention.admin.modules.get(udm_type)
            univention.admin.modules.init(self.lo, self.position, udm)
            return udm.object(self.co, self.lo, self.position, dn=self.args.dn, attributes=entry)
        except Exception:
            return None

    def ldap_parser_handle(self, dn, entry):
        if self.args.verbose or self.args.list_dns:
            print('\t{0}: {1}'.format(self.ldif_parser.records_read, dn))
        if self.args.dn:
            if self.args.restore_membership:
                if self.args.dn.lower() in map(str.lower, entry.get('uniqueMember', [])):
                    self._unique_member_of.add(dn)
            if self.args.dn.lower() == dn.lower():
                if not self.args.restore_membership:
                    self._max_entries = 1
                self._backup_data = entry

    def get_ldap_data(self):
        self._ldap_data = self.lo.get(self.args.dn)

    def get_operational_attributes(self):
        schema = self.lo.search(filter='(objectClass=subschema)', base='cn=subschema', scope='base', attr=['*', '+'])
        for i in schema[0][1].get('attributeTypes'):
            for j in self.operational_mark:
                if j.lower() in i.lower():
                    attr = i.split('NAME ')[1].split("'")[1]
                    self.operational_attributes.add(attr)
                    self.operational_attributes.add(attr.lower())

    def create_modlist(self, new=None, old=None):
        ml = list()
        if new and not old:
            ml = addModlist(new, ignore_attr_types=self.operational_attributes)
        elif new and old:
            ml = modifyModlist(old, new, ignore_attr_types=self.operational_attributes)
        return ml

    def dn_exists(self, dn):
        return bool(self.lo.get(dn))

    def check_blacklist_attrs(self):
        for attr in self.add_blacklist_attrs:
            val = self.backup_data.get(attr, [None])[0]
            if val:
                l_filter = '({0}={1})'.format(attr, escape_filter_chars(val))
                res = self.lo.search(l_filter)
                if res:
                    return argparse.Namespace(value=val, attr=attr, dn=res[0][0])
        return None

    # modify
    def update_from_backup(self):
        self.get_ldap_data()
        ml = self.create_modlist(new=self.backup_data, old=self.ldap_data)
        if ml:
            if self.args.verbose or self.args.dry_run:
                print('\tUpdating {0} with modlist:'.format(self.args.dn))
                my_pretty_print(ml)
            if not self.args.dry_run:
                try:
                    self.lo.lo.modify_ext_s(self.args.dn, ml)
                except Exception:
                    print('ERROR: Modify {0} with attributes'.format(self.args.dn))
                    pprint.pprint(ml)
                    print('failed with:')
                    raise
        else:
            print('No changes from backup data.')

    def update_membership(self):
        if self.args.dry_run:
            udm_object = self.backup_udm_object
        else:
            udm_object = self.ldap_udm_object
        udm_object.open()
        if 'groups' in udm_object:
            udm_object['groups'] = list()
            for grp in self.unique_member_of:
                if self.dn_exists(grp):
                    if self.args.verbose or self.args.dry_run:
                        print('Adding group {0} to {1}'.format(grp, self.args.dn))
                    udm_object['groups'].append(grp)
            if not self.args.dry_run:
                udm_object.modify()

    def add_from_backup(self):
        ml = self.create_modlist(new=self.backup_data)
        if self.args.verbose or self.args.dry_run:
            print('\tAdding {0} with modlist:'.format(self.args.dn))
            my_pretty_print(ml)
        if not self.args.dry_run:
            try:
                self.lo.add(self.args.dn, ml, exceptions=True)
            except Exception:
                print('ERROR: Adding {0} with attributes'.format(self.args.dn))
                pprint.pprint(ml)
                print('failed with:')
                raise

    def delete_in_ldap(self):
        udm_object = self.ldap_udm_object
        if udm_object:
            udm_object.open()
            if self.args.verbose or self.args.dry_run:
                print('\tRemoving {0} from LDAP.'.format(self.args.dn))
            if not self.args.dry_run:
                udm_object.remove()

    # properties
    @property
    def backup_data(self):
        return self._backup_data

    @property
    def backup_udm_object(self):
        return self.identify_udm(self.backup_data)

    @property
    def ldap_udm_object(self):
        self.get_ldap_data()
        return self.identify_udm(self.ldap_data)

    @property
    def unique_member_of(self):
        return self._unique_member_of

    @property
    def ldap_data(self):
        return self._ldap_data
def __init__(self, input, vipGroups, debug=False):
    self.vipGroups = vipGroups
    self.debug = debug
    self.phoneBook = fritzbox.phonebook.Phonebook()
    LDIFParser.__init__(self, input)

def __init__(self, inputFile, group):
    LDIFParser.__init__(self, inputFile)
    self.group = SysGroup.objects.get(group_name=group)
    self.PROCESS_ONLY = 'CN=Users,DC=zsplana,DC=cz'

def __init__(self, url, ldif_file):
    fh = open(ldif_file, 'rb')
    LDIFParser.__init__(self, fh)
    self.url = url

def __init__(self, ldif_file):
    LDIFParser.__init__(self, open(ldif_file, 'rb'))
    self.entries = []

def __init__(self, input, ucr):
    LDIFParser.__init__(self, input)
    self.ucr = ucr
    self.uuid = u'00000000-0000-0000-0000-000000000000'

def __init__(self, inputFile, skip):
    LDIFParser.__init__(self, inputFile)
    self.SKIP_DN = skip
    self.PROCESS_ONLY = 'CN=Users,DC=zsplana,DC=cz'

def __init__(self, ldif_file):
    LDIFParser.__init__(self, open(ldif_file, 'rb'))
    self.entries = []

def __init__(self, input):
    LDIFParser.__init__(self, input)
    self.date = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S +0000")

def __init__(self, fobj):
    LDIFParser.__init__(self, fobj)
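# A minimal end-to-end sketch of how parser subclasses like the ones above are
# typically driven (an assumption based on python-ldap's ldif.LDIFParser, whose
# parse() reads the whole stream and invokes handle() once per entry; the class
# and file names here are hypothetical, and the file mode may need to be text
# rather than binary depending on which ldif library is in use).
from ldif import LDIFParser

class CollectingParser(LDIFParser):
    def __init__(self, fobj):
        LDIFParser.__init__(self, fobj)
        self.entries = []

    def handle(self, dn, entry):
        # accumulate each (dn, attribute-dict) pair for later processing
        self.entries.append((dn, entry))

with open('example.ldif', 'rb') as fobj:  # hypothetical input file
    parser = CollectingParser(fobj)
    parser.parse()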