Example #1
0
    def data_from_ldif(self, filename):
        """Yield ``(table_name, attr_mapping)`` pairs parsed from an LDIF file.

        Each yielded mapping carries ``doc_id``, ``objectClass`` (the table
        name), ``dn``, and every remaining entry attribute transformed via
        ``self.transform_value``.

        :param filename: Path to the LDIF file to read.
        """
        with open(filename, "rb") as fd:
            parser = LDIFParser(fd)

            for dn, entry in parser.parse():
                doc_id = doc_id_from_dn(dn)

                # objectClass attribute casing varies between LDIF producers.
                oc = entry.get("objectClass") or entry.get("objectclass")
                if oc:
                    # "top" is the abstract root class; it never names a table.
                    if "top" in oc:
                        oc.remove("top")

                    # Pure container entries carry no data rows — skip them.
                    if len(oc) == 1 and oc[0].lower() in ("organizationalunit",
                                                          "organization"):
                        continue

                # FIX: the original raised TypeError (oc is None) or
                # IndexError (oc emptied by removing "top") here; entries
                # without a usable objectClass are now skipped instead.
                if not oc:
                    continue

                table_name = oc[-1]

                # Drop whichever casing of the attribute is present
                # (only one, matching the original if/elif behavior).
                for key in ("objectClass", "objectclass"):
                    if key in entry:
                        entry.pop(key)
                        break

                attr_mapping = OrderedDict({
                    "doc_id": doc_id,
                    "objectClass": table_name,
                    "dn": dn,
                })

                for attr in entry:
                    value = self.transform_value(attr, entry[attr])
                    attr_mapping[attr] = value
                yield table_name, attr_mapping
Example #2
0
def ldap_import(ldif_file):
    """Import every entry of *ldif_file* into the LDAP server via ``conn``.

    Per-entry failures are reported and skipped; a missing or unreadable
    file terminates the process with exit status 1.

    :param ldif_file: Path to the LDIF file to import.
    """
    print(f"Importing LDAP environment from {ldif_file}")
    try:
        # FIX: the original leaked the file descriptor; a context manager
        # guarantees it is closed even when parsing raises.
        with open(ldif_file, "rb") as fd:
            parser = LDIFParser(fd)
            for dn, record in parser.parse():
                print(f"Importing {dn}")
                # add_s wants an add-modlist with bytes values; encode each
                # str attribute value to UTF-8.
                tuple_list_as_bytes = [
                    (attribute_name,
                     [value.encode("utf-8") for value in attribute_values])
                    for attribute_name, attribute_values in record.items()
                ]

                try:
                    conn.add_s(dn, tuple_list_as_bytes)
                    print("Import successful")
                except ldap.ALREADY_EXISTS:
                    print("Entry already existing in your LDAP, ignoring ...")
                except Exception as err:
                    print(f"Unable to import record due to {err}")

    except FileNotFoundError:
        print(f"Unable to locate {ldif_file}. Make sure the path is correct")
        sys.exit(1)
    except ValueError as err:
        print(f"Unable to read ldif. Make sure the LDIF was created with this utility. Error {err}")
        sys.exit(1)

    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(f"Unable to import LDAP environment due to {err}. {exc_type}, {fname}, {exc_tb.tb_lineno}")
        sys.exit(1)
    def import_ldif(self, bucket_mappings):
        """Render each mapped LDIF template and insert its entries into the
        corresponding Couchbase bucket via N1QL ``INSERT`` queries."""
        template_ctx = prepare_template_ctx(self.manager)
        processor = AttrProcessor()

        for mapping in bucket_mappings.values():
            for file_ in mapping["files"]:
                logger.info(f"Importing {file_} file")
                src = f"/app/templates/{file_}"
                dst = f"/app/tmp/{file_}"
                os.makedirs(os.path.dirname(dst), exist_ok=True)

                render_ldif(src, dst, template_ctx)

                with open(dst, "rb") as fd:
                    for dn, entry in LDIFParser(fd).parse():
                        # Entries with at most two attributes carry no data.
                        if len(entry) <= 2:
                            continue

                        key = id_from_dn(dn)
                        entry["dn"] = [dn]
                        entry = transform_entry(entry, processor)
                        data = json.dumps(entry)

                        # using INSERT will cause duplication error, but the data is left intact
                        # NOTE(review): query is built by string interpolation;
                        # a key containing `"` would break it — consider a
                        # parameterized query if exec_query supports one.
                        query = 'INSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (
                            mapping["bucket"], key, data)
                        req = self.client.exec_query(query)

                        if not req.ok:
                            reason = req.json()
                            logger.warning(
                                "Failed to execute query, reason={}".format(reason))
Example #4
0
def parse_ldif_file(ldif_file: str) -> list[tuple[str, dict[str, Any]]]:
    """Parse an LDIF file and return its ``(dn, attributes)`` pairs.

    Lines from the ``# search result`` trailer onward (appended by
    ``ldapsearch``) are discarded before parsing.

    :param ldif_file: Path to the LDIF file.
    :return: List of ``(dn, entry)`` tuples produced by ``LDIFParser``.
    """
    logger.info(f"### Parsing LDIF file {ldif_file}")

    ldif_fd = BytesIO()
    # FIX: the original never closed the input file; the context manager
    # releases it as soon as the trimmed copy has been buffered.
    with open(ldif_file, "rb") as orig_ldif_fd:
        for line in orig_ldif_fd:
            if line.startswith(b"# search result"):
                break
            ldif_fd.write(line)

    ldif_fd.seek(0)

    parser = LDIFParser(ldif_fd)
    return list(parser.parse())
Example #5
0
    def import_ldif(self):
        """Render every mapped LDIF template and add its entries to LDAP."""
        optional_scopes = json.loads(self.manager.config.get("optional_scopes", "[]"))
        ldif_mappings = get_ldif_mappings(optional_scopes)

        # Hybrid persistence imports only the configured LDAP mapping.
        persistence_type = os.environ.get("CN_PERSISTENCE_TYPE", "ldap")
        ldap_mapping = os.environ.get("CN_PERSISTENCE_LDAP_MAPPING", "default")
        if persistence_type == "hybrid":
            selected_files = ldif_mappings[ldap_mapping]
            # `base.ldif` supplies parent entries (e.g. the suffix) the
            # other files depend on, so it must come first.
            if "base.ldif" not in selected_files:
                selected_files.insert(0, "base.ldif")
            ldif_mappings = {ldap_mapping: selected_files}

        ctx = prepare_template_ctx(self.manager)

        for mapping, files in ldif_mappings.items():
            self.check_indexes(mapping)

            for file_ in files:
                logger.info(f"Importing {file_} file")
                src = f"/app/templates/{file_}"
                dst = f"/app/tmp/{file_}"
                os.makedirs(os.path.dirname(dst), exist_ok=True)

                render_ldif(src, dst, ctx)

                with open(dst, "rb") as fd:
                    for dn, entry in LDIFParser(fd).parse():
                        self.add_entry(dn, entry)
Example #6
0
    # Validate the input path: extension must be one of `valid_ext`, and the
    # path must be an existing regular file.
    in_file_ext = in_file_path.suffix
    if in_file_ext not in valid_ext:
        fatal_error("Error: Invalid input file!")

    if not in_file_path.exists() or in_file_path.is_dir():
        fatal_error("Error: Invalid input file path!")

    with in_file_path.open('rb') as f:
        if in_file_ext == '.ml':
            # A .ml file is a raw CSCA master list; load and verify directly.
            ml_bytes = f.read()
            ml = CscaMasterList.load(ml_bytes)
            verify_and_extract_masterlist(ml, 
                default_out_dir_csca.joinpath(get_ml_out_dir_name(ml))
            )
        else:
            # Any other accepted extension is treated as LDIF.
            parser = LDIFParser(f)
            print("Note: DSC and CRL won't be verified against issuing CSCA!")
            for dn, entry in parser.parse():

                # ML entry: embedded CSCA master list blob.
                if 'CscaMasterListData' in entry:
                    ml = entry['CscaMasterListData'][0]
                    ml = CscaMasterList.load(ml)
                    verify_and_extract_masterlist(ml, 
                        default_out_dir_csca.joinpath(get_ml_out_dir_name(ml))
                    )

                # DSC entry: document signer certificate.
                elif 'userCertificate' in entry or 'userCertificate;binary' in entry:
                    dn = parse_dn(dn)
                    # NOTE(review): only the ';binary' key is read here even
                    # though a plain 'userCertificate' key also satisfies the
                    # branch condition — possible KeyError; confirm the data.
                    dsc = entry['userCertificate;binary'][0]
 def get_backup_data(self):
     """Parse the gzip-compressed backup LDIF file, routing each record
     through ``self.ldap_parser_handle`` via the parser's ``handle`` hook."""
     print('Checking backup file {0}.'.format(self.args.backup_file))
     with gzip.open(self.args.backup_file, 'rb') as f:
         # NOTE(review): assigning `handle` monkey-patches the per-record
         # callback (python-ldap `ldif.LDIFParser` style, where parse()
         # returns nothing) — confirm this LDIFParser supports that API.
         self.ldif_parser = LDIFParser(f)
         self.ldif_parser.handle = self.ldap_parser_handle
         self.ldif_parser.parse()