def import_data(file_path):
    """Load contacts from an LDIF file.

    Parameters:
        file_path: path to the LDIF file to read (opened in binary mode,
            as ldif3's LDIFParser expects).

    Returns:
        A list of Contact.Contact objects, one per LDIF entry.
    """
    # Open inside a context manager so the handle is closed even if
    # parsing raises (the original leaked the file object).
    with open(file_path, 'rb') as ldif_file:
        parser = LDIFParser(ldif_file)
        return [
            Contact.Contact.from_dic(LdifInputOutput.build_contact(entry))
            for dn, entry in parser.parse()
        ]
Exemplo n.º 2
0
def parse_ldif(myfile, newsletter, ignore_errors=False):
    """
    Parse addresses from LDIF file-object into newsletter.

    Returns a dictionary mapping email addresses into Subscription objects.
    """

    from ldif3 import LDIFParser

    address_list = AddressList(newsletter, ignore_errors)

    try:
        for dn, entry in LDIFParser(myfile).parse():
            if 'mail' not in entry:
                # An address-less entry is fatal unless errors are ignored.
                if not ignore_errors:
                    raise forms.ValidationError(
                        _("Some entries have no e-mail address."))
                continue

            # Attribute values come back as lists; take the first of each.
            name = entry['cn'][0] if 'cn' in entry else None
            address_list.add(entry['mail'][0], name)

    except ValueError as e:
        # Malformed LDIF input surfaces as a form validation error.
        if not ignore_errors:
            raise forms.ValidationError(e)

    return address_list.addresses
Exemplo n.º 3
0
def student_rollno(request):
    """Populate Personnel.RollNumber from the 'gecos' attribute of the
    LDIF exports data1.ldif and data2.ldif.

    Entries missing any of the required attributes are skipped.  Fixes
    over the original: the bare ``except:`` is narrowed to ``KeyError``
    (the only thing the attribute lookups raise), the file handle is
    closed via a context manager, and dead locals (Start/End, DEPT,
    FName, props) that this function never used are removed.
    """
    for i in range(2):
        with open('data' + str(i + 1) + '.ldif', 'rb') as ldif_file:
            parser = LDIFParser(ldif_file)
            for dn, entry in parser.parse():
                try:
                    print(entry["uid"], entry["givenname"], entry["sn"],
                          entry["mail"], entry["gecos"])
                except KeyError:
                    # Entry lacks a required attribute; skip it.
                    continue
                u = User.objects.get(username=entry["uid"][0])
                p = Personnel.objects.get(LDAP_id=u.id)
                p.RollNumber = entry["gecos"][0]
                p.save()
Exemplo n.º 4
0
    def get_entries(self, filename):
        """Parse *filename* (LDIF) into a fresh Phonebook and return it.

        Each entry is delegated to self._handle, which fills
        self._phoneBook as a side effect.
        """
        # Context manager closes the LDIF file (the original leaked it).
        with open(filename, 'rb') as ldif_file:
            parser = LDIFParser(ldif_file)

            self._phoneBook = fritzbox.phonebook.Phonebook()
            for dn, entry in parser.parse():
                self._handle(dn, entry)
        return self._phoneBook
Exemplo n.º 5
0
def parse_ldif(myfile, newsletter, ignore_errors=False):
    """
    Parse addresses from LDIF file-object into newsletter.

    Returns a dictionary mapping email addresses into Subscription objects.

    When ignore_errors is False, entries without a 'mail' attribute or a
    ValueError from the parser are raised as forms.ValidationError.
    """

    # Imported lazily so the module loads even without ldif3 installed.
    from ldif3 import LDIFParser

    address_list = AddressList(newsletter, ignore_errors)

    try:
        parser = LDIFParser(myfile)

        for dn, entry in parser.parse():
            if 'mail' in entry:
                # LDIF attribute values are lists; take the first address.
                email = entry['mail'][0]

                if 'cn' in entry:
                    name = entry['cn'][0]
                else:
                    name = None

                address_list.add(email, name)

            elif not ignore_errors:
                raise forms.ValidationError(
                    _("Some entries have no e-mail address."))

    except ValueError as e:
        # Malformed LDIF raises ValueError; surface it as a form error.
        if not ignore_errors:
            raise forms.ValidationError(e)

    return address_list.addresses
Exemplo n.º 6
0
def student_users(request):
    """Create User, Personnel and Student_Period records from the LDIF
    exports data1.ldif and data2.ldif.

    Original quirk preserved: an entry missing a required attribute is
    skipped and flips DEPT to 2 for the remaining entries of that file.
    Fixes: bare ``except:`` narrowed to KeyError, file handles closed
    via context manager, unused dn.split()/props removed.
    """
    start_year = 2014
    end_year = 2018
    for i in range(2):
        DEPT = 1
        with open('data' + str(i + 1) + '.ldif', 'rb') as ldif_file:
            parser = LDIFParser(ldif_file)
            for dn, entry in parser.parse():
                try:
                    print(entry["uid"], entry["givenname"], entry["sn"],
                          entry["mail"])
                except KeyError:
                    DEPT = 2
                    continue
                first_name = entry["givenname"][0]
                # Truncate long names so they fit Django's first_name column.
                if len(first_name) > 30:
                    first_name = first_name[:20]

                u = User.objects.create_user(username=entry["uid"][0],
                                             password="******",
                                             first_name=first_name,
                                             last_name=entry["sn"][0],
                                             email=entry["mail"][0])
                p = Personnel(Dept_id=DEPT, LDAP_id=u.id, Role_id=2)
                p.save()
                q = Student_Period(Student_ID_id=p.Person_ID,
                                   Start_Year=start_year,
                                   End_Year=end_year)
                q.save()
        start_year += 1
        end_year += 1
Exemplo n.º 7
0
    def get_entries(self, args):
        """Parse the LDIF file named by args.input and return its entries.

        Side effects: records the parse timestamp (UTC) in self.date and
        resets self.entries, which self._handle fills per entry.
        """
        # Context manager closes the input file (the original leaked it).
        with open(args.input, 'rb') as ldif_file:
            parser = LDIFParser(ldif_file)

            self.date = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S +0000")
            self.entries = []

            for dn, entry in parser.parse():
                self._handle(dn, entry)

        return self.entries
Exemplo n.º 8
0
    def get_entries(self, filename):
        """Parse *filename* (LDIF) and return the VIP-group buckets.

        self._handle is expected to distribute entries into
        self._vipGroups as a side effect.
        """
        # Context manager closes the LDIF file (the original leaked it).
        with open(filename, 'rb') as ldif_file:
            parser = LDIFParser(ldif_file)

            # One empty bucket per configured VIP group name.
            self._vipGroups = {name: [] for name in self._vipGroupNames}

            for dn, entry in parser.parse():
                self._handle(dn, entry)

        return self._vipGroups
Exemplo n.º 9
0
def faculty_users(request):
    """Create a Django user plus a Personnel row (Dept 1, Role 3) for
    every entry of data.ldif.

    Fixes over the original: the file handle is closed via a context
    manager, and the unused ``i`` counter and ``props`` dict (and the
    discarded ``dn.split`` result) are removed.
    """
    with open('data.ldif', 'rb') as ldif_file:
        parser = LDIFParser(ldif_file)
        for dn, entry in parser.parse():
            print(entry["uid"], entry["givenname"], entry["sn"], entry["mail"])
            u = User.objects.create_user(username=entry["uid"][0],
                                         password="******",
                                         first_name=entry["givenname"][0],
                                         last_name=entry["sn"][0],
                                         email=entry["mail"][0])
            p = Personnel(Dept_id=1, LDAP_id=u.id, Role_id=3)
            p.save()
Exemplo n.º 10
0
 def is_invalid_ldif(filehandle):
     """Return a truthy error if *filehandle* is not valid non-empty LDIF.

     Returns False when the stream parses and yields at least one record;
     otherwise returns the ValueError describing the failure (truthy, so
     callers can use the result directly as an error sentinel).
     """
     try:
         parser = LDIFParser(filehandle)
         # returns a generator so step through to force processing and trigger exception
         for _dn, entry in parser.parse():
             log.debug('got entry record: %s', _dn)
             # looks the same
             #log.debug('%s', pformat(entry))
             log.debug('%s', entry)
         log.debug('%s records read', parser.records_read)
         # An empty-but-well-formed file is also treated as invalid.
         if not parser.records_read:
             raise ValueError('no records read')
         return False
     except ValueError as _:
         log.debug('ValueError: %s', _)
         return _
Exemplo n.º 11
0
    def import_ldif(self, bucket_mappings):
        """Render each mapping's LDIF templates, convert entries into N1QL
        INSERT statements, and execute the generated queries.

        Re-runs are safe by design: INSERT fails on duplicate keys and the
        existing document is left intact.

        Fixes over the original: ``dict.items()`` instead of the Py2-only
        ``iteritems()``, ``logger.warning`` instead of the deprecated
        ``logger.warn``, and both the rendered LDIF and the query file are
        closed via context managers.
        """
        ctx = prepare_template_ctx(self.manager)
        attr_processor = AttrProcessor()

        for _, mapping in bucket_mappings.items():
            for file_ in mapping["files"]:
                src = "/app/templates/ldif/{}".format(file_)
                dst = "/app/tmp/{}".format(file_)
                render_ldif(src, dst, ctx)

                query_file = "/app/tmp/{}.n1ql".format(file_)

                with open(dst) as ldif_file, open(query_file, "a+") as f:
                    parser = LDIFParser(ldif_file)
                    for dn, entry in parser.parse():
                        # Skip near-empty entries (dn/objectClass only).
                        if len(entry) <= 2:
                            continue

                        key = get_key_from(dn)
                        entry["dn"] = [dn]
                        entry = transform_entry(entry, attr_processor)
                        data = json.dumps(entry)
                        # using INSERT will cause duplication error, but the data is left intact
                        query = 'INSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s);\n' % (
                            mapping["bucket"], key, data)
                        f.write(query)

                # exec query
                logger.info(
                    "Importing {} file into {} bucket (if needed)".format(
                        file_, mapping["bucket"]))
                with open(query_file) as f:
                    for line in f:
                        query = line.strip()
                        if not query:
                            continue

                        req = self.client.exec_query(query)
                        if not req.ok:
                            logger.warning(
                                "Failed to execute query, reason={}".format(
                                    req.json()))
Exemplo n.º 12
0
    def parseDscCrlFile(self, dscCrlFile, connection: Connection):
        """Parsing DSC/CRL file.

        Walks every LDIF entry and stores document signer certificates
        (DSC) and certificate revocation lists (CRL) into the database
        via the given connection.
        """
        parser = LDIFParser(dscCrlFile)
        for dn, entry in parser.parse():
            if 'userCertificate;binary' in entry:
                # The country code is embedded in the DN ("...c=XX,dc=data...").
                countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=data){1}',
                                         dn)[0][0]
                dsc = x509.Certificate.load(*entry['userCertificate;binary'])
                #parse to our object
                # Rebinding __class__ re-types the parsed cert in place,
                # then __init__() runs the subclass initialiser on it.
                dsc.__class__ = DocumentSignerCertificate
                dsc.__init__()
                #write to DB
                writeToDB_DSC(dsc, countryCode, connection)

            if 'certificateRevocationList;binary' in entry:
                countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=data){1}',
                                         dn)[0][0]
                revocationList = crl.CertificateList.load(
                    *entry['certificateRevocationList;binary'])
                #parse to our object
                revocationList.__class__ = CertificateRevocationList
                revocationList.__init__()
                #write to DB
                writeToDB_CRL(revocationList, countryCode, connection)
Exemplo n.º 13
0
def import_ldif():
    """Smoke-test every default LDIF template by constructing an LDIFParser.

    Returns:
        0 when every file could be opened and handed to LDIFParser,
        -1 when any file failed (the traceback is printed).
    """
    ldif_mappings = {
        "default": [
            "base.ldif",
            "attributes.ldif",
            "scopes.ldif",
            "scripts.ldif",
            "configuration.ldif",
            "scim.ldif",
            "oxidp.ldif",
            "oxtrust_api.ldif",
            "passport.ldif",
            "oxpassport-config.ldif",
            "gluu_radius_base.ldif",
            "gluu_radius_server.ldif",
            "clients.ldif",
            "oxtrust_api_clients.ldif",
            "scim_clients.ldif",
            "o_metric.ldif",
            "gluu_radius_clients.ldif",
            "passport_clients.ldif",
            "scripts_casa.ldif",
        ]
    }
    exitCode = 0
    for file_ in ldif_mappings["default"]:
        print("Checking {} file...".format(file_))
        src = "./templates/ldif/{}".format(file_)
        try:
            # Close the handle even on success (the original leaked it).
            with open(src) as fd:
                parser = LDIFParser(fd)
            print("Successfully tested", file_)
        except Exception:
            # Narrowed from a bare except: still reports any failure but
            # no longer swallows SystemExit/KeyboardInterrupt.
            traceback.print_exc()
            print(file_, " validation failed")
            exitCode = -1
        print("--------------------")
    return exitCode
Exemplo n.º 14
0
# Persist the raw ldapsearch output to the temp file for the parse below;
# the with-statement closes the handle even if the write raises.
with open(temp_file, "w") as fh:
    fh.write(ldapsearch_res)

# Template for the per-user report printed by the parse loop below; each
# placeholder is filled from the matching attribute of a parsed LDIF entry.
LDAP_USER_FORMAT = '''-----------------------------------------
Logon Name: {logon_name}
UPN: {user_principal_name}
Full Name: {full_name}
First Name: {first_name}
Last Name: {last_name}
Email: {email}
Display Name: {display_name}
Groups: {groups}'''

parser = LDIFParser(open(temp_file, 'rb'))
print("Domain users:")
for dn, entry in parser.parse():
    #print('got entry record: %s' % dn)
    #pprint(entry)
    groups = {}
    logon_name = getFirstOrEmpty(entry.get('sAMAccountName', []))
    user_principal_name = getFirstOrEmpty(entry.get('userPrincipalName', []))
    full_name = getFirstOrEmpty(entry.get('name', []))
    first_name = getFirstOrEmpty(entry.get('givenName', []))
    last_name = getFirstOrEmpty(entry.get('sn', []))
    email = getFirstOrEmpty(entry.get('mail', []))
    display_name = getFirstOrEmpty(entry.get('displayName', []))
    groups = entry.get('memberOf', [])
    print(
        LDAP_USER_FORMAT.format(
Exemplo n.º 15
0
            retry = False
        except ResponseNotReady:
            logging.info("Google service not ready, retrying")

    # Raw user list
    users = service.users().list(customer="my_customer").execute()['users']
    # List of existing users (emails)
    g_users = [u['primaryEmail'] for u in users]
    logging.info("Google accounts list retrieved {}".format(g_users))
    protected_accounts = [
        acc + "@" + DOMAIN for acc in config['protected_accounts']
    ]
    logging.info("Protected accounts list : {}".format(g_users))

    # Parsing stdin as ldif
    parsed = LDIFParser(sys.stdin).parse()
    datas = parsed.next()
    data_dict = datas[1]

    account_name = data_dict.get("sAMAccountName", False)
    if (account_name):
        account_name = account_name[0]
        primaryEmail = account_name + "@" + DOMAIN
        # if no givenName, we use account_name
        givenName = data_dict.get('givenName', account_name)
        # if no familyName, we use account_name
        familyName = data_dict.get('sn', account_name)

    passwd = data_dict.get("virtualCryptSHA512", False)
    if (passwd):
        passwd = passwd[0]
Exemplo n.º 16
0
    def parseCSCAFile(self, CSCAFile, connection: Connection):
        """Parsing CSCA file.

        Loads every CSCA master list found in the LDIF stream, verifies
        what it can (the list itself, each CSCA against its issuer, and
        the master-list signer certificates) and writes verified CSCAs to
        the database via self.verifyCSCAandWrite.
        """
        parser = LDIFParser(CSCAFile)
        for dn, entry in parser.parse():
            if 'CscaMasterListData' in entry:
                ml = CscaMasterList()
                masterList = ml.load(*entry['CscaMasterListData'])
                try:
                    # verify masterlist - if failed it returns exception
                    masterList.verify()
                except Exception as e:
                    # Failure is logged but processing continues.
                    self._log.error(
                        "Integrity verification failed for master list issued by {}."
                        .format(masterList.signerCertificates[0].subject.
                                native['country_name']))

                # Index CSCAs by subject key identifier for issuer lookups.
                cscas = {}
                skipped_cscas = []
                for csca in masterList.cscaList:
                    if csca.key_identifier not in cscas:
                        cscas[csca.key_identifier] = csca

                    if csca.self_signed != 'maybe':
                        # Cross-signed CSCA whose issuer has not been seen
                        # yet: defer it to the second pass below.
                        if csca.authority_key_identifier not in cscas:
                            skipped_cscas.append(csca)
                            continue
                        issuing_cert = cscas[csca.authority_key_identifier]
                    else:
                        issuing_cert = csca

                    self.verifyCSCAandWrite(csca, issuing_cert, connection)

                # Second pass: CSCAs whose issuer appeared later in the list.
                for csca in skipped_cscas:
                    issuer_cert = get_issuer_cert(csca, cscas)
                    if issuer_cert is None:
                        self._log.error(
                            "Could not verify signature of CSCA C={} SerNo={}. Issuing CSCA not found! The CSCA is skipped and not stored in database."
                            .format(
                                csca.subject.native['country_name'],
                                hex(csca.serial_number).rstrip("L").lstrip(
                                    "0x")))
                    else:
                        self.verifyCSCAandWrite(csca, issuer_cert, connection)

                    # verify master list signer certificates
                for mlsig_cert in masterList.signerCertificates:
                    issuer_cert = get_issuer_cert(mlsig_cert, cscas)
                    if issuer_cert is None:
                        self._log.info(
                            "Could not verify signature of master list signer certificate. Issuing CSCA not found! [C={} Ml-Sig-SerNo={}]"
                            .format(
                                mlsig_cert.subject.native['country_name'],
                                hex(mlsig_cert.serial_number).rstrip(
                                    "L").lstrip("0x")))
                    else:
                        try:
                            mlsig_cert.verify(issuer_cert)
                        except Exception as e:
                            # Signer verification failures are informational only.
                            self._log.info(
                                "Failed to verify master list signer C={} Ml-Sig-SerNo={}\n\treason: {}"
                                .format(
                                    mlsig_cert.subject.native['country_name'],
                                    hex(mlsig_cert.serial_number).rstrip(
                                        "L").lstrip("0x"), str(e)))
Exemplo n.º 17
0
from asn1crypto import crl, x509
import re

#from database.storage.DSC import CertX509
from pymrtd.pki.crl import writeToDB, readFromDB

from pymrtd.pki.crl import CertificateRevocationList

from database import Connection

# Database connection used to store the parsed certificates and CRLs.
conn = Connection("nejko", "nejko", "icao")


# country code -> {serial number -> certificate}
certificateList = {}
# country code -> certificate revocation list
revocationList = {}
# NOTE(review): hard-coded absolute developer path — parameterise before reuse.
parser = LDIFParser(open('C://Users/nejko/Desktop/ZeroPass/B1/random/parseCSCAandCRL/database/icaopkd-001-dsccrl-003749.ldif', 'rb'))
for dn, entry in parser.parse():
    if 'userCertificate;binary' in entry:
        # Country code is embedded in the DN ("...c=XX,dc=database...").
        countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=database){1}', dn)[0][0]
        cert = x509.Certificate.load(*entry['userCertificate;binary'])
        if countryCode not in certificateList:
            certificateList[countryCode] = {}
        certificateList[countryCode][cert.serial_number] = cert

    if 'certificateRevocationList;binary' in entry:
        countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=database){1}', dn)[0][0]
        ##revocationList[countryCode] = x509.load_der_x509_crl(*entry['certificateRevocationList;binary'], default_backend())
        revocationList[countryCode] = crl.CertificateList.load(*entry['certificateRevocationList;binary'])
        revocationListInObject = revocationList[countryCode]
        #revocationListInObject1 = CertificateRevocationList(revocationListInObject)
        # Re-type the parsed CRL in place to the project's subclass.
        revocationListInObject.__class__ = CertificateRevocationList
Exemplo n.º 18
0
#!/usr/bin/env python
from ldif3 import LDIFParser
from pprint import pprint
import detect

# This script uses 'keys.ldif' as input LDIF file AND expect `sshPublicKey` as attribute for SSH public keys

# Count entries whose SSH public key detect.detect() flags as vulnerable.
vuln = 0
with open('keys.ldif', 'rb') as ldif_file:
    parser = LDIFParser(ldif_file)
    for dn, entry in parser.parse():
        # dict.has_key() was removed in Python 3; use the `in` operator.
        if 'sshPublicKey' not in entry:
            continue
        for pubkey in entry['sshPublicKey']:
            if detect.detect(dn, pubkey):
                vuln = vuln + 1
print('SUMMARY', 'Found {} vulnerable keys'.format(vuln))
Exemplo n.º 19
0
# Edit these variables based on the hash and org config
# NOTE(review): an API token is committed in source below — rotate it and
# load it from the environment or a secrets store instead.
api_token = '004cSmv2L5UFSNoSOde3mkWpr0BkWBdzR1KJO6wKxf'
tenant = 'https://telemann2.oktapreview.com'
salt_order = 'POSTFIX'
hash_algorithm = 'SHA-1'

# Don't change these
url = '/api/v1/users'
# The token is sent via the 'SSWS' Authorization scheme.
request_headers = {
    'Accept': 'application/json',
    'Content-Type': 'application/json',
    'Authorization': 'SSWS ' + api_token
}

parser = LDIFParser(open('/Users/karmenlei/Downloads/sha1.entry.ldif', 'rb'))
for dn, entry in parser.parse():
    print('got entry record: %s' % dn)
    print(entry['userPassword'][0])
    password_str = entry['userPassword'][0]
    # strip off the {SHA} prefix
    password_len = len(password_str)
    password_str = password_str[5:password_len]
    print('Actual password hashed value: ' + password_str)
    # populate the JSON payload with the user profile
    payload = {
                "profile": {
                    "firstName": entry['givenName'][0],
                    "lastName": entry['sn'][0],
                    "email": entry['mail'][0],
                    "login": entry['mail'][0],
Exemplo n.º 20
0
    def import_ldif(self):
        """Import every LDIF file of the active persistence mappings.

        For hybrid persistence only the configured mapping's subset is
        used, with "base.ldif" prepended when missing since the other
        entries depend on the base entry it creates.  Each rendered file
        is parsed and its entries handed to self.add_entry.

        Fixes over the original: ``dict.items()`` instead of the
        Py2-only ``iteritems()`` and the rendered file is closed via a
        context manager.
        """
        ldif_mappings = {
            "default": [
                "base.ldif",
                "attributes.ldif",
                "scopes.ldif",
                "scripts.ldif",
                "configuration.ldif",
                "scim.ldif",
                "oxidp.ldif",
                "oxtrust_api.ldif",
                "passport.ldif",
                "oxpassport-config.ldif",
                "gluu_radius_base.ldif",
                "gluu_radius_server.ldif",
                "clients.ldif",
                "oxtrust_api_clients.ldif",
                "scim_clients.ldif",
                "o_metric.ldif",
                "gluu_radius_clients.ldif",
                "passport_clients.ldif",
                "scripts_casa.ldif",
            ],
            "user": [
                "people.ldif",
                "groups.ldif",
            ],
            "site": [
                "o_site.ldif",
            ],
            "cache": [],
            "token": [],
        }

        # hybrid means only a subsets of ldif are needed
        if GLUU_PERSISTENCE_TYPE == "hybrid":
            mapping = GLUU_PERSISTENCE_LDAP_MAPPING
            ldif_mappings = {mapping: ldif_mappings[mapping]}

            # `user` mapping requires `o=gluu` which available in `base.ldif`
            if "base.ldif" not in ldif_mappings[mapping]:
                ldif_mappings[mapping].insert(0, "base.ldif")

        ctx = prepare_template_ctx(self.manager)

        for mapping, files in ldif_mappings.items():
            self.check_indexes(mapping)

            for file_ in files:
                logger.info("Importing {} file".format(file_))
                src = "/app/templates/ldif/{}".format(file_)
                dst = "/app/tmp/{}".format(file_)
                render_ldif(src, dst, ctx)

                with open(dst) as ldif_file:
                    parser = LDIFParser(ldif_file)
                    for dn, entry in parser.parse():
                        self.add_entry(dn, entry)
Exemplo n.º 21
0
# Edit these variables based on the hash and org config
# NOTE(review): an API token is committed in source below — rotate it and
# load it from the environment or a secrets store instead.
api_token = '004cSmv2L5UFSNoSOde3mkWpr0BkWBdzR1KJO6wKxf'
tenant = 'https://telemann2.oktapreview.com'
salt_order = 'POSTFIX'
hash_algorithm = 'SHA-512'

# Don't change these
url = '/api/v1/users'
# The token is sent via the 'SSWS' Authorization scheme.
request_headers = {
    'Accept': 'application/json',
    'Content-Type': 'application/json',
    'Authorization': 'SSWS ' + api_token
}

parser = LDIFParser(open('/Users/karmenlei/Downloads/dummytest45.ldif', 'rb'))
for dn, entry in parser.parse():
    print('got entry record: %s' % dn)
    pprint(entry['userPassword'][0])
    password_str = entry['userPassword'][0]
    password_len = len(password_str)
    # Need to strip off the {SSHA512} prefix
    password_str = password_str[9:password_len]
    print('Revised password value: ' + password_str)
    password_base64_decoded = base64.b64decode(password_str)
    # Password length of a SSHA512 hash is always 64 bytes long
    # Subtract 64 from the hash will give the length of the salt
    salt = password_base64_decoded[64:len(password_base64_decoded)]
    print('salt:' + salt.hex())
    print('salt base64 encoded: ' + base64.b64encode(salt).decode("utf-8"))
    password_hash = password_base64_decoded[0:64]
Exemplo n.º 22
0
        try:
            if not entry.get("mail"):
                pass
            message = parse_and_send_email(record=entry, dn=dn)
            sent_list.append({",".join(entry.get("mail")): message})
            # time.sleep(10)
        except Exception as e:
            error.append(e)

    send_os_mail(MASTER_MAIL, data=sent_list, subject="Report")
    # time.sleep(10)
    if error:
        send_os_mail(MASTER_MAIL, data=error, subject="Failed To Send Email")


def create_arg_parser():
    """Build the command-line argument parser for the send-mail script."""
    arg_parser = argparse.ArgumentParser(
        description="Description of send mail script")
    # -fp is accepted as a short alias for -inputDirectory.
    arg_parser.add_argument(
        "-inputDirectory", "-fp", help="Path to the ldif file directory."
    )
    return arg_parser


if __name__ == "__main__":
    arg_parser = create_arg_parser()
    parsed_args = arg_parser.parse_args(sys.argv[1:])
    # NOTE(review): os.path.exists(None) raises TypeError when the argument
    # is omitted — preexisting behaviour, left unchanged.
    if os.path.exists(parsed_args.inputDirectory):
        # Close the LDIF file when done (the original leaked the handle).
        with open(parsed_args.inputDirectory, "rb") as ldif_file:
            read_file_and_send_email(parser=LDIFParser(ldif_file))
Exemplo n.º 23
0
def main(self):
    """Entry point: parse CLI flags, then inject LDIF users/groups into
    MongoDB and/or export the Users collection to CSV.

    Fixes over the original: the attribute checks used the substring test
    ``element[0] in "cn"`` (etc.), which also matched attributes such as
    "c" or "l" ("l" is a substring of "mail"); exact equality is what was
    intended.  LDIF file handles are now closed via context managers and
    the unused ``user = {}`` local is removed.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument("-f",
                        "--ldiffile",
                        type=str,
                        dest='ldiffile',
                        help="LDIF File")
    parser.add_argument("-b",
                        "--database",
                        type=str,
                        dest='dbname',
                        help="DB Name")
    parser.add_argument("--inject", help="Inject Data", action="store_true")
    parser.add_argument("--export", help="Export Data", action="store_true")
    parser.add_argument("--user", help="User Import", action="store_true")
    parser.add_argument("--group", help="Group Import", action="store_true")

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    args = parser.parse_args()

    ### Connect to Database
    client = MongoClient()
    db = client[args.dbname]
    UserCollection = db['Users']
    GroupCollection = db['Groups']

    if args.inject:
        if args.user:
            with open(args.ldiffile, 'rb') as ldif_file:
                for dn, entry in LDIFParser(ldif_file).parse():
                    cn = ""
                    mail = ""
                    uid = ""

                    for attr, values in entry.items():
                        ### Get the CN (exact match, not substring)
                        if attr == "cn":
                            cn = values[0]

                        ### Get the mail
                        if attr == "mail":
                            mail = values[0]

                        ### Get the UID
                        if attr == "uid":
                            uid = values[0]

                    if cn != mail:
                        insertLineToDB(
                            {
                                'dn': dn,
                                'cn': cn,
                                'mail': mail,
                                'uid': uid
                            }, UserCollection)
                    else:
                        print("Found " + cn + " in " + mail + "for :" + dn)

        if args.group:
            with open(args.ldiffile, 'rb') as ldif_file:
                for dn, entry in LDIFParser(ldif_file).parse():
                    cn = ""
                    member = []

                    for attr, values in entry.items():
                        ### Get the CN
                        if attr == "cn":
                            cn = values[0]

                        ### Collect the members (keeps the original shape:
                        ### a list of value-lists)
                        if attr == "member":
                            member.append(values)

                    print({'cn': cn, 'member': member})
                    insertLineToDB({
                        'dn': dn,
                        'cn': cn,
                        'member': member
                    }, GroupCollection)

    if args.export:
        pd.DataFrame(list(UserCollection.find())).to_csv("userexport.csv")
Exemplo n.º 24
0
def connection(**kwargs):
    """
    Creates and returns a connection to the LDAP server.

    The user identifier, if given, should be keyword arguments matching the fields
    in settings.LDAP_AUTH_USER_LOOKUP_FIELDS, plus a `password` argument.

    Yields a Connection wrapper, or None when connecting or rebinding fails.
    """
    # Format the DN for the username.
    format_username = import_func(settings.LDAP_AUTH_FORMAT_USERNAME)
    kwargs = {key: value for key, value in kwargs.items() if value}
    username = None
    password = None
    if kwargs:
        password = kwargs.pop("password")
        username = format_username(kwargs)
    # Configure the connection.
    if settings.LDAP_AUTH_USE_TLS:
        auto_bind = ldap3.AUTO_BIND_TLS_BEFORE_BIND
    else:
        auto_bind = ldap3.AUTO_BIND_NO_TLS

    # Connect.
    try:
        # A leftover `assert settings.LDAP_AUTH_LDIF` used to precede this
        # branch; it made the real-server path below unreachable (and was
        # stripped under `python -O`), so it has been removed — the `if`
        # already selects the correct branch.
        if settings.LDAP_AUTH_LDIF:
            c = ldap3.Connection(
                "mock_server",
                user=username,
                password=password,
                client_strategy=ldap3.MOCK_SYNC,
                raise_exceptions=True,
            )
            # Load entries from LDIF file, closing the handle afterwards.
            with open(settings.LDAP_AUTH_LDIF, "rb") as ldif_file:
                for dn, entry in LDIFParser(ldif_file).parse():
                    c.strategy.add_entry(dn, entry)
            # Per https://ldap3.readthedocs.io/mocking.html:
            # "You cannot use the auto_bind parameter because the DIT is populated after the creation of the Connection object."
            # Bind manually
            c.bind()
        else:
            c = ldap3.Connection(
                ldap3.Server(
                    settings.LDAP_AUTH_URL,
                    allowed_referral_hosts=[("*", True)],
                    get_info=ldap3.NONE,
                    connect_timeout=settings.LDAP_AUTH_CONNECT_TIMEOUT,
                ),
                user=username,
                password=password,
                auto_bind=auto_bind,
                raise_exceptions=True,
                receive_timeout=settings.LDAP_AUTH_RECEIVE_TIMEOUT,
            )
    except LDAPException as ex:
        logger.warning("LDAP connect failed: {ex}".format(ex=ex))
        yield None
        return
    # If the settings specify an alternative username and password for querying, rebind as that.
    if ((settings.LDAP_AUTH_CONNECTION_USERNAME
         or settings.LDAP_AUTH_CONNECTION_PASSWORD)
            and (settings.LDAP_AUTH_CONNECTION_USERNAME != username
                 or settings.LDAP_AUTH_CONNECTION_PASSWORD != password)):
        User = get_user_model()
        try:
            c.rebind(
                user=format_username({
                    User.USERNAME_FIELD:
                    settings.LDAP_AUTH_CONNECTION_USERNAME
                }),
                password=settings.LDAP_AUTH_CONNECTION_PASSWORD,
            )
        except LDAPException as ex:
            logger.warning("LDAP rebind failed: {ex}".format(ex=ex))
            yield None
            return
    # Return the connection.
    logger.info("LDAP connect succeeded")
    try:
        yield Connection(c)
    finally:
        c.unbind()
Exemplo n.º 25
0
    in_file_ext = in_file_path.suffix
    if in_file_ext not in valid_ext:
        fatal_error("Error: Invalid input file!")

    if not in_file_path.exists() or in_file_path.is_dir():
        fatal_error("Error: Invalid input file path!")

    with in_file_path.open('rb') as f:
        if in_file_ext == '.ml':
            ml_bytes = f.read()
            ml = CscaMasterList.load(ml_bytes)
            verify_and_extract_masterlist(ml, 
                default_out_dir_csca.joinpath(get_ml_out_dir_name(ml))
            )
        else:
            parser = LDIFParser(f)
            print("Note: DSC and CRL won't be verified against issuing CSCA!")
            for dn, entry in parser.parse():

                # ML
                if 'CscaMasterListData' in entry:
                    ml = entry['CscaMasterListData'][0]
                    ml = CscaMasterList.load(ml)
                    verify_and_extract_masterlist(ml, 
                        default_out_dir_csca.joinpath(get_ml_out_dir_name(ml))
                    )

                # DSC
                elif 'userCertificate' in entry or 'userCertificate;binary' in entry:
                    dn = parse_dn(dn)
                    dsc = entry['userCertificate;binary'][0]