Example #1
def parse_ldif(myfile, newsletter, ignore_errors=False):
    """
    Parse addresses from an LDIF file object into the given newsletter.

    Returns a dictionary mapping email addresses to Subscription objects.
    """

    from ldif3 import LDIFParser

    address_list = AddressList(newsletter, ignore_errors)

    try:
        parser = LDIFParser(myfile)

        for dn, entry in parser.parse():
            if 'mail' in entry:
                email = entry['mail'][0]

                if 'cn' in entry:
                    name = entry['cn'][0]
                else:
                    name = None

                address_list.add(email, name)

            elif not ignore_errors:
                raise forms.ValidationError(
                    _("Some entries have no e-mail address."))

    except ValueError as e:
        if not ignore_errors:
            raise forms.ValidationError(e)

    return address_list.addresses
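All of the examples on this page follow the same core API: ldif3.LDIFParser wraps a binary file-like object, and parse() yields (dn, entry) pairs where entry maps attribute names to lists of string values. A minimal, self-contained sketch (the record contents are made up for illustration):

import io
from ldif3 import LDIFParser

# a tiny LDIF record held in memory; LDIFParser expects a binary file-like object
LDIF_DATA = b"""dn: cn=Jane Doe,ou=people,dc=example,dc=org
cn: Jane Doe
mail: jane@example.org

"""

parser = LDIFParser(io.BytesIO(LDIF_DATA))
for dn, entry in parser.parse():
    # entry is a dict mapping each attribute name to a list of values
    print(dn, entry.get('mail', [None])[0])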
Example #3
def import_data(file_path):
    """Parse an LDIF file and build a Contact object from each entry."""
    temp_contacts = []
    # close the file handle when parsing is done
    with open(file_path, 'rb') as ldif_file:
        parser = LDIFParser(ldif_file)
        for dn, entry in parser.parse():
            temp_contacts.append(
                Contact.Contact.from_dic(LdifInputOutput.build_contact(entry)))
    return temp_contacts
Example #4
def student_rollno(request):
    # Read roll numbers (stored in the gecos attribute) from LDIF dumps
    # and attach them to existing Personnel records.
    Start = 2014
    End = 2018
    for i in range(2):
        DEPT = 1
        parser = LDIFParser(open('data' + str(i + 1) + '.ldif', 'rb'))
        for dn, Entry in parser.parse():
            props = dict(item.split("=") for item in dn.split(","))
            try:
                print(Entry["uid"], Entry["givenname"], Entry["sn"],
                      Entry["mail"], Entry["gecos"])
            except KeyError:
                # entry is missing one of the expected attributes; skip it
                DEPT = 2
                continue
            FName = Entry["givenname"][0]
            if len(FName) > 30:
                FName = FName[:20]

            u = User.objects.get(username=Entry["uid"][0])
            p = Personnel.objects.get(LDAP_id=u.id)
            p.RollNumber = Entry["gecos"][0]
            p.save()
        Start += 1
        End += 1
Example #5
    def get_entries(self, filename):
        parser = LDIFParser(open(filename, 'rb'))

        self._phoneBook = fritzbox.phonebook.Phonebook()
        for dn, entry in parser.parse():
            self._handle(dn, entry)
        return self._phoneBook
Example #6
def student_users(request):
    # Create Django users and student records from LDIF dumps, one batch per file.
    Start = 2014
    End = 2018
    for i in range(2):
        DEPT = 1
        parser = LDIFParser(open('data' + str(i + 1) + '.ldif', 'rb'))
        for dn, Entry in parser.parse():
            props = dict(item.split("=") for item in dn.split(","))
            try:
                print(Entry["uid"], Entry["givenname"], Entry["sn"],
                      Entry["mail"])
            except KeyError:
                # entry is missing one of the expected attributes; skip it
                DEPT = 2
                continue
            FName = Entry["givenname"][0]
            if len(FName) > 30:
                FName = FName[:20]

            u = User.objects.create_user(username=Entry["uid"][0],
                                         password="******",
                                         first_name=FName,
                                         last_name=Entry["sn"][0],
                                         email=Entry["mail"][0])
            p = Personnel(Dept_id=DEPT, LDAP_id=u.id, Role_id=2)
            p.save()
            q = Student_Period(Student_ID_id=p.Person_ID,
                               Start_Year=Start,
                               End_Year=End)
            q.save()
        Start += 1
        End += 1
Example #7
    def get_entries(self, args):
        parser = LDIFParser(open(args.input, 'rb'))

        self.date = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S +0000")
        self.entries = []

        for dn, entry in parser.parse():
            self._handle(dn, entry)

        return self.entries
Example #8
    def get_entries(self, filename):
        parser = LDIFParser(open(filename, 'rb'))

        self._vipGroups = {}
        for g in self._vipGroupNames:
            self._vipGroups[g] = []

        for dn, entry in parser.parse():
            self._handle(dn, entry)

        return self._vipGroups
Example #9
def faculty_users(request):
    # Create a Django user for each faculty entry in the LDIF dump
    # and register it as Personnel with the faculty role.
    parser = LDIFParser(open('data.ldif', 'rb'))
    for dn, Entry in parser.parse():
        props = dict(item.split("=") for item in dn.split(","))
        print(Entry["uid"], Entry["givenname"], Entry["sn"], Entry["mail"])
        u = User.objects.create_user(username=Entry["uid"][0],
                                     password="******",
                                     first_name=Entry["givenname"][0],
                                     last_name=Entry["sn"][0],
                                     email=Entry["mail"][0])
        p = Personnel(Dept_id=1, LDAP_id=u.id, Role_id=3)
        p.save()
Example #10
def is_invalid_ldif(filehandle):
    try:
        parser = LDIFParser(filehandle)
        # returns a generator so step through to force processing and trigger exception
        for _dn, entry in parser.parse():
            log.debug('got entry record: %s', _dn)
            # looks the same
            #log.debug('%s', pformat(entry))
            log.debug('%s', entry)
        log.debug('%s records read', parser.records_read)
        if not parser.records_read:
            raise ValueError('no records read')
        return False
    except ValueError as _:
        log.debug('ValueError: %s', _)
        return _
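A quick way to exercise the helper above (the file path is hypothetical): it returns False when the LDIF parses cleanly and the ValueError itself otherwise, so the result is truthy exactly when the file is invalid.

with open('records.ldif', 'rb') as filehandle:  # hypothetical input file
    err = is_invalid_ldif(filehandle)
    if err:
        print('invalid LDIF: %s' % err)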
Example #11
    def import_ldif(self, bucket_mappings):
        ctx = prepare_template_ctx(self.manager)
        attr_processor = AttrProcessor()

        for _, mapping in bucket_mappings.iteritems():
            for file_ in mapping["files"]:
                src = "/app/templates/ldif/{}".format(file_)
                dst = "/app/tmp/{}".format(file_)
                render_ldif(src, dst, ctx)
                parser = LDIFParser(open(dst))

                query_file = "/app/tmp/{}.n1ql".format(file_)

                with open(query_file, "a+") as f:
                    for dn, entry in parser.parse():
                        if len(entry) <= 2:
                            continue

                        key = get_key_from(dn)
                        entry["dn"] = [dn]
                        entry = transform_entry(entry, attr_processor)
                        data = json.dumps(entry)
                        # INSERT fails with a duplicate-key error if the key already exists, leaving the existing data intact
                        query = 'INSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s);\n' % (
                            mapping["bucket"], key, data)
                        f.write(query)

                # exec query
                logger.info(
                    "Importing {} file into {} bucket (if needed)".format(
                        file_, mapping["bucket"]))
                with open(query_file) as f:
                    for line in f:
                        query = line.strip()
                        if not query:
                            continue

                        req = self.client.exec_query(query)
                        if not req.ok:
                            logger.warn(
                                "Failed to execute query, reason={}".format(
                                    req.json()))
Example #12
    def parseDscCrlFile(self, dscCrlFile, connection: Connection):
        """Parsing DSC/CRL file"""
        parser = LDIFParser(dscCrlFile)
        for dn, entry in parser.parse():
            if 'userCertificate;binary' in entry:
                countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=data){1}',
                                         dn)[0][0]
                dsc = x509.Certificate.load(*entry['userCertificate;binary'])
                #parse to our object
                dsc.__class__ = DocumentSignerCertificate
                dsc.__init__()
                #write to DB
                writeToDB_DSC(dsc, countryCode, connection)

            if 'certificateRevocationList;binary' in entry:
                countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=data){1}',
                                         dn)[0][0]
                revocationList = crl.CertificateList.load(
                    *entry['certificateRevocationList;binary'])
                #parse to our object
                revocationList.__class__ = CertificateRevocationList
                revocationList.__init__()
                #write to DB
                writeToDB_CRL(revocationList, countryCode, connection)
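The regular expression above pulls the country code out of the entry's DN: it matches the c=/C= component that is immediately followed by ,dc=data and captures its value. A standalone check with an illustrative DN (the DN shape is an assumption, not taken from real PKD data):

import re

sample_dn = "cn=xxx,o=yyy,c=SI,dc=data,dc=download,dc=pkd"  # illustrative DN shape
country = re.findall(r'[c,C]{1}=(.*)(,dc=data){1}', sample_dn)[0][0]
print(country)  # -> "SI"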
Example #13
def connection(**kwargs):
    """
    Creates and returns a connection to the LDAP server.

    The user identifier, if given, should be passed as keyword arguments matching the
    fields in settings.LDAP_AUTH_USER_LOOKUP_FIELDS, plus a `password` argument.
    """
    # Format the DN for the username.
    format_username = import_func(settings.LDAP_AUTH_FORMAT_USERNAME)
    kwargs = {key: value for key, value in kwargs.items() if value}
    username = None
    password = None
    if kwargs:
        password = kwargs.pop("password")
        username = format_username(kwargs)
    # Configure the connection.
    if settings.LDAP_AUTH_USE_TLS:
        auto_bind = ldap3.AUTO_BIND_TLS_BEFORE_BIND
    else:
        auto_bind = ldap3.AUTO_BIND_NO_TLS

    # Connect.
    try:
        # Use an in-memory mock server populated from an LDIF file when configured.
        if settings.LDAP_AUTH_LDIF:
            c = ldap3.Connection(
                "mock_server",
                user=username,
                password=password,
                client_strategy=ldap3.MOCK_SYNC,
                raise_exceptions=True,
            )
            # Load entries from LDIF file
            parser = LDIFParser(open(settings.LDAP_AUTH_LDIF, "rb"))
            for dn, entry in parser.parse():
                c.strategy.add_entry(dn, entry)
            # Per https://ldap3.readthedocs.io/mocking.html:
            # "You cannot use the auto_bind parameter because the DIT is populated after the creation of the Connection object."
            # Bind manually
            c.bind()
        else:
            c = ldap3.Connection(
                ldap3.Server(
                    settings.LDAP_AUTH_URL,
                    allowed_referral_hosts=[("*", True)],
                    get_info=ldap3.NONE,
                    connect_timeout=settings.LDAP_AUTH_CONNECT_TIMEOUT,
                ),
                user=username,
                password=password,
                auto_bind=auto_bind,
                raise_exceptions=True,
                receive_timeout=settings.LDAP_AUTH_RECEIVE_TIMEOUT,
            )
    except LDAPException as ex:
        logger.warning("LDAP connect failed: {ex}".format(ex=ex))
        yield None
        return
    # If the settings specify an alternative username and password for querying, rebind as that.
    if ((settings.LDAP_AUTH_CONNECTION_USERNAME
         or settings.LDAP_AUTH_CONNECTION_PASSWORD)
            and (settings.LDAP_AUTH_CONNECTION_USERNAME != username
                 or settings.LDAP_AUTH_CONNECTION_PASSWORD != password)):
        User = get_user_model()
        try:
            c.rebind(
                user=format_username({
                    User.USERNAME_FIELD:
                    settings.LDAP_AUTH_CONNECTION_USERNAME
                }),
                password=settings.LDAP_AUTH_CONNECTION_PASSWORD,
            )
        except LDAPException as ex:
            logger.warning("LDAP rebind failed: {ex}".format(ex=ex))
            yield None
            return
    # Return the connection.
    logger.info("LDAP connect succeeded")
    try:
        yield Connection(c)
    finally:
        c.unbind()
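For reference, the mock branch above follows the pattern from ldap3's mocking documentation: create a MOCK_SYNC connection, populate the in-memory DIT from an LDIF file, then bind manually. A stripped-down sketch, assuming a local users.ldif fixture (hypothetical path) and an example search base:

import ldap3
from ldif3 import LDIFParser

conn = ldap3.Connection(
    "mock_server",
    client_strategy=ldap3.MOCK_SYNC,
    raise_exceptions=True,
)
with open("users.ldif", "rb") as fh:  # hypothetical fixture file
    for dn, entry in LDIFParser(fh).parse():
        conn.strategy.add_entry(dn, entry)
# auto_bind cannot be used because the DIT is populated after the Connection is created
conn.bind()
# the search base is an assumption about the fixture's contents
print(conn.search("dc=example,dc=org", "(objectclass=*)"))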
Example #14
    def parseCSCAFile(self, CSCAFile, connection: Connection):
        """Parsing CSCA file"""
        parser = LDIFParser(CSCAFile)
        for dn, entry in parser.parse():
            if 'CscaMasterListData' in entry:
                ml = CscaMasterList()
                masterList = ml.load(*entry['CscaMasterListData'])
                try:
                    # verify masterlist - if failed it returns exception
                    masterList.verify()
                except Exception as e:
                    self._log.error(
                        "Integrity verification failed for master list issued by {}."
                        .format(masterList.signerCertificates[0].subject.
                                native['country_name']))

                cscas = {}
                skipped_cscas = []
                for csca in masterList.cscaList:
                    if csca.key_identifier not in cscas:
                        cscas[csca.key_identifier] = csca

                    if csca.self_signed != 'maybe':
                        if csca.authority_key_identifier not in cscas:
                            skipped_cscas.append(csca)
                            continue
                        issuing_cert = cscas[csca.authority_key_identifier]
                    else:
                        issuing_cert = csca

                    self.verifyCSCAandWrite(csca, issuing_cert, connection)

                for csca in skipped_cscas:
                    issuer_cert = get_issuer_cert(csca, cscas)
                    if issuer_cert is None:
                        self._log.error(
                            "Could not verify signature of CSCA C={} SerNo={}. Issuing CSCA not found! The CSCA is skipped and not stored in database."
                            .format(
                                csca.subject.native['country_name'],
                                hex(csca.serial_number).rstrip("L").lstrip(
                                    "0x")))
                    else:
                        self.verifyCSCAandWrite(csca, issuer_cert, connection)

                # verify master list signer certificates
                for mlsig_cert in masterList.signerCertificates:
                    issuer_cert = get_issuer_cert(mlsig_cert, cscas)
                    if issuer_cert is None:
                        self._log.info(
                            "Could not verify signature of master list signer certificate. Issuing CSCA not found! [C={} Ml-Sig-SerNo={}]"
                            .format(
                                mlsig_cert.subject.native['country_name'],
                                hex(mlsig_cert.serial_number).rstrip(
                                    "L").lstrip("0x")))
                    else:
                        try:
                            mlsig_cert.verify(issuer_cert)
                        except Exception as e:
                            self._log.info(
                                "Failed to verify master list signer C={} Ml-Sig-SerNo={}\n\treason: {}"
                                .format(
                                    mlsig_cert.subject.native['country_name'],
                                    hex(mlsig_cert.serial_number).rstrip(
                                        "L").lstrip("0x"), str(e)))
Example #15
import re

from ldif3 import LDIFParser
# x509 and crl refer to the asn1crypto modules (assumed, as used elsewhere in this project)
from asn1crypto import x509, crl

#from database.storage.DSC import CertX509
from pymrtd.pki.crl import writeToDB, readFromDB
from pymrtd.pki.crl import CertificateRevocationList

from database import Connection

conn = Connection("nejko", "nejko", "icao")


certificateList = {}
revocationList = {}
parser = LDIFParser(open('C://Users/nejko/Desktop/ZeroPass/B1/random/parseCSCAandCRL/database/icaopkd-001-dsccrl-003749.ldif', 'rb'))
for dn, entry in parser.parse():
    if 'userCertificate;binary' in entry:
        countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=database){1}', dn)[0][0]
        cert = x509.Certificate.load(*entry['userCertificate;binary'])
        if countryCode not in certificateList:
            certificateList[countryCode] = {}
        certificateList[countryCode][cert.serial_number] = cert

    if 'certificateRevocationList;binary' in entry:
        countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=database){1}', dn)[0][0]
        ##revocationList[countryCode] = x509.load_der_x509_crl(*entry['certificateRevocationList;binary'], default_backend())
        revocationList[countryCode] = crl.CertificateList.load(*entry['certificateRevocationList;binary'])
        revocationListInObject = revocationList[countryCode]
        #revocationListInObject1 = CertificateRevocationList(revocationListInObject)
        revocationListInObject.__class__ = CertificateRevocationList
        revocationListInObject.__init__()
Example #16
    def import_ldif(self):
        ldif_mappings = {
            "default": [
                "base.ldif",
                "attributes.ldif",
                "scopes.ldif",
                "scripts.ldif",
                "configuration.ldif",
                "scim.ldif",
                "oxidp.ldif",
                "oxtrust_api.ldif",
                "passport.ldif",
                "oxpassport-config.ldif",
                "gluu_radius_base.ldif",
                "gluu_radius_server.ldif",
                "clients.ldif",
                "oxtrust_api_clients.ldif",
                "scim_clients.ldif",
                "o_metric.ldif",
                "gluu_radius_clients.ldif",
                "passport_clients.ldif",
                "scripts_casa.ldif",
            ],
            "user": [
                "people.ldif",
                "groups.ldif",
            ],
            "site": [
                "o_site.ldif",
            ],
            "cache": [],
            "token": [],
        }

        # hybrid means only a subset of the LDIF files is needed
        if GLUU_PERSISTENCE_TYPE == "hybrid":
            mapping = GLUU_PERSISTENCE_LDAP_MAPPING
            ldif_mappings = {mapping: ldif_mappings[mapping]}

            # # these mappings require `base.ldif`
            # opt_mappings = ("user", "token",)

            # `user` mapping requires `o=gluu` which available in `base.ldif`
            # if mapping in opt_mappings and "base.ldif" not in ldif_mappings[mapping]:
            if "base.ldif" not in ldif_mappings[mapping]:
                ldif_mappings[mapping].insert(0, "base.ldif")

        ctx = prepare_template_ctx(self.manager)

        for mapping, files in ldif_mappings.iteritems():
            self.check_indexes(mapping)

            for file_ in files:
                logger.info("Importing {} file".format(file_))
                src = "/app/templates/ldif/{}".format(file_)
                dst = "/app/tmp/{}".format(file_)
                render_ldif(src, dst, ctx)

                parser = LDIFParser(open(dst))
                for dn, entry in parser.parse():
                    self.add_entry(dn, entry)
Example #17
def main(self):

    parser = argparse.ArgumentParser()

    parser.add_argument("-f",
                        "--ldiffile",
                        type=str,
                        dest='ldiffile',
                        help="LDIF File")
    parser.add_argument("-b",
                        "--database",
                        type=str,
                        dest='dbname',
                        help="DB Name")
    parser.add_argument("--inject", help="Inject Data", action="store_true")
    parser.add_argument("--export", help="Export Data", action="store_true")
    parser.add_argument("--user", help="User Import", action="store_true")
    parser.add_argument("--group", help="Group Import", action="store_true")

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    args = parser.parse_args()

    ### Connect to Database
    client = MongoClient()
    db = client[args.dbname]
    UserCollection = db['Users']
    GroupCollection = db['Groups']

    if (args.inject):
        if (args.user):
            ldifparser = LDIFParser(open(args.ldiffile, 'rb'))
            user = {}
            for dn, entry in ldifparser.parse():
                #print ('got entry record : %s' % dn)
                #print (dn)
                cn = ""
                mail = ""
                uid = ""

                for element in entry.items():
                    ### Get the CN (exact attribute-name match)
                    if element[0] == "cn":
                        cn = element[1][0]

                    ### Get the mail
                    if element[0] == "mail":
                        mail = element[1][0]

                    ### Get the UID
                    if element[0] == "uid":
                        uid = element[1][0]

                if (cn != mail):
                    #print ({ 'cn' : cn, 'mail' : mail, 'uid' : uid})
                    insertLineToDB(
                        {
                            'dn': dn,
                            'cn': cn,
                            'mail': mail,
                            'uid': uid
                        }, UserCollection)
                else:
                    print("Found " + cn + " in " + mail + "for :" + dn)

        if (args.group):
            ldifparser = LDIFParser(open(args.ldiffile, 'rb'))
            for dn, entry in ldifparser.parse():
                #print ('got entry record : %s' % dn)
                cn = ""
                member = []

                for element in entry.items():
                    ### Get the CN
                    if element[0] == "cn":
                        cn = element[1][0]

                    ### Get the members
                    if element[0] == "member":
                        # extend keeps member a flat list of member DNs
                        member.extend(element[1])

                print({'cn': cn, 'member': member})
                insertLineToDB({
                    'dn': dn,
                    'cn': cn,
                    'member': member
                }, GroupCollection)

    #insertLineToDB(user, UserCollection)

    if (args.export):
        pd.DataFrame(list(UserCollection.find())).to_csv("userexport.csv")