Exemplo n.º 1
0
    def init_account_info(self):
        """Cache account names, password hashes and quarantines.

        Sets:
          self.acc_name               - dict {account_id: user name}
          self.acc_passwd             - dict {account_id: password hash}
          self.acc_quarantines        - dict {account_id: [quarantine codes]}
          self.acc_locked_quarantines - same object as acc_quarantines, or a
              separate dict holding only "locking" quarantines when
              cereconf.QUARANTINE_FEIDE_NONLOCK is configured.
        """
        timer = make_timer(self.logger, "Fetching account information...")
        timer2 = make_timer(self.logger)
        self.acc_name = acc_name = {}
        self.acc_passwd = {}
        # By default the "locked" view aliases the full quarantine dict; it
        # is replaced below when some quarantines are configured non-locking.
        self.acc_locked_quarantines = self.acc_quarantines = acc_quarantines = defaultdict(list)
        # Only accounts that have an MD5-crypt authentication row are cached.
        for row in self.account.list_account_authentication(
                auth_type=int(self.const.auth_type_md5_crypt)):
            account_id = int(row['account_id'])
            acc_name[account_id] = row['entity_name']
            self.acc_passwd[account_id] = row['auth_data']

        timer2("...account quarantines...")
        nonlock_quarantines = [
            int(self.const.Quarantine(code))
            for code in getattr(cereconf, 'QUARANTINE_FEIDE_NONLOCK', ())]
        if nonlock_quarantines:
            # Separate dict for quarantines that actually lock the account
            # (everything not listed as non-locking).
            self.acc_locked_quarantines = acc_locked_quarantines = defaultdict(list)
        for row in self.account.list_entity_quarantines(
                entity_ids=self.accounts,
                only_active=True,
                entity_types=self.const.entity_account):
            qt = int(row['quarantine_type'])
            entity_id = int(row['entity_id'])
            acc_quarantines[entity_id].append(qt)
            # Guard keeps the aliased default case from double-appending.
            if nonlock_quarantines and qt not in nonlock_quarantines:
                acc_locked_quarantines[entity_id].append(qt)
        timer("...account information done.")
Exemplo n.º 2
0
    def generate_person(self, outfile, alias_outfile, use_mail_module):
        """Write the person subtree (and aliases) to the LDIF output.

        Nothing is written unless cereconf.LDAP_PERSON['dn'] is set, and
        aliases are only produced when cereconf.LDAP_PERSON['aliases'] is
        true.  With use_mail_module, persons' e-mail addresses are taken
        from their primary users' e-mail addresses; otherwise from the
        contact info registered on each person.
        """
        if not self.person_dn:
            return
        self.init_person_dump(use_mail_module)
        if self.person_parent_dn not in (None, self.org_dn):
            outfile.write(container_entry_string('PERSON'))
        timer = make_timer(self.logger, "Processing persons...")
        progress_timer = make_timer(self.logger)
        processed = 0
        written = 0
        for person_id, row in self.person_cache.iteritems():
            # Emit a progress line every 10000 persons.
            if processed and processed % 10000 == 0:
                progress_timer("...processed %d rows..." % processed)
            processed += 1
            dn, entry, alias_info = self.make_person_entry(row, person_id)
            if not dn:
                continue
            if dn in self.used_DNs:
                self.logger.warn("Omitting person_id %d: duplicate DN '%s'"
                                 % (person_id, dn))
                continue
            self.used_DNs[dn] = True
            outfile.write(entry_string(dn, entry, False))
            if self.aliases and alias_info:
                self.write_person_alias(alias_outfile, dn, entry, alias_info)
            written += 1
        timer("...persons done, %d exported and %d omitted." %
              (written, processed - written))
Exemplo n.º 3
0
    def init_account_mail(self, use_mail_module):
        u"""Build the account_id -> primary e-mail address lookup.

        When `use_mail_module` is true, the Cerebrum e-mail module is used
        to build a dict {account_id: primary address} and `self.account_mail`
        is bound to that dict's `get` method.  When false,
        `self.account_mail` is set to None (not implemented).

        The e-mail target types considered are taken from the
        LDAP_PERSON['mail_target_types'] setting.

        :param bool use_mail_module:
            Whether to look up addresses via Cerebrum.modules.Email.
        """
        if not use_mail_module:
            self.account_mail = None
            return
        timer = make_timer(self.logger, "Fetching account e-mail addresses...")

        # Resolve configured target type names into constant codes.
        mail_target_types = []
        for value in ldapconf('PERSON', 'mail_target_types', []):
            code = self.const.human2constant(value, self.const.EmailTarget)
            if code is None:
                self.logger.warn("Unknown EmailTarget %r in setting %s",
                                 value, "LDAP_PERSON['mail_target_types']")
            else:
                mail_target_types.append(code)

        # Import locally so this module loads without mod_email installed.
        from Cerebrum.modules.Email import EmailDomain, EmailTarget
        list_addrs = EmailTarget(self.db).list_email_target_primary_addresses
        rewrite = EmailDomain(self.db).rewrite_special_domains

        # Iterate in reverse priority order so that higher-priority target
        # types overwrite addresses from lower-priority ones.
        addr_cache = {}
        for code in reversed(mail_target_types):
            target_timer = make_timer(self.logger)
            for row in list_addrs(target_type=code):
                try:
                    addr_cache[int(row['target_entity_id'])] = "@".join(
                        (row['local_part'], rewrite(row['domain'])))
                except TypeError:
                    # Rows with NULL local_part/domain are skipped.
                    continue
            target_timer("...target_type '{!s}' done".format(code))
        self.account_mail = addr_cache.get
        timer("...account e-mail addresses done.")
Exemplo n.º 4
0
def generate_dump(db, filename, use_mail_module):
    """Generate the full org LDIF dump (org, OU and person trees).

    :param db: Cerebrum database object.
    :param filename: explicit output file, or None for configured defaults.
    :param use_mail_module: forwarded to generate_person (use the e-mail
        module for person addresses).
    """
    ldif = Factory.get('OrgLDIF')(db, logger.getChild('OrgLDIF'))

    timer = make_timer(logger, 'Starting dump')
    outfile = ldif_outfile('ORG', filename)
    logger.debug('writing org data to %r', outfile)

    timer('Generating org data...')
    ldif.generate_org_object(outfile)

    ou_outfile = ldif_outfile('OU', default=outfile, explicit_default=filename)
    # BUG FIX: the two debug lines below logged `outfile` (the ORG file)
    # instead of the file actually being written to.
    logger.debug('Writing ou data to %r', ou_outfile)
    timer('Generating ou data...')
    ldif.generate_ou(ou_outfile)

    pers_outfile = ldif_outfile('PERSON',
                                default=outfile,
                                explicit_default=filename)
    logger.debug('Writing person data to %r', pers_outfile)
    timer('Generating person data...')
    ldif.generate_person(pers_outfile, ou_outfile, use_mail_module)

    end_ldif_outfile('PERSON', pers_outfile, outfile)
    end_ldif_outfile('OU', ou_outfile, outfile)
    end_ldif_outfile('ORG', outfile)
    timer("Dump done")
Exemplo n.º 5
0
    def cache_group2persons(self):
        """Map person members of certain UiO groups to primary accounts.

        IVR 2010-03-10: some auto-generated UiO groups (ansatt-*, built
        from employment data) have persons rather than accounts as members.
        To help vortex, groups carrying both the LDAP spread and the UiO
        NIS netgroup spread are expanded so that each person member is
        recorded in self.group2persons as the person's primary account id.
        Persons without a cached primary account are silently skipped.
        """
        timer = make_timer(self.logger, 'Starting UiO cache_group2persons...')
        with_ldap_spread = set()
        with_nis_spread = set()
        for row in self.grp.list_all_with_spread(
                spreads=self.const.spread_ldap_group):
            with_ldap_spread.add(int(row['entity_id']))
        for row in self.grp.list_all_with_spread(
                spreads=self.const.spread_uio_nis_ng):
            with_nis_spread.add(int(row['entity_id']))

        # Only groups carrying both spreads are expanded.
        target_groups = with_ldap_spread & with_nis_spread

        for row in self.grp.search_members(
                group_id=target_groups,
                member_type=self.const.entity_person):
            person_id = row["member_id"]
            primary = self.pid2primary_aid.get(person_id)
            if primary is None:
                # This is a hack. When it fails, ignore it silently.
                continue
            self.group2persons[int(row['group_id'])].append(int(primary))
        timer('... done UiO cache_group2persons')
Exemplo n.º 6
0
    def cache_group2persons(self):
        """Expand person members of ansatt-* style groups to primary accounts."""
        # IVR 2010-03-10: A number of groups at UiO (ansatt-* -- autogenerated
        # based on employment data) have people, rather than accounts as
        # members. However, in order to help vortex, we expand temporarily
        # these groups in such a fashion, that export to LDAP entails
        # remapping person_id to its primary user's id.
        timer = make_timer(self.logger, 'Starting UiO cache_group2persons...')
        ldapgroup = set()
        nisng = set()
        # Groups exported to LDAP.
        for row in self.grp.list_all_with_spread(
                spreads=self.const.spread_ldap_group):
            ldapgroup.add(int(row['entity_id']))
        # Groups that are UiO NIS netgroups.
        for row in self.grp.list_all_with_spread(
                spreads=self.const.spread_uio_nis_ng):
            nisng.add(int(row['entity_id']))

        # Only groups carrying both spreads get the person -> account mapping.
        help_vortex_groups = ldapgroup & nisng

        for row in self.grp.search_members(
                group_id=help_vortex_groups,
                member_type=self.const.entity_person):
            person_id = row["member_id"]
            # This is a hack. When it fails, ignore it silently.
            if person_id not in self.pid2primary_aid:
                continue

            user_id = int(self.pid2primary_aid[person_id])
            self.group2persons[int(row['group_id'])].append(user_id)
        timer('... done UiO cache_group2persons')
Exemplo n.º 7
0
    def init_account_mail(self, use_mail_module):
        u"""Cache account mail addresses, with UiO-specific overrides.

        Extends the superclass cache: the inherited primary addresses are
        saved as `self.account_primary_mail`, and `self.account_mail` is
        then updated so accounts with a uname@uio.no address use that
        address instead.  This prepares for changing the normal email
        attribute.

        :param bool use_mail_module:
            If True, Cerebrum.modules.Email will be used to populate this
            cache; otherwise `self.account_mail` will be None.
        """
        super(OrgLDIFUiOMixin, self).init_account_mail(use_mail_module)
        if use_mail_module:
            timer = make_timer(
                self.logger,
                "Doing UiO specific changes to account e-mail addresses...")
            # NOTE(review): this assumes self.account_mail is a dict here;
            # some base implementations bind it to `dict.get` instead, which
            # has no .copy()/.update() -- confirm against the actual parent.
            self.account_primary_mail = self.account_mail.copy()
            # We don't want to import this if mod_email isn't present.
            from Cerebrum.modules.Email import EmailTarget
            targets = EmailTarget(self.db).list_email_target_addresses
            mail = {}
            for row in targets(target_type=self.const.email_target_account,
                               domain='uio.no',
                               uname_local=True):
                # Can only return uname@uio.no, so no further checks needed.
                mail[int(row['target_entity_id'])] = "@".join(
                    (row['local_part'], row['domain']))
            self.account_mail.update(mail)
            timer("...UiO specfic account e-mail addresses done.")
Exemplo n.º 8
0
    def __init__(self, *rest, **kw):
        """Initialize the mixin: cache primary accounts and PQ-exempt users.

        Builds `self.pid2primary_aid` (person_id -> primary account_id for
        accounts with the user spread) and `self.pq_exempt_user_ids` (ids
        of members, direct or indirect, of the cereconf.PQ_EXEMPT_GROUP
        group, when that setting exists).
        """
        super(PosixLDIF_UiOMixin, self).__init__(*rest, **kw)
        timer = make_timer(self.logger, 'Initing PosixLDIF_UiOMixin...')

        # load person_id -> primary account_id
        account = Factory.get("Account")(self.db)
        self.pid2primary_aid = dict()
        for row in account.list_accounts_by_type(
                primary_only=True,
                account_spread=self.spread_d["user"][0]):
            self.pid2primary_aid[row["person_id"]] = row["account_id"]
        timer('... done initing PosixLDIF_UiOMixin')
        # handle exempt users
        self.pq_exempt_user_ids = set()
        if hasattr(cereconf, 'PQ_EXEMPT_GROUP'):
            try:
                self.grp.find_by_name(cereconf.PQ_EXEMPT_GROUP)
                # Collect both account and person members, expanding group
                # membership recursively (indirect_members=True).
                for member in self.grp.search_members(
                        group_id=self.grp.entity_id,
                        member_type=(self.const.entity_account,
                                     self.const.entity_person),
                        indirect_members=True):
                    self.pq_exempt_user_ids.add(member['member_id'])
                self.grp.clear()
            except NotFoundError:
                self.logger.error(
                    'Could not find PQ_EXEMPT_GROUP "{group}"'.format(
                        group=cereconf.PQ_EXEMPT_GROUP))
            except Exception as e:
                # should not happen unless nonexisting group-name is specified
                self.logger.error(
                    'PQ_EXEMPT_GROUP defined in cereconf, but extracting '
                    'exempt users failed: {error}'.format(error=e))
Exemplo n.º 9
0
 def load_disk_tab(self):
     """Cache disk_id -> path for all disks into self.disk_tab."""
     timer = make_timer(self.logger, 'Starting load_disk_tab...')
     self.disk = Factory.get('Disk')(self.db)
     self.disk_tab = dict(
         (int(row['disk_id']), row['path']) for row in self.disk.list())
     timer('... done load_disk_tab')
Exemplo n.º 10
0
 def load_disk_tab(self):
     """Build the disk_id -> path lookup table (self.disk_tab)."""
     timer = make_timer(self.logger, 'Starting load_disk_tab...')
     self.disk = Factory.get('Disk')(self.db)
     self.disk_tab = {int(row['disk_id']): row['path']
                      for row in self.disk.list()}
     timer('... done load_disk_tab')
Exemplo n.º 11
0
    def __init__(self, *rest, **kw):
        """Initialize the mixin.

        Caches `self.pid2primary_aid` (person_id -> primary account_id for
        accounts carrying the user spread) and `self.pq_exempt_user_ids`
        (member ids of cereconf.PQ_EXEMPT_GROUP, if configured).
        """
        super(PosixLDIF_UiOMixin, self).__init__(*rest, **kw)
        timer = make_timer(self.logger, 'Initing PosixLDIF_UiOMixin...')

        # load person_id -> primary account_id
        account = Factory.get("Account")(self.db)
        self.pid2primary_aid = dict()
        for row in account.list_accounts_by_type(
                primary_only=True, account_spread=self.spread_d["user"][0]):
            self.pid2primary_aid[row["person_id"]] = row["account_id"]
        timer('... done initing PosixLDIF_UiOMixin')
        # handle exempt users
        self.pq_exempt_user_ids = set()
        if hasattr(cereconf, 'PQ_EXEMPT_GROUP'):
            try:
                self.grp.find_by_name(cereconf.PQ_EXEMPT_GROUP)
                # Both account and person members, expanded recursively.
                for member in self.grp.search_members(
                        group_id=self.grp.entity_id,
                        member_type=(self.const.entity_account,
                                     self.const.entity_person),
                        indirect_members=True):
                    self.pq_exempt_user_ids.add(member['member_id'])
                self.grp.clear()
            except NotFoundError:
                self.logger.error(
                    'Could not find PQ_EXEMPT_GROUP "{group}"'.format(
                        group=cereconf.PQ_EXEMPT_GROUP))
            except Exception as e:
                # should not happen unless nonexisting group-name is specified
                self.logger.error(
                    'PQ_EXEMPT_GROUP defined in cereconf, but extracting '
                    'exempt users failed: {error}'.format(error=e))
Exemplo n.º 12
0
    def netgroup_ldif(self, filename=None):
        """Generate netgroup objects with only users.

        :param filename: explicit LDIF output file; when None, the
            configured NETGROUP default (or self.fd) is used.
        """
        timer = make_timer(self.logger, 'Starting netgroup_ldif...')
        if 'netgroup' not in self.spread_d:
            self.logger.warn("No valid netgroup-spread in cereconf or arg!")
            return

        self.init_netgroup()
        timer2 = make_timer(self.logger, 'Caching netgroups...')
        for row in self.grp.search(spread=self.spread_d['netgroup'],
                                   filter_expired=False):
            group_id = row['group_id']
            self.create_group_object(group_id, row['name'], row['description'])
            self.create_netgroup_object(group_id)
        # BUG FIX: the completion message said "filegroups" (copy-paste from
        # filegroup_ldif); this method caches netgroups.
        timer2('... done caching netgroups')
        self.cache_uncached_children()
        timer2 = make_timer(self.logger, 'Adding users and groups...')
        for group_id, entry in self.netgroupcache.iteritems():
            users, groups = self.get_users_and_groups(group_id,
                                                      set(),
                                                      set(),
                                                      add_persons=True)
            unames = self.userid2unames(users, group_id)
            triple = []
            for uname in unames:
                # Skip usernames containing '_' (presumably not legal in
                # NIS netgroup triples -- confirm).
                if '_' in uname:
                    continue
                triple.append('(,%s,)' % uname)

            netgroup = []
            for g in groups:
                netgroup.append(self.netgroupcache[g]['cn'])

            entry['nisNetgroupTriple'] = triple
            entry['memberNisNetgroup'] = netgroup
        timer2('... done adding users and groups')
        timer2 = make_timer(self.logger, 'Writing group objects...')
        f = LDIFutils.ldif_outfile('NETGROUP', filename, self.fd)
        f.write(LDIFutils.container_entry_string('NETGROUP'))
        for group_id, entry in self.netgroupcache.iteritems():
            dn = ','.join(('cn=' + entry['cn'], self.ngrp_dn))
            f.write(LDIFutils.entry_string(dn, entry, False))
        LDIFutils.end_ldif_outfile('NETGROUP', f, self.fd)
        timer2('... done writing group objects')
        self.netgroupcache = None
        timer('... done netgroup_ldif')
Exemplo n.º 13
0
 def init_person_office365_consents(self):
     """Cache entity ids of persons who consented to Office 365 export."""
     timer = make_timer(self.logger, 'Fetching Office 365 consents...')
     rows = self.person.list_consents(
         consent_code=self.const.consent_office365)
     self.office365_consents = set(row['entity_id'] for row in rows)
     timer('...Office 365 consents done.')
Exemplo n.º 14
0
 def init_person_entitlements(self):
     """Populate dicts with a person's entitlement information.

     Loads self.person2entitlements from the pickle named by
     LDAP_PERSON['entitlements_pickle_file'] in the configured dump_dir.
     """
     timer = make_timer(self.logger, 'Processing person entitlements...')
     path = os.path.join(ldapconf(None, 'dump_dir'),
                         ldapconf('PERSON', 'entitlements_pickle_file'))
     # BUG FIX: use a context manager so the handle is closed even on
     # errors; the old `pickle.load(file(...))` leaked the open file.
     with open(path, 'rb') as stream:
         self.person2entitlements = pickle.load(stream)
     timer("...person entitlements done.")
Exemplo n.º 15
0
 def init_person_groups(self):
     """Populate dicts with a person's group information.

     Loads self.person2group from the personid2group pickle dump in the
     configured dump_dir.
     """
     timer = make_timer(self.logger, 'Processing person groups...')
     path = join_paths(ldapconf(None, 'dump_dir'), "personid2group.pickle")
     # BUG FIX: with-block closes the handle; `pickle.load(file(...))`
     # left it to the garbage collector.
     with open(path, 'rb') as stream:
         self.person2group = pickle.load(stream)
     timer("...person groups done.")
Exemplo n.º 16
0
 def init_person_entitlements(self):
     """Populate dicts with a person's entitlement information.

     Loads self.person2entitlements from the configured entitlements
     pickle in the dump_dir.
     """
     timer = make_timer(self.logger, 'Processing person entitlements...')
     path = os.path.join(ldapconf(None, 'dump_dir'),
                         ldapconf('PERSON', 'entitlements_pickle_file'))
     # BUG FIX: context manager closes the file; the old
     # `pickle.load(file(...))` form leaked the open handle.
     with open(path, 'rb') as stream:
         self.person2entitlements = pickle.load(stream)
     timer("...person entitlements done.")
Exemplo n.º 17
0
    def netgroup_ldif(self, filename=None):
        """Generate netgroup with only users.

        :param filename: explicit LDIF output file; when None, the
            configured NETGROUP default (or self.fd) is used.
        """

        timer = make_timer(self.logger, 'Starting netgroup_ldif...')
        if 'netgroup' not in self.spread_d:
            self.logger.warn("No valid netgroup-spread in cereconf or arg!")
            return

        self.init_netgroup()
        timer2 = make_timer(self.logger, 'Caching netgroups...')
        for row in self.grp.search(spread=self.spread_d['netgroup'],
                                   filter_expired=False):
            group_id = row['group_id']
            self.create_group_object(group_id, row['name'],
                                     row['description'])
            self.create_netgroup_object(group_id)
        # NOTE(review): message says "filegroups" but this caches netgroups
        # -- looks like copy-paste from filegroup_ldif.
        timer2('... done caching filegroups')
        self.cache_uncached_children()
        timer2 = make_timer(self.logger, 'Adding users and groups...')
        for group_id, entry in self.netgroupcache.iteritems():
            users, groups = self.get_users_and_groups(group_id, set(), set(),
                                                      add_persons=True)
            unames = self.userid2unames(users, group_id)
            triple = []
            for uname in unames:
                # Skip usernames containing '_' (presumably not legal in
                # NIS netgroup triples -- confirm).
                if '_' in uname:
                    continue
                triple.append('(,%s,)' % uname)

            netgroup = []
            for g in groups:
                netgroup.append(self.netgroupcache[g]['cn'])

            entry['nisNetgroupTriple'] = triple
            entry['memberNisNetgroup'] = netgroup
        timer2('... done adding users and groups')
        timer2 = make_timer(self.logger, 'Writing group objects...')
        f = LDIFutils.ldif_outfile('NETGROUP', filename, self.fd)
        f.write(LDIFutils.container_entry_string('NETGROUP'))
        for group_id, entry in self.netgroupcache.iteritems():
            dn = ','.join(('cn=' + entry['cn'], self.ngrp_dn))
            f.write(LDIFutils.entry_string(dn, entry, False))
        LDIFutils.end_ldif_outfile('NETGROUP', f, self.fd)
        timer2('... done writing group objects')
        self.netgroupcache = None
        timer('... done netgroup_ldif')
Exemplo n.º 18
0
 def cache_id2stedkode(self):
     """Cache ou_id -> six-digit stedkode string (fak+inst+avd)."""
     timer = make_timer(self.logger, 'Starting cache_id2stedkode...')
     ou = Factory.get('OU')(self.db)
     self.id2stedkode = {}
     for row in ou.get_stedkoder():
         sko = "%02d%02d%02d" % (row['fakultet'], row['institutt'],
                                 row['avdeling'])
         self.id2stedkode[row['ou_id']] = sko
     timer('... done cache_id2stedkode')
Exemplo n.º 19
0
    def filegroup_ldif(self, filename=None):
        """ Generate filegroup.

        Groups without group and expanded members from both external and
        internal groups.

        :param filename: explicit LDIF output file; when None, the
            configured FILEGROUP default (or self.fd) is used.
        """
        timer = make_timer(self.logger, 'Starting filegroup_ldif...')
        if 'filegroup' not in self.spread_d:
            self.logger.warn("No spread is given for filegroup!")
            return

        self.init_filegroup()
        timer2 = make_timer(self.logger, 'Caching filegroups...')
        for row in self.grp.search(spread=self.spread_d['filegroup'],
                                   filter_expired=False):
            group_id = row['group_id']
            if group_id not in self.group2gid:
                # BUG FIX: the stray trailing `[]` was a misplaced default
                # argument; it belongs inside getattr()/get() so a missing
                # LDAP_FILEGROUP setting cannot raise AttributeError here.
                self.logger.warn(
                    "Group id:{} has one of {} but no GID, skipping".format(
                        group_id,
                        getattr(cereconf, 'LDAP_FILEGROUP', {}).get(
                            'spread', [])))
                continue
            self.create_group_object(group_id, row['name'],
                                     row['description'])
            self.create_filegroup_object(group_id)
            self.update_filegroup_entry(group_id)
        timer2('... done caching filegroups')
        self.cache_uncached_children()
        timer2 = make_timer(self.logger, 'Adding users and groups...')
        for group_id, entry in self.filegroupcache.iteritems():
            users = self.get_users(group_id, set())
            unames = self.userid2unames(users, group_id)
            entry['memberUid'] = unames
        timer2('... done adding users')
        timer2 = make_timer(self.logger, 'Writing group objects...')
        f = LDIFutils.ldif_outfile('FILEGROUP', filename, self.fd)
        f.write(LDIFutils.container_entry_string('FILEGROUP'))
        for group_id, entry in self.filegroupcache.iteritems():
            dn = ','.join(('cn=' + entry['cn'], self.fgrp_dn))
            f.write(LDIFutils.entry_string(dn, entry, False))
        timer2('... done writing group objects')
        self.filegroupcache = None
        LDIFutils.end_ldif_outfile('FILEGROUP', f, self.fd)
        timer('... done  filegroup_ldif')
Exemplo n.º 20
0
 def init_person_office365_consents(self):
     """Fetch the IDs of persons who have consented
     to being exported to Office 365."""
     timer = make_timer(self.logger, 'Fetching Office 365 consents...')
     # Active consents carrying the office365 consent code.
     consents = self.person.list_consents(
         consent_code=self.const.consent_office365)
     # Keep only the entity ids, as a set for fast membership tests.
     self.office365_consents = set([c['entity_id'] for c in consents])
     timer('...Office 365 consents done.')
Exemplo n.º 21
0
 def init_person_course(self):
     """Populate dicts with a person's course information.

     Loads self.ownerid2urnlist from the ownerid2urnlist pickle in the
     configured dump_dir.
     """
     timer = make_timer(self.logger, 'Processing person courses...')
     path = join_paths(ldapconf(None, 'dump_dir'), "ownerid2urnlist.pickle")
     # BUG FIX: with-block closes the handle; `pickle.load(file(...))`
     # leaked the open file object.
     with open(path, 'rb') as stream:
         self.ownerid2urnlist = pickle.load(stream)
     timer("...person courses done.")
Exemplo n.º 22
0
 def cache_id2stedkode(self):
     """Cache ou_id -> six-digit stedkode (faculty+institute+department)."""
     timer = make_timer(self.logger, 'Starting cache_id2stedkode...')
     self.id2stedkode = {}
     ou = Factory.get('OU')(self.db)
     for row in ou.get_stedkoder():
         # Each component is zero-padded to two digits.
         self.id2stedkode[row['ou_id']] = "%02d%02d%02d" % \
             (row['fakultet'], row['institutt'], row['avdeling'])
     timer('... done cache_id2stedkode')
Exemplo n.º 23
0
def gen_undervisningsaktivitet(cgi, sip, out):
    """Write uioEduSection LDIF entries for each teaching activity.

    :param cgi: object used to look up FS activity groups
        (find_group_by_undervisningsaktivitet).
    :param sip: object exposing `undervisningsaktiviteter` and
        `emnekode2info` (FS import data).
    :param out: LDIF output stream.
    :return: dict {offering URN: {group entity_id (as str): role}}.
    """
    timer = make_timer(logger, 'Starting gen_undervisningsaktivitet')
    # uioEduSection - Undervisningsaktivitet (instansiering av gruppe,
    #                 kollokvia, lab, skrivekurs, forelesning)
    # access_FS.py:Undervisning.list_aktiviteter
    #
    # uioEduCourseCode - FS.emne.emnekode
    # uioEduCourseAdministrator - (FS.emne.*_reglement (6 siffer)).
    # uioEduCourseLevel - (FS.emne.studienivakode)
    # uioEduCourseName - (FS.emne.emnenavn_bokmal)
    # uioEduCourseSectionName - (FS.undaktivitet.aktivitetsnavn)
    # uioEduCourseOffering - urn:mace:uit.no:section:<noe>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsaktiviteter:
        try:
            emne = sip.emnekode2info[entry['emnekode']]
        except KeyError:
            # Activity refers to an emne we have no info for: log and skip.
            logger.warn(
                "Undervisningsaktivitet %s er ikke knyttet til gyldig emne",
                entry['emnekode'])
            continue
        if 'emnenavn_bokmal' not in emne:
            logger.warn("Undervisningsaktivitet %s uten enhet?" % repr(entry))
            continue
        aktivitet_id = {}
        # Collect the group entity id per interesting FS role for this
        # activity; ids are stored as strings ("%i") keyed to the role.
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.extend((entry['aktivitetkode'], persontype))
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsaktivitet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        # Python 2 idiom: sort the key list in place for a stable URN.
        keys = aktivitet_id.keys()
        keys.sort()
        # NOTE(review): this variant uses the uit.no realm; other variants
        # of this function use uio.no -- confirm the intended realm.
        urn = 'urn:mace:uit.no:section:aktivitet-%s' % "_".join(keys)
        out.write(
            entry_string(
                "cn=ua-%i,%s" % (n, top_dn), {
                    'objectClass': ("top", "uioEduSection"),
                    'uioEduCourseCode': (entry['emnekode'], ),
                    'uioEduCourseAdministrator': (emne['sko'], ),
                    'uioEduCourseLevel': (emne['studienivakode'], ),
                    'uioEduCourseName': (emne['emnenavn_bokmal'], ),
                    'uioEduCourseSectionName': (entry['aktivitetsnavn'], ),
                    'uioEduCourseInstitution': (emne['institusjonsnr'], ),
                    'uioEduCourseVersion': (emne['versjonskode'], ),
                    'uioEduCourseSectionCode': (entry['aktivitetkode'], ),
                    'uioEduOfferingTermCode': (entry['terminkode'], ),
                    'uioEduOfferingYear': (entry['arstall'], ),
                    'uioEduOfferingTermNumber': (entry['terminnr'], ),
                    'uioEduCourseOffering': (urn, )
                }))
        n += 1
        ret[urn] = aktivitet_id
    timer('... done gen_undervisningsaktivitet')
    return ret
Exemplo n.º 24
0
 def init_person_birth_dates(self):
     """Cache birth dates: self.birth_dates = {person_id: birth date}."""
     timer = make_timer(self.logger, 'Fetching birth dates...')
     cache = {}
     for row in self.person.list_persons(person_id=self.persons):
         value = row['birth_date']
         # Persons without a registered birth date are left out.
         if value:
             cache[int(row['person_id'])] = value
     self.birth_dates = cache
     timer("...birth dates done.")
Exemplo n.º 25
0
 def init_person_birth_dates(self):
     """Cache birth dates for all persons being exported."""
     # Set self.birth_dates = dict {person_id: birth date}
     timer = make_timer(self.logger, 'Fetching birth dates...')
     self.birth_dates = birth_dates = {}
     for row in self.person.list_persons(person_id=self.persons):
         birth_date = row['birth_date']
         # Persons without a registered birth date are left out.
         if birth_date:
             birth_dates[int(row['person_id'])] = birth_date
     timer("...birth dates done.")
Exemplo n.º 26
0
    def filegroup_ldif(self, filename=None):
        """ Generate filegroup.

        Groups without group and expanded members from both external and
        internal groups.

        :param filename: explicit LDIF output file; when None, the
            configured FILEGROUP default (or self.fd) is used.
        """
        timer = make_timer(self.logger, 'Starting filegroup_ldif...')
        if 'filegroup' not in self.spread_d:
            self.logger.warn("No spread is given for filegroup!")
            return

        self.init_filegroup()
        timer2 = make_timer(self.logger, 'Caching filegroups...')
        for row in self.grp.search(spread=self.spread_d['filegroup'],
                                   filter_expired=False):
            group_id = row['group_id']
            if group_id not in self.group2gid:
                # BUG FIX: the dangling `[]` was a misplaced default -- it
                # belongs inside getattr()/get(), otherwise a missing
                # LDAP_FILEGROUP setting raises AttributeError here.
                self.logger.warn(
                    "Group id:{} has one of {} but no GID, skipping".format(
                        group_id,
                        getattr(cereconf, 'LDAP_FILEGROUP', {}).get(
                            'spread', [])))
                continue
            self.create_group_object(group_id, row['name'], row['description'])
            self.create_filegroup_object(group_id)
            self.update_filegroup_entry(group_id)
        timer2('... done caching filegroups')
        self.cache_uncached_children()
        timer2 = make_timer(self.logger, 'Adding users and groups...')
        for group_id, entry in self.filegroupcache.iteritems():
            users = self.get_users(group_id, set())
            unames = self.userid2unames(users, group_id)
            entry['memberUid'] = unames
        timer2('... done adding users')
        timer2 = make_timer(self.logger, 'Writing group objects...')
        f = LDIFutils.ldif_outfile('FILEGROUP', filename, self.fd)
        f.write(LDIFutils.container_entry_string('FILEGROUP'))
        for group_id, entry in self.filegroupcache.iteritems():
            dn = ','.join(('cn=' + entry['cn'], self.fgrp_dn))
            f.write(LDIFutils.entry_string(dn, entry, False))
        timer2('... done writing group objects')
        self.filegroupcache = None
        LDIFutils.end_ldif_outfile('FILEGROUP', f, self.fd)
        timer('... done  filegroup_ldif')
Exemplo n.º 27
0
 def init_person_entitlements(self):
     """Load self.person2entitlements from the JSON entitlements dump."""
     timer = make_timer(self.logger, 'Processing person entitlements...')
     path = os.path.join(ldapconf(None, 'dump_dir'),
                         ldapconf('PERSON', 'entitlements_file'))
     with io.open(path, encoding='utf-8') as stream:
         raw = json.loads(stream.read())
     # JSON object keys are strings; the cache is keyed by int person ids.
     self.person2entitlements = dict(
         (int(key), value) for key, value in raw.items())
     timer("...person entitlements done.")
Exemplo n.º 28
0
 def load_quaratines(self):
     """Cache active account quarantines.

     Fills self.quarantines with {account_id: [quarantine type codes]}
     for accounts carrying the user spread.  (Method name typo kept for
     backward compatibility with existing callers.)
     """
     timer = make_timer(self.logger, 'Starting load_quaratines...')
     quarantines = defaultdict(list)
     rows = self.posuser.list_entity_quarantines(
         entity_types=self.const.entity_account,
         only_active=True,
         spreads=self.spread_d['user'])
     for row in rows:
         quarantines[int(row['entity_id'])].append(
             int(row['quarantine_type']))
     self.quarantines = quarantines
     timer('... done load_quaratines')
Exemplo n.º 29
0
 def init_person_entitlements(self):
     """Populate dicts with a person's entitlement information."""
     timer = make_timer(self.logger, 'Processing person entitlements...')
     path = os.path.join(ldapconf(None, 'dump_dir'),
                         ldapconf('PERSON', 'entitlements_file'))
     with io.open(path, encoding='utf-8') as stream:
         data = json.loads(stream.read())
     # convert string keys to int (JSON object keys are always strings)
     self.person2entitlements = {int(k): v for k, v in data.items()}
     timer("...person entitlements done.")
Exemplo n.º 30
0
 def cache_uncached_children(self):
     """Create group objects for child groups missing from the cache.

     Collects every child group referenced from self.group2groups and
     creates cache entries for those not yet in self.groupcache.
     """
     timer = make_timer(self.logger, 'Starting cache_uncached_children...')
     children = set()
     # BUG-PRONE IDIOM FIX: explicit loop instead of map() used for its
     # side effect (under Python 3's lazy map this would silently no-op).
     for child_ids in self.group2groups.itervalues():
         children.update(child_ids)
     missing = children.difference(self.groupcache.keys())
     if missing:
         for row in self.grp.search(group_id=missing):
             self.create_group_object(row['group_id'], row['name'],
                                      row['description'])
     timer('... done cache_uncached_children')
Exemplo n.º 31
0
 def cache_uncached_children(self):
     """Ensure every child group referenced by group2groups is cached."""
     timer = make_timer(self.logger, 'Starting cache_uncached_children...')
     children = set()
     # BUG-PRONE IDIOM FIX: map() was used purely for side effects; under
     # Python 3's lazy map that silently does nothing. Loop explicitly.
     for child_ids in self.group2groups.itervalues():
         children.update(child_ids)
     extra = children.difference(self.groupcache.keys())
     if extra:
         for row in self.grp.search(group_id=extra):
             self.create_group_object(row['group_id'], row['name'],
                                      row['description'])
     timer('... done cache_uncached_children')
Exemplo n.º 32
0
 def load_quaratines(self):
     """Cache active quarantines per account with the user spread.

     Fills self.quarantines = {account_id: [quarantine type codes]}.
     (Method name typo predates this review; kept for compatibility.)
     """
     timer = make_timer(self.logger, 'Starting load_quaratines...')
     self.quarantines = defaultdict(list)
     for row in self.posuser.list_entity_quarantines(
             entity_types=self.const.entity_account,
             only_active=True,
             spreads=self.spread_d['user']):
         self.quarantines[int(row['entity_id'])].append(
             int(row['quarantine_type']))
     timer('... done load_quaratines')
Exemplo n.º 33
0
def gen_undervisningsaktivitet(cgi, sip, out):
    """Write one uioEduSection LDIF entry per undervisningsaktivitet.

    :param cgi: CerebrumGroupInfo used to look up FS activity groups.
    :param sip: StudinfoParsers instance with parsed FS data.
    :param out: output stream for the LDIF entries.
    :return: dict mapping uioEduCourseOffering URN -> {group_id: role}.
    """
    timer = make_timer(logger, 'Starting gen_undervisningsaktivitet')
    # uioEduSection - Undervisningsaktivitet (instansiering av gruppe,
    #                 kollokvia, lab, skrivekurs, forelesning)
    # access_FS.py:Undervisning.list_aktiviteter
    #
    # uioEduCourseCode - FS.emne.emnekode
    # uioEduCourseAdministrator - (FS.emne.*_reglement (6 siffer)).
    # uioEduCourseLevel - (FS.emne.studienivakode)
    # uioEduCourseName - (FS.emne.emnenavn_bokmal)
    # uioEduCourseSectionName - (FS.undaktivitet.aktivitetsnavn)
    # uioEduCourseOffering - urn:mace:uio.no:section:<noe>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsaktiviteter:
        try:
            emne = sip.emnekode2info[entry['emnekode']]
        except KeyError:
            logger.warn(
                "Undervisningsaktivitet %s er ikke knyttet til gyldig emne",
                entry['emnekode'])
            continue
        if 'emnenavn_bokmal' not in emne:
            # Lazy %-args (consistent with the warn() call above) instead
            # of eager string interpolation.
            logger.warn("Undervisningsaktivitet %s uten enhet?", repr(entry))
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.extend((entry['aktivitetkode'], persontype))
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsaktivitet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        # sorted() works on both Python 2 and 3; dict.keys() has no
        # .sort() method under Python 3 (it returns a view).
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uio.no:section:aktivitet-%s' % "_".join(keys)
        out.write(entry_string("cn=ua-%i,%s" % (n, top_dn), {
            'objectClass': ("top", "uioEduSection"),
            'uioEduCourseCode': (entry['emnekode'],),
            'uioEduCourseAdministrator': (emne['sko'],),
            'uioEduCourseLevel': (emne['studienivakode'],),
            'uioEduCourseName': (emne['emnenavn_bokmal'],),
            'uioEduCourseSectionName': (entry['aktivitetsnavn'],),
            'uioEduCourseInstitution': (emne['institusjonsnr'],),
            'uioEduCourseVersion': (emne['versjonskode'],),
            'uioEduCourseSectionCode': (entry['aktivitetkode'],),
            'uioEduOfferingTermCode': (entry['terminkode'],),
            'uioEduOfferingYear': (entry['arstall'],),
            'uioEduOfferingTermNumber': (entry['terminnr'],),
            'uioEduCourseOffering': (urn,)}))
        n += 1
        ret[urn] = aktivitet_id
    timer('... done gen_undervisningsaktivitet')
    return ret
Exemplo n.º 34
0
    def __init__(self, db, logger, u_sprd=None, g_sprd=None, n_sprd=None,
                 fd=None):
        """Set up database handles, spreads and caches for the export.

        Spreads may be given as arguments (u_sprd/g_sprd/n_sprd); any
        missing value falls back to the corresponding cereconf LDAP_*
        'spread' setting.  A user spread is mandatory.
        """
        timer = make_timer(logger, 'Initing PosixLDIF...')
        from Cerebrum.modules import PosixGroup
        self.db = db
        self.logger = logger
        self.const = Factory.get('Constants')(self.db)
        self.grp = Factory.get('Group')(self.db)
        self.posuser = Factory.get('PosixUser')(self.db)
        self.posgrp = PosixGroup.PosixGroup(self.db)
        self.user_dn = LDIFutils.ldapconf('USER', 'dn', None)
        # This is an odd one -- if set to False, then id2uname should be
        # populated with users exported in the users export -- which makes
        # the group exports filter group members by *actually* exported
        # users...
        self.get_name = True
        self.fd = fd
        # Resolve spreads: explicit argument wins, cereconf is fallback.
        self.spread_d = {}
        spread_args = {'USER': u_sprd, 'FILEGROUP': g_sprd,
                       'NETGROUP': n_sprd}
        for ldap_key in ('USER', 'FILEGROUP', 'NETGROUP'):
            spread = LDIFutils.map_spreads(
                spread_args[ldap_key]
                or getattr(cereconf, 'LDAP_' + ldap_key).get('spread'),
                list)
            if spread:
                self.spread_d[ldap_key.lower()] = spread
        if 'user' not in self.spread_d:
            raise Errors.ProgrammingError(
                "Must specify spread-value as 'arg' or in cereconf")
        # Caches populated later by the load_*/cache_* helpers.
        self.account2name = {}
        self.group2gid = {}
        self.groupcache = defaultdict(dict)
        self.group2groups = defaultdict(set)
        self.group2users = defaultdict(set)
        self.group2persons = defaultdict(list)
        self.shell_tab = {}
        self.quarantines = {}
        self.user_exporter = UserExporter(self.db)
        if len(self.spread_d['user']) > 1:
            logger.warning('Exporting users with multiple spreads, '
                           'ignoring homedirs from %r',
                           self.spread_d['user'][1:])
        self.homedirs = HomedirResolver(db, self.spread_d['user'][0])
        self.owners = OwnerResolver(db)

        auth_attr = LDIFutils.ldapconf('USER', 'auth_attr', None)
        self.user_password = AuthExporter.make_exporter(
            db,
            auth_attr['userPassword'])
        timer('... done initing PosixLDIF.')
Exemplo n.º 35
0
    def init_person_titles(self):
        """Extend person_titles with main-employment titles from the
        PersonEmployment module (without overriding existing entries)."""
        self.__super.init_person_titles()

        timer = make_timer(self.logger,
                           'Fetching personal employment titles...')
        for emp in self.person.search_employment(main_employment=True):
            person_id = emp['person_id']
            if person_id in self.person_titles:
                continue
            self.person_titles[person_id] = [
                (self.const.language_nb, iso2utf(emp['description']))]
        timer("...personal employment titles done.")
Exemplo n.º 36
0
 def init_user(self, auth_meth=None):
     """Prepare all user-export caches (disks, shells, quarantines,
     authentication data and account names).

     :param auth_meth: optional auth methods forwarded to load_auth_tab().
     """
     timer = make_timer(self.logger, 'Starting init_user...')
     # With get_name False, cache_account2name() below becomes a no-op
     # (it returns early when get_name is unset).
     self.get_name = False
     self.qh = QuarantineHandler(self.db, None)
     self.posuser = Factory.get('PosixUser')(self.db)
     self.load_disk_tab()
     self.load_shell_tab()
     self.load_quaratines()
     self.load_auth_tab(auth_meth)
     self.cache_account2name()
     # Maps account_id -> username for users actually exported.
     self.id2uname = {}
     timer('... init_user done.')
Exemplo n.º 37
0
    def __init__(self, *rest, **kw):
        """Initialize the mixin and cache each person's primary account."""
        super(PosixLDIF_UiOMixin, self).__init__(*rest, **kw)
        timer = make_timer(self.logger, 'Initing PosixLDIF_UiOMixin...')

        # Map person_id -> primary account_id for the user spread.
        account = Factory.get("Account")(self.db)
        self.pid2primary_aid = {
            row["person_id"]: row["account_id"]
            for row in account.list_accounts_by_type(
                primary_only=True,
                account_spread=self.spread_d["user"][0])
        }
        timer('... done initing PosixLDIF_UiOMixin')
Exemplo n.º 38
0
 def init_user(self, auth_meth=None):
     """Prepare all user-export caches (disks, shells, quarantines,
     authentication data and account names).

     :param auth_meth: optional auth methods forwarded to load_auth_tab().
     """
     timer = make_timer(self.logger, 'Starting init_user...')
     # With get_name False, cache_account2name() below becomes a no-op
     # (it returns early when get_name is unset).
     self.get_name = False
     self.qh = QuarantineHandler(self.db, None)
     self.posuser = Factory.get('PosixUser')(self.db)
     self.load_disk_tab()
     self.load_shell_tab()
     self.load_quaratines()
     self.load_auth_tab(auth_meth)
     self.cache_account2name()
     # Maps account_id -> username for users actually exported.
     self.id2uname = {}
     timer('... init_user done.')
Exemplo n.º 39
0
 def __init__(self):
     """Index FS-derived groups by the leading id-key parts of their
     names, so activities can later be looked up by key."""
     timer = make_timer(logger, 'Initing CerebrumGroupInfo...')
     self._emne_key2dta = defaultdict(list)
     len_id_key_seq = len(CerebrumGroupInfo.id_key_seq)
     for row in group.search(name="%s%%" % CerebrumGroupInfo.PREFIX):
         name = row['name'][len(CerebrumGroupInfo.PREFIX):]
         # Split once (the original split the same name twice): the first
         # len_id_key_seq parts form the key, the rest the value.
         parts = name.split(":")
         emne_key = parts[:len_id_key_seq]
         emne_val = parts[len_id_key_seq:]
         self._emne_key2dta[tuple(emne_key)].append(
             {'group_id': int(row['group_id']),
              'emne_val': emne_val})
     timer('... done initing CerebrumGroupInfo')
Exemplo n.º 40
0
 def init_person_names(self):
     """Cache person names: {person_id: {name_variant: name}}."""
     timer = make_timer(self.logger, "Fetching personal names...")
     wanted_variants = [self.const.name_full,
                        self.const.name_first,
                        self.const.name_last]
     names = defaultdict(dict)
     for row in self.person.search_person_names(
             name_variant=wanted_variants,
             person_id=self.persons,
             source_system=self.const.system_cached):
         names[int(row['person_id'])][int(row['name_variant'])] = \
             row['name']
     self.person_names = names
     timer("...personal names done.")
Exemplo n.º 41
0
    def person_authn_methods(self):
        """ Returns a contact info mapping for update_person_authn.

        Initializes self.person_authn_methods with a dict that maps person
        entity_id to a list of dicts with contact info:

            person_id: [ {'contact_type': <const>,
                          'source_system': <const>,
                          'value': <str>, },
                         ... ],
            ...

        """
        if not hasattr(self, '_person_authn_methods'):
            timer = make_timer(self.logger,
                               'Fetching authentication methods...')
            entity = Entity.EntityContactInfo(self.db)
            self._person_authn_methods = dict()

            # Find the unique systems and contact types for filtering
            source_systems = set(
                (v[0] for s in self.person_authn_selection.itervalues()
                 for v in s))
            contact_types = set(
                (v[1] for s in self.person_authn_selection.itervalues()
                 for v in s))

            if not source_systems or not contact_types:
                # No authn methods to cache
                return self._person_authn_methods

            # Cache contact info.  (An unused `count` accumulator from the
            # original has been removed.)
            for row in entity.list_contact_info(
                    entity_type=self.const.entity_person,
                    source_system=list(source_systems),
                    contact_type=list(contact_types)):
                c_type = self.const.ContactInfo(row['contact_type'])
                system = self.const.AuthoritativeSystem(row['source_system'])
                self._person_authn_methods.setdefault(
                    int(row['entity_id']), list()).append({
                        'value': six.text_type(row['contact_value']),
                        'contact_type': c_type,
                        'source_system': system,
                    })
            timer("...authentication methods done.")
        return self._person_authn_methods
Exemplo n.º 42
0
    def init_person_cache(self):
        """Cache persons and their accounts for the export.

        Sets self.accounts (list of account ids), self.person_cache
        ({person_id: {'account_id', 'ou_id'}}) and self.persons (list of
        person ids).
        """
        self.account = Factory.get('Account')(self.db)
        self.accounts = accounts = []
        self.person_cache = person_cache = {}
        self.persons = []
        timer = make_timer(self.logger, "Caching persons and accounts...")
        for row in self.list_persons():
            accounts.append(row['account_id'])
            person_cache[row['person_id']] = {'account_id': row['account_id'],
                                              'ou_id': row['ou_id']}

        # list() keeps this a real list on both Python 2 and 3
        # (dict.keys() returns a view, not a list, under Python 3).
        self.persons = list(person_cache)
        timer("...caching done, got %d persons and %d accounts." %
              (len(self.persons), len(self.accounts)))
Exemplo n.º 43
0
 def __init__(self, emne_file, aktivitet_file, enhet_file):
     """Parse the FS study-info files and link enheter to emne names."""
     timer = make_timer(logger, 'Initing StudinfoParsers...')
     self.emnekode2info = self._parse_emner(emne_file)
     self.undervisningsaktiviteter = self._parse_undervisningsaktivitet(
         aktivitet_file)
     self.undervisningsenheter = self._parse_undervisningenheter(enhet_file)
     # The current emne query does not fetch emnenavn_bokmal, so copy it
     # over from the enhet records; this pre-pass is why the parsers
     # cannot simply return generators.
     for enhet in self.undervisningsenheter:
         emne = self.emnekode2info.get(enhet['emnekode'])
         if not emne:
             logger.info("Enhet for ukjent emne: %s", dict(enhet))
         else:
             emne['emnenavn_bokmal'] = enhet['emnenavn_bokmal']
     timer('... done initing StudinfoParsers')
Exemplo n.º 44
0
    def cache_groups_and_users(self):
        """Populate the group2groups and group2users membership caches.

        No-op if either cache is already filled.  Fetches group and
        account members for the filegroup/netgroup spreads, then follows
        child groups transitively until closure.
        """
        if len(self.group2groups) or len(self.group2users):
            return

        def get_children_not_in_group2groups():
            # Child group ids seen as members but lacking their own
            # group2groups entry.  NOTE: an explicit loop replaces the
            # original map(children.update, ...), which is a silent no-op
            # under Python 3 (map is lazy there).
            children = set()
            for members in self.group2groups.values():
                children.update(members)
            return children.difference(self.group2groups.keys())

        timer = make_timer(self.logger, 'Starting cache_groups_and_users...')

        spread = []
        for s in ('filegroup', 'netgroup'):
            if s in self.spread_d:
                spread += self.spread_d[s]

        assert spread

        for row in self.grp.search_members(
                member_type=self.const.entity_group,
                spread=spread):
            self.group2groups[row['group_id']].add(row['member_id'])

        for row in self.grp.search_members(
                member_type=self.const.entity_account,
                member_spread=self.spread_d['user'][0],
                spread=spread):
            self.group2users[row['group_id']].add(row['member_id'])

        # Transitively pull in child groups until no unknown ones remain.
        children_groups = get_children_not_in_group2groups()
        extra_groups = children_groups.copy()
        while children_groups:
            for group_id in children_groups:
                self.group2groups[group_id] = set()
            for row in self.grp.search_members(
                    member_type=self.const.entity_group,
                    group_id=children_groups):
                member_id = row['member_id']
                self.group2groups[row['group_id']].add(member_id)
                extra_groups.add(member_id)
            children_groups = get_children_not_in_group2groups()

        # Account members of the transitively discovered groups.
        if extra_groups:
            for row in self.grp.search_members(
                    member_type=self.const.entity_account,
                    member_spread=self.spread_d['user'][0],
                    group_id=extra_groups):
                self.group2users[row['group_id']].add(row['member_id'])

        timer('... done cache_groups_and_users')
Exemplo n.º 45
0
 def load_auth_tab(self, auth_meth=None):
     """Cache authentication data per account and method.

     Fills self.auth_data: {account_id: {method: auth_data}}.

     :param auth_meth: optional auth methods; defaults to the configured
         methods via self.auth_methods().
     """
     timer = make_timer(self.logger, 'Starting load_auth_tab...')
     # Always (re)initialize auth_data, so later lookups cannot hit a
     # missing attribute when no auth methods are configured (the
     # original skipped this on the early-return path).
     self.auth_data = defaultdict(dict)
     self.a_meth = self.auth_methods(auth_meth)
     if not self.a_meth:
         timer('... done load_auth_tab')
         return
     for x in self.posuser.list_account_authentication(
             auth_type=self.a_meth, spread=self.spread_d['user']):
         if not x['account_id'] or not x['method']:
             continue
         acc_id, meth = int(x['account_id']), int(x['method'])
         self.auth_data[acc_id][meth] = x['auth_data']
     timer('... done load_auth_tab')
Exemplo n.º 46
0
 def load_auth_tab(self, auth_meth=None):
     """Cache authentication data per account and method.

     Fills self.auth_data: {account_id: {method: auth_data}}.

     :param auth_meth: optional auth methods; defaults to the configured
         methods via self.auth_methods().
     """
     timer = make_timer(self.logger, 'Starting load_auth_tab...')
     # Always (re)initialize auth_data, so later lookups cannot hit a
     # missing attribute when no auth methods are configured (the
     # original skipped this on the early-return path).
     self.auth_data = defaultdict(dict)
     self.a_meth = self.auth_methods(auth_meth)
     if not self.a_meth:
         timer('... done load_auth_tab')
         return
     for x in self.posuser.list_account_authentication(
             auth_type=self.a_meth, spread=self.spread_d['user']):
         if not x['account_id'] or not x['method']:
             continue
         acc_id, meth = int(x['account_id']), int(x['method'])
         self.auth_data[acc_id][meth] = x['auth_data']
     timer('... done load_auth_tab')
Exemplo n.º 47
0
 def init_person_names(self):
     """Cache person names: {person_id: {name_variant: name}}."""
     timer = make_timer(self.logger, "Fetching personal names...")
     wanted_variants = [self.const.name_full,
                        self.const.name_first,
                        self.const.name_last]
     names = defaultdict(dict)
     for row in self.person.search_person_names(
             name_variant=wanted_variants,
             person_id=self.persons,
             source_system=self.const.system_cached):
         names[int(row['person_id'])][int(row['name_variant'])] = \
             row['name']
     self.person_names = names
     timer("...personal names done.")
Exemplo n.º 48
0
 def init_ou_structure(self):
     """Build self.ou_tree: {parent ou_id (or None for roots): [child
     ou_id, ...]} from the configured OU perspective."""
     timer = make_timer(self.logger, "Fetching OU tree...")
     self.ou.clear()
     perspective = self.const.OUPerspective(
         cereconf.LDAP_OU['perspective'])
     mappings = self.ou.get_structure_mappings(perspective)
     self.logger.debug("OU-list length: %d", len(mappings))
     tree = {None: []}
     for ou_id, parent_id in mappings:
         key = int(parent_id) if parent_id is not None else None
         tree.setdefault(key, []).append(int(ou_id))
     self.ou_tree = tree
     timer("...OU tree done.")
Exemplo n.º 49
0
 def init_ou_structure(self):
     """Build self.ou_tree: {parent ou_id (or None for roots): [child
     ou_id, ...]} from the configured OU perspective."""
     timer = make_timer(self.logger, "Fetching OU tree...")
     self.ou.clear()
     perspective = self.const.OUPerspective(
         cereconf.LDAP_OU['perspective'])
     mappings = self.ou.get_structure_mappings(perspective)
     self.logger.debug("OU-list length: %d", len(mappings))
     tree = {None: []}
     for ou_id, parent_id in mappings:
         key = int(parent_id) if parent_id is not None else None
         tree.setdefault(key, []).append(int(ou_id))
     self.ou_tree = tree
     timer("...OU tree done.")
Exemplo n.º 50
0
    def cache_groups_and_users(self):
        """Populate the group2groups and group2users membership caches.

        No-op if either cache is already filled.  Fetches group and
        account members for the filegroup/netgroup spreads, then follows
        child groups transitively until closure.
        """
        if len(self.group2groups) or len(self.group2users):
            return

        def get_children_not_in_group2groups():
            # Child group ids seen as members but lacking their own
            # group2groups entry.  NOTE: an explicit loop replaces the
            # original map(children.update, ...), which is a silent no-op
            # under Python 3 (map is lazy there).
            children = set()
            for members in self.group2groups.values():
                children.update(members)
            return children.difference(self.group2groups.keys())

        timer = make_timer(self.logger, 'Starting cache_groups_and_users...')

        spread = []
        for s in ('filegroup', 'netgroup'):
            if s in self.spread_d:
                spread += self.spread_d[s]

        assert spread

        for row in self.grp.search_members(
                member_type=self.const.entity_group,
                spread=spread):
            self.group2groups[row['group_id']].add(row['member_id'])

        for row in self.grp.search_members(
                member_type=self.const.entity_account,
                member_spread=self.spread_d['user'][0],
                spread=spread):
            self.group2users[row['group_id']].add(row['member_id'])

        # Transitively pull in child groups until no unknown ones remain.
        children_groups = get_children_not_in_group2groups()
        extra_groups = children_groups.copy()
        while children_groups:
            for group_id in children_groups:
                self.group2groups[group_id] = set()
            for row in self.grp.search_members(
                    member_type=self.const.entity_group,
                    group_id=children_groups):
                member_id = row['member_id']
                self.group2groups[row['group_id']].add(member_id)
                extra_groups.add(member_id)
            children_groups = get_children_not_in_group2groups()

        # Account members of the transitively discovered groups.
        if extra_groups:
            for row in self.grp.search_members(
                    member_type=self.const.entity_account,
                    member_spread=self.spread_d['user'][0],
                    group_id=extra_groups):
                self.group2users[row['group_id']].add(row['member_id'])

        timer('... done cache_groups_and_users')
Exemplo n.º 51
0
 def init_person_titles(self):
     """Cache personal titles per person and language.

     Change from original: search titles first by system_lookup_order,
     then within each system let personal title override work title.
     """
     timer = make_timer(self.logger, 'Fetching personal titles...')
     collected = defaultdict(dict)
     for name_type in (self.const.personal_title, self.const.work_title):
         rows = self.person.search_name_with_language(
             entity_type=self.const.entity_person,
             name_variant=name_type,
             name_language=self.languages)
         for row in rows:
             # setdefault: a personal title seen first wins over a later
             # work title for the same language.
             collected[int(row['entity_id'])].setdefault(
                 int(row['name_language']), row['name'])
     self.person_titles = dict((p_id, langs.items())
                               for p_id, langs in collected.items())
     timer("...personal titles done.")
Exemplo n.º 52
0
    def generate_person(self, outfile, alias_outfile, use_mail_module):
        """Output person tree and aliases if cereconf.LDAP_PERSON['dn'] is set.

        Aliases are only output if cereconf.LDAP_PERSON['aliases'] is true.

        If use_mail_module is set, persons' e-mail addresses are set to
        their primary users' e-mail addresses.  Otherwise, the addresses
        are taken from contact info registered for the individual persons."""
        if not self.person_dn:
            return
        self.init_person_dump(use_mail_module)
        if self.person_parent_dn not in (None, self.org_dn):
            outfile.write(container_entry_string('PERSON'))
        timer = make_timer(self.logger, "Processing persons...")
        round_timer = make_timer(self.logger)
        rounds = 0
        exported = 0
        for person_id, row in self.person_cache.iteritems():
            # Progress message every 10000 persons (but not at the start).
            if rounds and rounds % 10000 == 0:
                round_timer("...processed %d rows..." % rounds)
            rounds += 1
            dn, entry, alias_info = self.make_person_entry(row, person_id)
            if not dn:
                continue
            if dn in self.used_DNs:
                self.logger.warn(
                    "Omitting person_id %d: duplicate DN '%s'" %
                    (person_id, dn))
                continue
            self.used_DNs[dn] = True
            outfile.write(entry_string(dn, entry, False))
            if self.aliases and alias_info:
                self.write_person_alias(alias_outfile, dn, entry,
                                        alias_info)
            exported += 1
        timer("...persons done, %d exported and %d omitted." %
              (exported, rounds - exported))
Exemplo n.º 53
0
 def init_person_titles(self):
     """Cache personal titles per person and language.

     Change from original: search titles first by system_lookup_order,
     then within each system let personal title override work title.
     """
     timer = make_timer(self.logger, 'Fetching personal titles...')
     collected = defaultdict(dict)
     for name_type in (self.const.personal_title, self.const.work_title):
         rows = self.person.search_name_with_language(
             entity_type=self.const.entity_person,
             name_variant=name_type,
             name_language=self.languages)
         for row in rows:
             # setdefault: a personal title seen first wins over a later
             # work title for the same language.
             collected[int(row['entity_id'])].setdefault(
                 int(row['name_language']), row['name'])
     self.person_titles = dict((p_id, langs.items())
                               for p_id, langs in collected.items())
     timer("...personal titles done.")
Exemplo n.º 54
0
 def user_ldif(self, filename=None, auth_meth=None):
     """Generate the posix-user LDIF and write it to the USER outfile."""
     timer = make_timer(self.logger, 'Starting user_ldif...')
     self.init_user(auth_meth)
     outfile = LDIFutils.ldif_outfile('USER', filename, self.fd)
     outfile.write(LDIFutils.container_entry_string('USER'))
     rows = self.posuser.list_extended_posix_users(
         self.user_auth,
         spread=self.spread_d['user'],
         include_quarantines=False)
     for row in rows:
         dn, entry = self.user_object(row)
         if dn:
             outfile.write(LDIFutils.entry_string(dn, entry, False))
     LDIFutils.end_ldif_outfile('USER', outfile, self.fd)
     timer('... done user_ldif')
Exemplo n.º 55
0
 def user_ldif(self, filename=None, auth_meth=None):
     """Generate the posix-user LDIF and write it to the USER outfile."""
     timer = make_timer(self.logger, 'Starting user_ldif...')
     self.init_user(auth_meth)
     outfile = LDIFutils.ldif_outfile('USER', filename, self.fd)
     outfile.write(LDIFutils.container_entry_string('USER'))
     rows = self.posuser.list_extended_posix_users(
         self.user_auth,
         spread=self.spread_d['user'],
         include_quarantines=False)
     for row in rows:
         dn, entry = self.user_object(row)
         if dn:
             outfile.write(LDIFutils.entry_string(dn, entry, False))
     LDIFutils.end_ldif_outfile('USER', outfile, self.fd)
     timer('... done user_ldif')
Exemplo n.º 56
0
 def cache_account2name(self):
     """Cache account_id -> username.

     More lenient than the self.id2uname dictionary: blindly includes
     every user with the correct spread.
     """
     if not self.get_name:
         return
     # Already populated; don't refetch.
     if self.account2name:
         return
     timer = make_timer(self.logger, 'Starting cache_account2name...')
     # Dict comprehension instead of dict([...]) around a list
     # comprehension (flake8-comprehensions C404).
     self.account2name = {
         row['entity_id']: row['entity_name']
         for row in self.posuser.list_names(
             self.const.account_namespace,
             spreads=self.spread_d['user'])}
     timer('... done cache_account2name')
Exemplo n.º 57
0
 def __init__(self, emne_file, aktivitet_file, enhet_file):
     """Parse the FS study-info files and link enheter to emne names."""
     timer = make_timer(logger, 'Initing StudinfoParsers...')
     self.emnekode2info = self._parse_emner(emne_file)
     self.undervisningsaktiviteter = self._parse_undervisningsaktivitet(
         aktivitet_file)
     self.undervisningsenheter = self._parse_undervisningenheter(enhet_file)
     # The current emne query does not fetch emnenavn_bokmal, so copy it
     # over from the enhet records; this pre-pass is why the parsers
     # cannot simply return generators.
     for enhet in self.undervisningsenheter:
         emne = self.emnekode2info.get(enhet['emnekode'])
         if not emne:
             logger.info("Enhet for ukjent emne: %s", dict(enhet))
         else:
             emne['emnenavn_bokmal'] = enhet['emnenavn_bokmal']
     timer('... done initing StudinfoParsers')
Exemplo n.º 58
0
    def person_authn_methods(self):
        """ Returns a contact info mapping for update_person_authn.

        Initializes self.person_authn_methods with a dict that maps person
        entity_id to a list of dicts with contact info:

            person_id: [ {'contact_type': <const>,
                          'source_system': <const>,
                          'value': <str>, },
                         ... ],
            ...

        """
        if not hasattr(self, '_person_authn_methods'):
            timer = make_timer(self.logger,
                               'Fetching authentication methods...')
            entity = Entity.EntityContactInfo(self.db)
            self._person_authn_methods = dict()

            # Find the unique systems and contact types for filtering
            source_systems = set(
                (v[0] for s in self.person_authn_selection.itervalues()
                 for v in s))
            contact_types = set(
                (v[1] for s in self.person_authn_selection.itervalues()
                 for v in s))

            if not source_systems or not contact_types:
                # No authn methods to cache
                return self._person_authn_methods

            # Cache contact info.  (An unused `count` accumulator from the
            # original has been removed.)
            for row in entity.list_contact_info(
                    entity_type=self.const.entity_person,
                    source_system=list(source_systems),
                    contact_type=list(contact_types)):
                c_type = self.const.ContactInfo(row['contact_type'])
                system = self.const.AuthoritativeSystem(row['source_system'])
                self._person_authn_methods.setdefault(
                    int(row['entity_id']), list()).append({
                        'value': six.text_type(row['contact_value']),
                        'contact_type': c_type,
                        'source_system': system,
                    })
            timer("...authentication methods done.")
        return self._person_authn_methods