Пример #1
0
    def main(self):
        """Entry point: parse options, set up db accessors and caches,
        then run setup() and generate_files()."""
        self.parse_options()

        # Cerebrum accessors used throughout the generation run.
        self.db = Factory.get('Database')()
        self.co = Factory.get('Constants')(self.db)
        self.group = Factory.get('Group')(self.db)
        self.posix_user = Factory.get('PosixUser')(self.db)
        self.posix_group = PosixGroup.PosixGroup(self.db)
        # Timestamp taken when the caches below start being built.
        self._namecachedtime = mx.DateTime.now()

        self._num = 0
        # Lookup caches; empty here, presumably populated by setup() /
        # generate_files() (not visible in this chunk — verify there).
        self.posix_users = []
        self.e_id2name = {}
        self.p_id2name = {}
        self.auth_data = {}
        self.disk_tab = {}
        self.shell_tab = {}
        self.quarantines = {}
        self.filegroups = {}
        self.netgroups = {}
        self.host_netgroups = {}
        self.account2def_group = {}
        self.g_id2gid = {}
        self.a_id2owner = {}
        self.a_id2home = {}
        self._names = set()

        self.setup()
        self.generate_files()
Пример #2
0
    def setUpClass(cls):
        """
        Set up this TestCase module.

        This setup code sets up shared objects between each tests. This is done
        *once* before running any of the tests within this class.
        """

        # TODO: We might want this basic class setup in other TestCases. Maybe
        #       set up a generic TestCase class to inherit common stuff from?
        cls._db = Factory.get('Database')()
        cls._db.cl_init(change_program='nosetests')
        cls._db.commit = cls._db.rollback  # Let's try not to screw up the db

        # BUG FIX: cls._ac was assigned twice in a row; the
        # Factory.get('Account') instance was immediately overwritten, so
        # that dead store has been removed.
        # NOTE(review): confirm the plain Account class (and not the Factory
        # composite) really is the intended one here.
        cls._ac = Account(cls._db)
        cls._co = Factory.get('Constants')(cls._db)

        # Data sources
        cls.account_ds = BasicAccountSource()
        cls.person_ds = BasicPersonSource()

        # Tools for creating and destroying temporary db items
        cls.db_tools = DatabaseTools(cls._db)
        cls.db_tools._ac = cls._ac
Пример #3
0
def main():
    """Entry point: parse command line options and import OU data."""
    global dryrun
    global logger
    logger = Factory.get_logger("cronjob")

    opts, _ = getopt.getopt(sys.argv[1:],
                            "do:",
                            ["dryrun", "ou-file="])
    dryrun = False
    filename = None
    for opt, val in opts:
        if opt in ("-d", "--dryrun"):
            dryrun = True
        elif opt in ("-o", "--ou-file",):
            # Value is on the form <source system>:<path>.
            source_system, filename = val.split(":", 1)

    if not filename:
        logger.error("Missing OU input file")
        sys.exit(1)

    db = Factory.get("Database")()
    db.cl_init(change_program="import_SAP")

    parser = system2parser(source_system)
    process_OUs(db, parser(filename, logger))
Пример #4
0
def _test():
    """Manual smoke test for QuarantineHandler (not run by any framework)."""
    # TODO: This should use the unit-testing framework, and use common
    # constants (which we currently don't have for spreads)
    cereconf.QUARANTINE_RULES = {
        'nologin': {'lock': 1, 'shell': 'nologin-shell', 'sort_num': 10},
        'system': [{'lock': 1, 'shell': 'nologin-shell2', 'sort_num': 2},
                   {'spread': 'AD_account', 'shell': 'ad-shell', 'sort_num': 3}]
    }
    from Cerebrum.Utils import Factory
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    # Check with old cereconf syntax
    # NOTE(review): "nolgin" below is a typo in the printed label only;
    # left untouched since output text is runtime behavior.
    qh = QuarantineHandler(db, (co.quarantine_nologin,))
    print "nolgin: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())

    # New cereconf syntax, non-spread spesific
    qh = QuarantineHandler(db, (co.quarantine_system,))
    print "system: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())

    # spread-spesific quarantine action, should not be locked
    qh = QuarantineHandler(db, (co.quarantine_system,),
                           spreads=(co.spread_uio_ad_account,))
    print "system & AD: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())

    # spread-specific quarantine action and another quarantine that
    # requires lock
    qh = QuarantineHandler(db, (co.quarantine_system, co.quarantine_nologin),
                           spreads=(co.spread_uio_ad_account,))
    print "system & AD & L: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())

    # Look up quarantines for a hard-coded entity id in the db.
    qh = QuarantineHandler.check_entity_quarantines(db, 67201)
    print "An entity: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())
Пример #5
0
def main():
    """Entry point: generate a new XML report for export_ACL."""
    global logger, const, cerebrum_db, xmlwriter
    logger = Factory.get_logger("cronjob")
    logger.info("generating a new XML for export_ACL")

    cerebrum_db = Factory.get("Database")()
    const = Factory.get("Constants")(cerebrum_db)

    # BUG FIX: getopt long option names must NOT include the leading "--";
    # the old value ["--out-file="] made getopt reject --out-file on the
    # command line with "option --out-file not recognized".
    opts, rest = getopt.getopt(sys.argv[1:], "f:",
                               ["out-file="])
    filename = None
    for option, value in opts:
        if option in ("-f", "--out-file"):
            filename = value

    # Fail early with a clear message instead of passing None on to
    # AtomicFileWriter.
    if not filename:
        logger.error("No output file given (use -f/--out-file)")
        sys.exit(1)

    _cache_id_types()
    stream = AtomicFileWriter(filename)
    xmlwriter = xmlprinter.xmlprinter(stream,
                                      indent_level=2,
                                      # Human-readable output
                                      data_mode=True,
                                      input_encoding="latin1")
    generate_report()
    stream.close()
Пример #6
0
    def cacheAccounts(self, account_names):
        """ Cache data for a list of account names, efficiently.

        This function has a bit of an overhead, because it looks up all users
        in the db. It is, however, a lot more efficient than looking up
        individual accounts when there's a lot of L{account_names}.

        Populates self.accounts, self.mobiles, self.spreads and
        self.quarantined (the latter two are assumed to exist already —
        verify against __init__, which is not visible here).

        @type account_names: set
        @param account_names:
            An iterable (ideally a set) of account names to cache data for.

        """
        ac = Factory.get('Account')(self.db)
        pe = Factory.get('Person')(self.db)

        # Save some time
        if not account_names:
            return

        # Fetch all accounts. ...would be nice to filter by names in the query
        all_accounts = ac.search(owner_type=self.co.entity_person)

        # self.accounts - Account and owner id for all candidates. Dict map:
        #   account_name -> {account_id -> , owner_id -> ,}
        filtered_accounts = filter(lambda a: a['name'] in account_names,
                                   all_accounts)
        self.accounts = dict((a['name'], {
            'account_id': a['account_id'],
            'owner_id': a['owner_id']}) for a in filtered_accounts)

        # self.mobiles - Look up the mobile phone number (from FS) for all
        # candidates. Dict mapping:
        #   person_id -> mobile number
        owners = set([a['owner_id'] for a in self.accounts.values()])
        if owners:
            self.mobiles = dict((mob['entity_id'], mob['contact_value']) for mob in
                    pe.list_contact_info(source_system=self.co.system_fs,
                                         contact_type=self.co.contact_mobile_phone,
                                         entity_type=self.co.entity_person, 
                                         entity_id=owners))

        # self.spreads - The spreads of all candidates. List of tuples: 
        #   (account_id, spread_code)
        account_ids = set([a['account_id'] for a in self.accounts.values()])
        for s in cereconf.DIGEKS_CANDIDATE_SPREADS:
            spread = self.co.Spread(s)
            # Keep only spread rows belonging to our candidate accounts.
            spreads = filter(lambda s: s['entity_id'] in account_ids, ac.list_all_with_spread(spread))
            self.spreads.extend(spreads)

        # Quarantines
        # only_active=False: include quarantines that are not currently in
        # effect as well.
        quarantines = []
        if len(account_ids) > 0:
            quarantines = ac.list_entity_quarantines(
                    entity_types=self.co.entity_account,
                    entity_ids=account_ids, 
                    only_active=False)
        # Group quarantine type names per entity id.
        for q in quarantines:
            if q['entity_id'] not in self.quarantined.keys():
                self.quarantined[q['entity_id']] = []
            self.quarantined[q['entity_id']].append(str(self.co.Quarantine(q['quarantine_type'])))
Пример #7
0
    def __init__(self, subjects, year, version=None, typecode=None, timecode=None):
        """Set up db/FS connections and process exams for the given subjects.

        @param subjects: iterable (list/set/tuple) of subject codes.
        @param year: exam year.
        @param version: versjonskode (optional filter).
        @param typecode: vurdkombkode (optional filter).
        @param timecode: vurdtidkode (optional filter).
        @raise Exception: if subjects is not a list, set or tuple.
        """
        self.db = Factory.get('Database')()
        self.db.cl_init(change_program='proc-digeks')
        self.co = Factory.get('Constants')(self.db)
        self.fs = make_fs()

        # TODO: Describe the structure here
        self.exams = set()
        self.candidates = set()

        # FIXME: We shouldn't need to specify subject/semester/...
        if not isinstance(subjects, (list,set,tuple)):
            raise Exception('Subjects must be a (list,set,tuple)')

        self.subjects = subjects
        self.year = year
        self.typecode = typecode # vurdkombkode
        self.timecode = timecode # vurdtidkode
        self.version  = version  # versjonskode

        # Start processing
        #
        self.process_exams()

        # process_exams() is expected to fill self.candidates; cache data
        # for every distinct candidate username.
        all_candidates = set([c.username for c in self.candidates])
        logger.debug('Caching candidate data for %d unique candidates...' % len(all_candidates))
        self.cache = CandidateCache(self.db, all_candidates)
Пример #8
0
def write_fnrupdate_info(outfile):
    """Write an XML file describing all Norwegian national id changes."""
    stream = AtomicFileWriter(outfile, 'w')
    writer = xmlprinter.xmlprinter(stream,
                                   indent_level=2,
                                   # Human-readable output
                                   data_mode=True,
                                   input_encoding="latin1")
    writer.startDocument(encoding="iso8859-1")

    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)

    writer.startElement("data", {"source_system": str(const.system_fs)})

    for row in fs.person.list_fnr_endringer():
        # Mirror the corresponding FS output format as closely as possible.
        attrs = {
            "type": str(const.externalid_fodselsnr),
            "new": "%06d%05d" % (row["fodselsdato_naverende"],
                                 row["personnr_naverende"]),
            "old": "%06d%05d" % (row["fodselsdato_tidligere"],
                                 row["personnr_tidligere"]),
            "date": str(row["dato_foretatt"]),
        }
        writer.emptyElement("external_id", attrs)

    writer.endElement("data")
    writer.endDocument()
    stream.close()
Пример #9
0
def main():
    """Entry point: parse options and import groups from an input file."""
    global db, constants, account_init, group, posixgroup
    global default_creator_id
    global dryrun, logger

    logger = Factory.get_logger("console")

    try:
        opts, args = getopt.getopt(sys.argv[1:],
                                   'f:d',
                                   ['file=',
                                    'dryrun'])
    except getopt.GetoptError:
        usage()

    dryrun = False
    # BUG FIX: infile was unbound (NameError at process_line) when
    # -f/--file was not supplied; initialize and check explicitly.
    infile = None
    for opt, val in opts:
        if opt in ('-d', '--dryrun'):
            dryrun = True
        elif opt in ('-f', '--file'):
            infile = val
    if infile is None:
        # NOTE(review): usage() is assumed to exit, as in the except above.
        usage()

    db = Factory.get('Database')()
    db.cl_init(change_program='import_groups')
    constants = Factory.get('Constants')(db)
    account_init = Factory.get('Account')(db)
    account_init.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    default_creator_id = account_init.entity_id
    group = Factory.get('Group')(db)
    posixgroup = PosixGroup.PosixGroup(db)

    process_line(infile)
    def __init__(self, db):
        """Build an in-memory index of person affiliations.

        For every (affiliation, status, ou) key combination actually
        observed — including partial keys with None wildcards — cache the
        list of matching person ids.
        """
        co = Factory.get("Constants")(db)
        pe = Factory.get("Person")(db)

        # Distinct affiliations, statuses and OUs seen in the data.
        affs = set()
        stat = set()
        ous = set()

        data = defaultdict(list)

        for row in pe.list_affiliations():
            aff = row['affiliation']
            status = row['status']
            ou_id = row['ou_id']
            affs.add(aff)
            stat.add(status)
            ous.add(ou_id)

            # Index the person under every relevant partial key.
            for key in (
                (None, None, None),
                (aff,  None, None),
                (None, status, None),
                (None, None, ou_id),
                (aff,  None, ou_id),
                (None, status, ou_id),
            ):
                data[key].append(row['person_id'])

        self._data = dict(data)
        self.ous = ous
        self.types = tuple(co.PersonAffiliation(a) for a in affs)
        self.subtypes = tuple(co.PersonAffStatus(s) for s in stat)
Пример #11
0
def main():
    """Entry point: dump email data for all users to an output file."""
    global db, constants, account
    global logger, outfile, person

    outfile = None
    logger = Factory.get_logger("cronjob")

    try:
        opts, args = getopt.getopt(sys.argv[1:],
                                   'f:',
                                   ['file='])
    except getopt.GetoptError:
        usage()

    dryrun = False  # NOTE(review): never read in this function
    for opt, val in opts:
        if opt in ('-f', '--file'):
            outfile = val

    # Fall back to the default cache location when no file was given.
    if outfile is None:
        outfile = '/cerebrum/var/cache/MAIL/mail_data.dat'

    db = Factory.get('Database')()
    constants = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    person = Factory.get('Person')(db)

    email_data = generate_email_data()
    write_email_file(email_data, outfile)
Пример #12
0
def main(args=None):
    """Entry point: build and write an HTML report on name differences.

    @param args: argument list to parse (defaults to sys.argv[1:]).
    """
    ENCODING = 'utf-8'
    logger = Factory.get_logger('cronjob')
    db = Factory.get(b'Database')()
    co = Factory.get(b'Constants')(db)

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--output', default='/tmp/report.html')
    commands = parser.add_subparsers(help="available commands")

    # name
    # Subcommand "name": report on name differences between two source
    # systems (check_system is fixed to SAP; source_system is an argument).
    name_command = commands.add_parser(
        'name',
        help="Generate report on differences in names.")
    name_command.set_defaults(func=compare_names)
    name_command.set_defaults(check_system=co.system_sap)
    name_command.add_argument(
        'source_system',
        type=partial(argparse_const, db, co.AuthoritativeSystem))

    args = parser.parse_args(args)
    # Pop the selected handler off the namespace before handing the
    # remaining options to it.
    command = args.func
    del args.func

    # Other commands?
    logger.info('Generating report ({!s})'.format(args.output))
    af = AtomicFileWriter(args.output)

    report = command(db, logger, args)
    # Stamp the charset into the report's <meta> element before writing.
    report.find('head/meta[@charset]').set('charset', ENCODING)
    af.write("<!DOCTYPE html>\n")
    af.write(ElementTree.tostring(report, encoding=ENCODING))

    af.close()
    logger.info('Done')
Пример #13
0
def compare_names(db, logger, args):
    """ Generates an XML report for missing names.

    @param db: database connection.
    @param logger: logger instance.
    @param args: parsed arguments carrying check_system and source_system.
    @return: the report element tree produced by generate_report().
    """
    co = Factory.get(b'Constants')(db)
    pe = Factory.get(b'Person')(db)
    variants = [co.PersonName(t[0]) for t in pe.list_person_name_codes()]

    logger.debug("Fetching names from {!s}".format(args.check_system))
    # to_check: person id -> {name variant -> name} from the check system.
    to_check = dict()
    for name in get_names(db, args.check_system, variants):
        to_check.setdefault(name.pid, dict())[name.variant] = name

    logger.debug("Fetching names for {:d} persons from {!s}".format(
        len(to_check), args.source_system))
    # diff: person id -> list of (source name, check-system name) pairs
    # whose values disagree.
    diff = dict()
    for name in get_names(
            db, args.source_system, variants, pid=to_check.keys()):
        if name.variant not in to_check[name.pid]:
            continue
        if to_check[name.pid][name.variant].value != name.value:
            diff.setdefault(name.pid, []).append(
                (name, to_check[name.pid][name.variant]))

    logger.debug("Generating report ({:d} names)".format(len(diff)))
    report = generate_report('Names', diff)
    logger.debug("Done generating report")
    return report
Пример #14
0
def get_authorized_hosts(machine_list):
    """Resolve machine names and @group references to IP addresses.

    Entries starting with '@' are treated as Cerebrum group names whose
    dns-owner members are expanded; everything else is a host name.
    Returns a list of IP addresses.
    """
    db = Factory.get('Database')()
    gr = Factory.get('Group')(db)
    co = Factory.get('Constants')(db)

    def lookup_gids(names):
        # Map '@group' names to group entity ids, skipping unknown groups.
        gids = []
        for name in names:
            gr.clear()
            try:
                gr.find_by_name(name[1:])
            except Errors.NotFoundError:
                continue
            gids.append(gr.entity_id)
        return gids

    groups = [entry for entry in machine_list if entry.startswith('@')]
    machines = set(machine_list) - set(groups)

    # Expand group memberships (including indirect ones) into host names.
    machines.update(
        member['member_name']
        for member in gr.search_members(group_id=lookup_gids(groups),
                                        indirect_members=True,
                                        member_type=co.entity_dns_owner,
                                        include_member_entity_name=True))

    return [socket.gethostbyname(name) for name in machines]
Пример #15
0
def generate_output(stream, do_employees, sysname, person_file):
    """
    Create dump for UA.

    @param stream: output stream handed on to process_employee().
    @param do_employees: when true, employee data is extracted and written.
    @param sysname: name of the authoritative source system constant.
    @param person_file: path to the person info XML file.
    """
    # NOTE(review): `db` and `logger` are module-level globals, not
    # parameters — confirm they are initialized before this is called.
    db_person = Factory.get("Person")(db)
    ou = Factory.get("OU")(db)
    const = Factory.get("Constants")(db)

    if do_employees:
        logger.info("Extracting employee info from %s", person_file)

        source_system = getattr(const, sysname)
        parser = system2parser(sysname)(person_file, logger, False)

        # Go through all persons in person_info_file
        for xml_person in parser.iter_person():
            try:
                # Prefer the Norwegian national id; employees without one
                # are logged and skipped.
                fnr = xml_person.get_id(xml_person.NO_SSN)
                if fnr is None:
                    sapnr = xml_person.get_id(xml_person.SAP_NR)
                    logger.warn('Employee %s has no fnr', sapnr)
                    continue
                db_person.find_by_external_id(const.externalid_fodselsnr, fnr,
                                              source_system=source_system)
            except Errors.NotFoundError:
                logger.warn("Couldn't find person with fnr %s in db", fnr)
                continue

            process_employee(db_person, ou, const, xml_person, fnr, stream)
            # Reset the person object before the next find_by_external_id.
            db_person.clear()
Пример #16
0
def fill_account(acct, db, co, data):
    """Look up one account and record its group memberships in *data*.

    *data* should contain:
      - 'person': owner id -> person info (fill_person() is called on miss)
      - 'account': account id -> result cache (written by this function)
      - 'ignoregroups': group ids to skip

    Returns a dict with keys 'old' (key -> set of group ids), 'new'
    (set of group ids), 'owner' and 'ignoregroups'.
    """
    grp = Factory.get('Group')(db)
    acc = Factory.get('Account')(db)
    acc.find(acct)
    acc_types = acc.get_account_types()

    owner = data['person'].get(acc.owner_id)
    if owner is None:
        owner = fill_person(acc.owner_id, db, co, data)

    ret = {
        'old': {},
        'new': set(),
        'owner': owner,
        'ignoregroups': data['ignoregroups'],
    }
    for at in acc_types:
        matches = grp.list_ou_groups_for(at['ou_id'],
                                         affiliation=at['affiliation'],
                                         member_types=co.virtual_group_ou_accounts,
                                         indirect=False)
        bucket = ret['old'][key(at['ou_id'], at['affiliation'])] = set()
        for gid in (int(row['group_id']) for row in matches):
            if gid not in data['ignoregroups']:
                bucket.add(gid)
                ret['new'].add(gid)
    data['account'][acct] = ret
    return ret
Пример #17
0
    def ac_type_del(self, account_id, affiliation, ou_id):
        """Deletes an account from special groups which represent an
        affiliation at an OU. Delete the group if no members are present.

        @param account_id: entity id of the account to remove.
        @param affiliation: affiliation part of the group name.
        @param ou_id: OU whose acronym forms the other part of the name.
        """
        ou = Factory.get("OU")(self.db)
        ou.find(ou_id)

        # Look up the group
        # Group names are on the form "<OU acronym> <affiliation>".
        grp_name = "%s %s" % (self._get_ou_acronym(ou), affiliation)
        # Lazily create the shared Group accessor on first use.
        if not self._group:
            self._group = Factory.get('Group')(self.db)
        try:
            self._group.clear()
            self._group.find_by_name(grp_name)
            self.logger.debug("ac_type_del: Group '%s' found." % grp_name)
            if self._group.has_member(account_id):
                self._group.remove_member(account_id)
                self._group.write_db()
                self.logger.info(
                    "ac_type_del: Account '%s' deleted from group '%s'." %
                    (account_id, grp_name))
            # Deal with empty groups as well
            # (counts account members, including indirect ones; deletes the
            # group when none remain).
            if len(list(self._group.search_members(
                    group_id=self._group.entity_id,
                    indirect_members=True,
                    member_type=self._co.entity_account))) == 0:
                self._group.delete()
                self._group.write_db()
        except Errors.NotFoundError:
            # Group does not exist — nothing to remove the account from.
            self.logger.debug(
                "ac_type_del: Group '%s' not found. Nothing to do" % grp_name)
Пример #18
0
def output_text(output_file):
    """
    Initialize data structures and start generating the output.

    Connects to both Cerebrum and FS, then writes one output row per
    FS portal entry to output_file.
    """

    output_stream = MinimumSizeWriter(output_file, "w")
    # 1MB is the minimum allowed size for the portal dump.
    # The number is somewhat magic, but it seems sensible
    output_stream.min_size = 1024*1024
    db_cerebrum = Factory.get("Database")()
    logger.debug(cereconf.DB_AUTH_DIR)

    logger.debug(Database.__file__)
    # NOTE(review): the username below appears redacted ("******") in this
    # copy of the source — restore the real credential lookup before use.
    db = Database.connect(user="******",
                          service="FSPROD.uio.no",
                          DB_driver=cereconf.DB_DRIVER_ORACLE)
    db_fs = FS(db)

    db_person = Factory.get("Person")(db_cerebrum)
    db_account = Factory.get("Account")(db_cerebrum)
    constants = Factory.get("Constants")(db_cerebrum)

    # FS is first. This is intentional.
    lookup_order = [constants.system_fs]
    for authoritative_system_name in cereconf.SYSTEM_LOOKUP_ORDER:
        lookup_order.append(getattr(constants, authoritative_system_name))

    rows = db_fs.portal.list_eksmeld()
    logger.debug("Fetched portal information from FS")
    for row in rows:
        output_row(row, output_stream,
                   db_person, db_account, constants,
                   lookup_order)

    output_stream.close()
Пример #19
0
def main():
    """Entry point: set up db objects and parse options for email domain
    administration.

    NOTE(review): this function appears truncated in this copy of the
    source — the opts parsed below are never consumed here.
    """
    global db, logger, const, emailsrv

    logger = Factory.get_logger("console")
    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    db.cl_init(change_program="email_dom")
    # Creator account used for subsequent operations.
    creator = Factory.get("Account")(db)
    creator.clear()
    creator.find_by_name('bootstrap_account')
    infile = None
    emailsrv = False
    disk_in = host_in = False

    try:
        opts, args = getopt.getopt(sys.argv[1:],
                                   'f:h:d:e',
                                   ['file=',
                                    'disk=',
                                    'host=',
                                    'email-server',
                                    'help',
                                    'dryrun'])
    except getopt.GetoptError, e:
        print e
        usage(1)
Пример #20
0
 def set_fnr2move_student(self, rows):
     """Build self.fnr2move_student from move-student request rows.

     Maps each person's national id number to a list of
     (account_id, request_id, requestee_id) tuples.  Requests for
     accounts without an FS fnr are deleted from the request queue.
     """
     # Retrieve each person's national id number + account_id.
     self.fnr2move_student = {}
     account = Factory.get('Account')(self.db)
     person = Factory.get('Person')(self.db)
     for r in rows:
         if not is_valid_request(self.br, r['request_id']):
             continue
         account.clear()
         account.find(r['entity_id'])
         person.clear()
         person.find(account.owner_id)
         fnr = person.get_external_id(
             id_type=self.co.externalid_fodselsnr,
             source_system=self.co.system_fs
         )
         if not fnr:
             # No FS fnr: drop the request and persist the deletion.
             logger.warn("Not student fnr for: %i" % account.entity_id)
             self.br.delete_request(request_id=r['request_id'])
             self.db.commit()
             continue
         fnr = fnr[0]['external_id']
         self.fnr2move_student.setdefault(fnr, []).append(
             (int(account.entity_id),
              int(r['request_id']),
              int(r['requestee_id'])))
Пример #21
0
def mangle(from_server, to_server, commit):
    """Move all email targets from one email server to another.

    @param from_server: name of the server to move targets off of.
    @param to_server: name of the server to move targets onto.
    @param commit: commit the changes when true, otherwise roll back.
    """
    db = Factory.get('Database')()
    et = Factory.get('EmailTarget')(db)
    db.cl_init(change_program='update_email_target_server')
    # Yes yes yes, it is quite pretty
    es = EmailServer(db)

    # Resolve both server names to entity ids.
    es.clear()
    es.find_by_name(from_server)
    from_server_id = es.entity_id

    es.clear()
    es.find_by_name(to_server)
    to_server_id = es.entity_id

    # Re-point every target currently on from_server.
    # NOTE(review): has_key() is Python 2-only; before porting, verify
    # that `'server_id' in row` tests keys (not values) for this row type.
    for row in et.list_email_server_targets():
        if row.has_key('server_id') and row['server_id'] == from_server_id:
            et.clear()
            et.find(row['target_id'])
            old_sid = et.email_server_id
            et.email_server_id = to_server_id
            et.write_db()
            print('Moved %d from %d to %d' % \
                    (et.entity_id, old_sid, to_server_id))

    if commit:
        db.commit()
        print 'Committed all changes'
    else:
        db.rollback()
        print 'Rolled back all changes'
Пример #22
0
def init_globals():
    """Parse command line options and initialize module-level globals."""
    global db, const, logger, fnr2account_id
    global dump_dir, dryrun, immediate_evu_expire

    # Handle upper- and lowercasing of strings containing Norwegian
    # characters.
    locale.setlocale(locale.LC_CTYPE, ('en_US', 'iso88591'))

    dump_dir = cereconf.FS_DATA_DIR
    dryrun = False
    logger = Factory.get_logger("cronjob")
    immediate_evu_expire = False

    opts, rest = getopt.getopt(sys.argv[1:],
                               "d:r",
                               ["dump-dir=", "dryrun",
                                "immediate-evu-expire",])
    for option, value in opts:
        if option in ("-d", "--dump-dir"):
            dump_dir = value
        elif option in ("-r", "--dryrun"):
            dryrun = True
        elif option in ("--immediate-evu-expire",):
            immediate_evu_expire = True
        # fi
    # od

    db = Factory.get("Database")()
    db.cl_init(change_program='pop_extern_grps')
    const = Factory.get("Constants")(db)

    # fnr -> account id cache, filled by prefetch_primaryusers().
    fnr2account_id = {}
    prefetch_primaryusers()
Пример #23
0
def main():
    """Entry point for the Ifi automatic group maintenance job."""
    global db, co, logger, group_creator, dryrun

    db = Factory.get('Database')()
    db.cl_init(change_program='ifi_auto')
    co = Factory.get('Constants')(db)
    logger = Factory.get_logger("cronjob")
    dryrun = False

    try:
        opts, args = getopt.getopt(sys.argv[1:], '?',
                                   ['dryrun', 'help'])
    except getopt.GetoptError:
        usage()
    for opt, val in opts:
        if opt == '--dryrun':
            dryrun = True
        elif opt in ('-?', '--help'):
            usage(0)

    # Well-known FS supergroups that own the groups we maintain.
    supergroup = "internal:uio.no:fs:{autogroup}"
    fg_supergroup = "internal:uio.no:fs:{ifi_auto_fg}"
    group_creator = get_account(cereconf.INITIAL_ACCOUNTNAME).entity_id
    process_groups(supergroup, fg_supergroup)
    if dryrun:
        logger.info("All done")
        return
    logger.debug("commit...")
    db.commit()
    logger.info("All done")
Пример #24
0
def enforce_user_constraints(db):
    """ Check a number of business rules for our users.

    Forces a default expire date onto FA/VA accounts that have none, and
    clamps expire dates that lie too far in the future.

    @param db: Cerebrum database connection.
    """
    account = Factory.get("Account")(db)
    # BUG FIX: the Constants factory was invoked without the db connection
    # (Factory.get("Constants")()), unlike everywhere else in this module.
    const = Factory.get("Constants")(db)
    for row in account.list(filter_expired=False):
        # We check FA/VA only
        if row["np_type"] not in (const.fedaccount_type,
                                  const.virtaccount_type):
            continue

        account.clear()
        account.find(row["entity_id"])
        # Expiration is not set -> force it to default
        if row["expire_date"] is None:
            logger.warn("Account %s (id=%s) is missing expiration date.",
                        account.account_name,
                        account.entity_id)
            account.expire_date = now() + account.DEFAULT_ACCOUNT_LIFETIME
            account.write_db()

        # Expiration is too far in the future -> force it to default
        if row["expire_date"] - now() > account.DEFAULT_ACCOUNT_LIFETIME:
            logger.warn("Account %s (id=%s) has expire date too far in the"
                        " future.", account.account_name, account.entity_id)
            account.expire_date = now() + account.DEFAULT_ACCOUNT_LIFETIME
            account.write_db()
Пример #25
0
def main():
    """Main driver for the file generation."""

    global xmlwriter, db, const, logger

    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    logger = Factory.get_logger("cronjob")

    try:
        opts, args = getopt.getopt(sys.argv[1:], "o:",
                                   ["out-file="])
    except getopt.GetoptError:
        usage(1)

    filename = None
    for opt, val in opts:
        if opt in ('-o', '--out-file'):
            filename = val
    # An output file is mandatory.
    if not filename:
        usage(1)

    stream = AtomicFileWriter(filename)
    xmlwriter = xmlprinter.xmlprinter(stream,
                                      indent_level=2,
                                      # human-friendly output
                                      data_mode=True,
                                      input_encoding="UTF-8")
    # Get information about persons
    persons = fetch_person_data()
    # Get information about courses (kurs)
    courses = fetch_course_data()
    # Generate and write document
    generate_document(persons, courses)
    stream.close()
Пример #26
0
def main():
    """Start method for this script."""
    # BUG FIX: the logger returned by Factory.get_logger() was discarded
    # while `logger` was used just below; bind it explicitly.
    # NOTE(review): if a module-level `logger` exists this shadows it
    # locally — confirm against the rest of the module.
    logger = Factory.get_logger("cronjob")

    logger.info("Performing uio-tils/uio-ans group updates")

    try:
        options, rest = getopt.getopt(sys.argv[1:],
                                      "dhs:", ["dryrun",
                                               "help",
                                               "source-spec="])
    except getopt.GetoptError:
        usage()
        sys.exit(1)

    dryrun = False
    # BUG FIX: sysname/filename were unbound (NameError at perform_update)
    # when -s/--source-spec was not supplied.
    sysname = filename = None
    for option, value in options:
        if option in ("-d", "--dryrun",):
            dryrun = True
        elif option in ("-h", "--help",):
            usage()
            sys.exit(0)
        elif option in ("-s", "--source-spec"):
            sysname, filename = value.split(":")
    if sysname is None or filename is None:
        usage()
        sys.exit(1)

    db = Factory.get("Database")()
    db.cl_init(change_program="update_emp_grp")
    perform_update(db, sysname, filename)
    if dryrun:
        logger.info("updates completed. all changes rolled back")
        db.rollback()
    else:
        db.commit()
        logger.info("updates completed. all changes committed")
Пример #27
0
def main():
    """Entry point: generate the MAIL LDIF export file."""
    global verbose, f, db, co, ldap, auth, start

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', "--verbose", action="count", default=0)
    parser.add_argument('-m', "--mail-file")
    parser.add_argument('-s', "--spread", default=ldapconf('MAIL', 'spread', None))
    # -i caps the allowed change size at a fixed 100 (see ldif_outfile).
    parser.add_argument('-i', "--ignore-size", dest="max_change", action="store_const", const=100)
    parser.add_argument('-a', "--no-auth-data", dest="auth", action="store_false", default=True)
    args = parser.parse_args()

    verbose = args.verbose
    auth = args.auth

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    start = now()
    curr = now()

    if verbose:
        logger.debug("Loading the EmailLDAP module...")
    ldap = Factory.get('EmailLDAP')(db)
    if verbose:
        logger.debug("  done in %d sec." % (now() - curr))

    # Spread may be given symbolically; resolve it to its numeric code.
    spread = args.spread
    if spread is not None:
        spread = map_spreads(spread, int)

    f = ldif_outfile('MAIL', args.mail_file, max_change=args.max_change)
    get_data(spread)
    end_ldif_outfile('MAIL', f)
Пример #28
0
def init_globals(args):
    """Initialize the module-level Cerebrum accessors used by this script."""
    global db, const, group, ou, person
    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    ou = Factory.get("OU")(db)
    group = Factory.get("Group")(db)
    person = Factory.get("Person")(db)
Пример #29
0
    def __init__(self, db_conn, pe_cls=None, ac_cls=None, gr_cls=None,
                 co_cls=None, ou_cls=None):
        """ Initialize with a Cerebrum.Database object.

        @param db_conn: database connection; commit is aliased to rollback
            so tests cannot permanently alter the database.
        @param pe_cls: optional Person class override (Factory default).
        @param ac_cls: optional Account class override (Factory default).
        @param gr_cls: optional Group class override (Factory default).
        @param co_cls: optional Constants class override (Factory default).
        @param ou_cls: optional OU class override (Factory default).
            BUG FIX: previously not a parameter at all; the OU branch also
            tested gr_cls by mistake.
        """
        self._db = db_conn
        # Safety net: commits become rollbacks so tests can't alter the db.
        self._db.commit = self._db.rollback

        # BUG FIX: the old checks used `isinstance(x_cls, SomeClass)`,
        # which is always False for a class object (or None), so any
        # caller-supplied override was silently replaced by the Factory
        # default.  Test for None instead, honoring the overrides.
        if pe_cls is None:
            pe_cls = Factory.get('Person')
        self._pe = pe_cls(self._db)

        if ac_cls is None:
            ac_cls = Factory.get('Account')
        self._ac = ac_cls(self._db)

        if gr_cls is None:
            gr_cls = Factory.get('Group')
        self._gr = gr_cls(self._db)

        if ou_cls is None:
            ou_cls = Factory.get('OU')
        self._ou = ou_cls(self._db)

        if co_cls is None:
            co_cls = Factory.get('Constants')
        self._co = co_cls(self._db)

        self._init_account_id = None
        self._init_group_id = None

        # Bookkeeping of created items so they can be destroyed later.
        self.constants = []
        self.account_ids = set()
        self.person_ids = set()
        self.group_ids = set()
        self.ou_ids = set()
def get_person_info(db, person, ssn_type, source_system,
                    telephone_types):
    """Collect information about `person`.

    :param Cerebrum.database.Database db: DB connection object.
    :param person: a person entity id, or an already-populated Person object.
    :param Cerebrum.Constants._EntityExternalIdCode ssn_type: External id type
        to filter by.
    :param Cerebrum.Constants._AuthoritativeSystemCode source_system: Source
        system to filter by.
    :param Cerebrum.Constants._ContactInfoCode telephone_types: Filter
        telephone entries by type.
    :return: dict with name, title, feide id, e-mail, phone and ssn entries.
    """
    if isinstance(person, (int, long)):
        # Got a bare entity id -- look the person up ourselves.
        pe = Factory.get('Person')(db)
        pe.find(person)
    else:
        pe = person

    co = Factory.get('Constants')(db)

    honorific = 'Mr' if pe.gender == co.gender_male else 'Ms'
    info = {
        'firstname': pe.get_name(source_system, co.name_first),
        'lastname': pe.get_name(source_system, co.name_last),
        'title': honorific,
        'feide_id': _construct_feide_id(db, pe),
        'email_address': _get_primary_emailaddress(db, pe),
        'phone': _get_phone(db, pe, source_system, telephone_types),
        'ssn': _get_ssn(db, pe, ssn_type, source_system),
    }
    return info
Пример #31
0
##  * os.system should be replaced by something better. The new
##    subprocess module (from 2.4) is preferrable, but we must wait
##    til we no longer support version < 2.4.
##

import getopt
import sys
import os
import time
import re
import tempfile
import shutil
import cerebrum_path
from Cerebrum.Utils import Factory

logger = Factory.get_logger("cronjob")
# Pattern matching a date-stamped tarball suffix, e.g. "-2020-01-02-x.tar.gz".
# Raw string prevents DeprecationWarning/SyntaxWarning for the \d and \. escapes.
postfix_re = r'-\d+-\d+-\d+.*\.tar\.gz'


def find_files(name_pattern, dirname, file_type='file', min_age=0):
    """
    Find all files in dirname matching name_pattern that are older
    than min_age. Note that search is not recursive.
    Return as a list of relative paths.

    @param name_pattern: name pattern (python regexp) of files to delete.
    @type  name_pattern: string
    @param dirname: Directory where to look for files matching name pattern.
    @type  dirname: string
    @param file_type: file_type must be file or directory.
    @type  file_type: string 
Пример #32
0
 def __init__(self, database):
     """Bind this DiskQuota instance to `database`.

     :param database: a Cerebrum.Database connection object.
     """
     super(DiskQuota, self).__init__(database)
     # Cache a Constants accessor for later use by the quota methods.
     self.co = Factory.get('Constants')(database)
Пример #33
0
Adresselinje2
Poststednr          norwegian zip code. Empty for foreign addresses.
Landnavn            Country. Cerebrum has no country information.
Adrmatekode         empty
Registergruppe      This version, only POLS-TILS
Registerkode        empty
Adrtypekode         empty
"""

import cerebrum_path
import cereconf

from Cerebrum.Utils import Factory
from Cerebrum import Errors

# Module-level Cerebrum API objects shared by the functions below.
db = Factory.get("Database")()
constants = Factory.get("Constants")(db)
logger = Factory.get_logger("console")


def get_name(person):
    """Return person's name.
    The returned name should be on the form:
    Last, First
    And be at most 40 characters wide.
    """
    try:
        logger.debug("Fetching last name from cache")
        last = person.get_name(constants.system_cached, constants.name_last)
    except Errors.NotFoundError:
        try:
Пример #34
0
FORMAT OF DATAFILE

* One MAC-address per line
* Comma-seperated list of all IP-addresses associated with said MAC.

The list of IP-addresses starts at the 20th character on the line; the
space between the MAC-address and the list of IP-addresses is padded
with <space>-characters.

""" % progname

__version__ = "$Revision$"
# $URL$

logger = Factory.get_logger("cronjob")

db = Factory.get('Database')()
db.cl_init(change_program=progname[:16])


def get_data_from_DB():
    """Retrieves all relevant data needed from database

    @rtype: dict
    @return: A dictionary where the keys are MAC-addresses and the
             values are lists of IP-addresses associated with each
             MAC-address.    
    
    """
    ips_by_mac = {}
Пример #35
0
    def process_account_types(self):
        """Feed the handler account_types that should be updated. An
        account_type will result in being member of a special group.
        Traverse the groups and account_types to sync them. In a
        Changelog setting, this information will be fed to the Handler
        by the ChangeLog.

        In this batch job, we generate a map of affiliations and
        groups and send the correct add/del requests to the Handler.
        This is a bit ineffective, but has to be like this to mimic
        the ChangeLog."""
        grp = Factory.get('Group')(self.db)
        ac = Factory.get('Account')(self.db)
        ou = Factory.get('OU')(self.db)

        # Build up a cache of account_types:
        # (affiliation, ou_id) -> [account_id, ...]
        ac2aff = {}
        for row in ac.list_accounts_by_type(
                affiliation=(self.co.affiliation_ansatt,
                             self.co.affiliation_teacher,
                             self.co.affiliation_elev)):
            ac2aff.setdefault((int(row['affiliation']), int(row['ou_id'])),
                              []).append(row['account_id'])

        # Group names are expected to look like "<OU acronym> <affiliation>".
        # BUG FIX: raw string -- '\w' and '\s' are invalid escape sequences
        # in a plain string literal.
        group_name_re = re.compile(r'(\w+)\s+(\w+)')
        # TODO: move into procconf.py
        txt2aff = {'Tilsette': (self.co.affiliation_ansatt,
                                self.co.affiliation_teacher),
                   'Elevar': (self.co.affiliation_elev,)}
        aff_grp2ac = {}
        # Resolve each trait-marked group into an OU and an affiliation.
        for row in grp.list_traits(self.co.trait_group_affiliation):
            grp.clear()
            grp.find(row['entity_id'])
            self.logger.debug("Processing '%s'." % grp.group_name)
            m = group_name_re.search(grp.group_name)
            if m:
                affiliation = m.group(2)
                ou_acronym = m.group(1)
            else:
                # Group's name doesn't match the criteria. Fail.
                self.logger.warning("Group '%s' has an odd name for a "
                                    "generated aff group. Skipping"
                                    % grp.group_name)
                continue
            ous = ou.search_name_with_language(entity_type=self.co.entity_ou,
                                               name_variant=self.co.ou_name_acronym,
                                               name=ou_acronym,
                                               name_language=self.co.language_nb)
            if len(ous) > 1:
                self.logger.warning("Acronym '%s' results in more than one OU. "
                                    "Skipping" % ou_acronym)
                continue
            if len(ous) == 0:
                self.logger.warning("Acronym '%s' doesn't resolve to an OU." %
                                    ou_acronym)
                # TBD: What to do? Delete the group? Let The Handler deal with it?
                continue
            ou.clear()
            ou.find(ous[0]['entity_id'])
            # Send a delete call to the Handler if the group has accounts in it
            # without the proper account_type.  Members that *do* have a
            # matching account_type are remembered in aff_grp2ac so the add
            # phase below can skip them.
            for member in grp.search_members(group_id=grp.entity_id):
                member_id = int(member["member_id"])
                for a in txt2aff[affiliation]:
                    if ((int(a), ou.entity_id) in ac2aff and
                            member_id in ac2aff[(int(a), ou.entity_id)]):
                        aff_grp2ac.setdefault(
                            (int(a), ou.entity_id), []
                        ).append(member_id)
                        break
                else:
                    self.proc.ac_type_del(member_id, affiliation, ou.entity_id)

        # Let the handler take care of added account_types: everything in the
        # account_type cache that is not already a group member.
        for i in ac2aff:
            for account in ac2aff[i]:
                if not (i in aff_grp2ac and account in aff_grp2ac[i]):
                    self.proc.ac_type_add(account, i[0], i[1])
Пример #36
0
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

from __future__ import unicode_literals

import six

from Cerebrum import Errors
from Cerebrum.Utils import Factory, prepare_string, argument_to_sql
from Cerebrum.Entity import EntityName, EntitySpread

Entity_class = Factory.get("Entity")


@six.python_2_unicode_compatible
class Disk(EntitySpread, Entity_class):
    __read_attr__ = ('__in_db', )
    __write_attr__ = ('host_id', 'path', 'description')

    def clear(self):
        """Clear all attributes associating instance with a DB entity."""
        # NOTE(review): __super is presumably supplied by Cerebrum's
        # auto_super metaclass machinery -- delegate first so parent classes
        # reset their own state.
        self.__super.clear()
        self.clear_class(Disk)
        # Reset the changed-attribute tracking (presumably consumed by
        # write_db() -- confirm against the Entity base class).
        self.__updated = []

    def populate(self, host_id, path, description, parent=None):
        """Set instance's attributes without referring to the Cerebrum DB."""
Пример #37
0
def get_disk(database,
             disk_mapping,
             ou_id,
             aff_code,
             status_code,
             perspective,
             ou_class=None,
             constants=None):
    """
    Find the appropriate disk depending on OU, Aff, Status

    This is a hierarchical selection process.
    The selection process is as follows:
      OU+Aff+Status > OU+Aff > OU > parent OU+Aff+Status > parent OU+Aff
      and so on until there is a hit.

    :param disk_mapping: Instance of OUDiskMapping

    :type constants: Cerebrum.Utils._dynamic_Constants
    :param constants: Constants generated with Factory.get

    :type ou_class: Cerebrum.OU.OU or None
    :param ou_class: Unpopulated Ou object

    :type perspective: int or Cerebrum.Constants._OUPerspectiveCode
    :param perspective: Ou perspective

    :type database: Cerebrum.CLDatabase.CLDatabase
    :param database: Database connection

    :param int ou_id: entity id of the OU

    :param aff_code: None or Cerebrum.Constants._PersonAffiliationCode

    :param status_code: None or Cerebrum.Constants._PersonAffStatusCode

    :rtype: int
    :return: The entity id of the disk
    """
    if ou_class is None:
        ou_class = Factory.get("OU")(database)
    if constants is None:
        constants = Factory.get("Constants")(database)

    # Most specific first: is there a hit for OU + aff + status?
    if status_code:
        try:
            row = disk_mapping.get(ou_id, aff_code, status_code)
        except Errors.NotFoundError:
            pass
        else:
            return row["disk_id"]

    # With just OU and aff?
    if aff_code:
        try:
            row = disk_mapping.get(ou_id, aff_code, None)
        except Errors.NotFoundError:
            pass
        else:
            return row["disk_id"]

    # With just OU?
    try:
        row = disk_mapping.get(ou_id, None, None)
    except Errors.NotFoundError:
        pass
    else:
        return row["disk_id"]

    # No hit at this OU -- jump to the parent OU and start over.
    ou_class.find(ou_id)
    parent_id = ou_class.get_parent(perspective)
    ou_class.clear()
    # BUG FIX: pass constants down as well; the recursion used to drop it,
    # re-instantiating a Constants object at every level.
    disk_id = get_disk(database,
                       disk_mapping,
                       parent_id,
                       aff_code,
                       status_code,
                       perspective,
                       ou_class=ou_class,
                       constants=constants)
    return disk_id
Пример #38
0
"""

import cereconf

import sys
import os
import getopt
from mx.DateTime import now

from Cerebrum import Errors
from Cerebrum.Utils import Factory
from Cerebrum.utils.sms import SMSSender
from Cerebrum.QuarantineHandler import QuarantineHandler

# Module-level singletons: logger, database (registered in the changelog as
# 'SMS-reminder'), constant accessors and the SMS gateway client.
logger = Factory.get_logger('cronjob')
db = Factory.get('Database')()
db.cl_init(change_program='SMS-reminder')
co = Factory.get('Constants')(db)
clconst = Factory.get('CLConstants')(db)
sms = SMSSender(logger=logger)

# Dry-run by default; presumably flipped by a command line option -- confirm
# against the option parsing further down the file.
commit = False


def usage(exitcode=0):
    print """Usage: %(file)s --check-trait TRAIT --set-trait TRAIT [--days DAYS]

    %(doc)s

    --check-trait TRAIT     The trait which is checked for. Only users with the
Пример #39
0
def get_matching_accs(db):
    """ Get defunct account data.

    This function searches the database for accounts where:
      - account is not expired
      - account is owned by a person with no affiliations
      - account has been quarantined for > 1 year

    :param db: database connection.

    :return generator:
        A generator that yields dicts with account and quarantine data
    """
    ac = Factory.get('Account')(db)
    pe = Factory.get('Person')(db)
    co = Factory.get('Constants')(db)

    def _u(db_value):
        # Normalize a db value to unicode text; bytes are decoded using the
        # database encoding.
        if db_value is None:
            return text_type('')
        if isinstance(db_value, bytes):
            return db_value.decode(db.encoding)
        return text_type(db_value)

    def _row_to_quar(row):
        """ list_entity_quarantines row to dict """
        return {
            'q_type': text_type(co.Quarantine(row['quarantine_type'])),
            'q_desc': _u(row['description']),
            'q_date': text_type(row['start_date'].strftime('%Y-%m-%d')),
        }

    logger.debug('caching personal accounts ...')
    owner_type = co.entity_person
    accounts = ac.search(owner_type=owner_type)
    logger.info('found %d accounts with owner_type=%r', len(accounts),
                text_type(owner_type))

    logger.debug('caching account homedirs ...')
    acc2disk = dict(
        (r['account_id'], r['path']) for r in ac.list_account_home())
    logger.info('found %d accounts assigned to a disk', len(acc2disk))

    logger.debug('caching active account quarantines ...')
    acc2quar = defaultdict(list)
    for q in ac.list_entity_quarantines(only_active=True,
                                        entity_types=co.entity_account):
        acc2quar[q['entity_id']].append(q)
    logger.info('found quarantines for %d accounts', len(acc2quar))

    logger.debug('caching person names ...')
    person2name = dict(
        (r['person_id'], r['name'])
        for r in pe.search_person_names(name_variant=co.name_full,
                                        source_system=co.system_cached))
    logger.info('found full names for %d persons', len(person2name))

    # Add person_id to the list if the person has an affiliation
    logger.debug('caching person affiliations ...')
    person_has_affs = set((r['person_id'] for r in pe.list_affiliations()))
    logger.info('found %d persons with affiliations', len(person_has_affs))

    for acc in accounts:
        # Is the account owner still affiliated?
        if acc['owner_id'] in person_has_affs:
            continue

        for quar in acc2quar[acc['account_id']]:
            if (quar['start_date'] + DateTimeDelta(365)) < now():
                break
        else:
            # loop terminated without finding a 'quar' -- i.e. no active
            # quarantine older than one year
            continue

        # FIX: reuse the _row_to_quar() helper for the quarantine fields --
        # it was previously defined but never called, and the same three
        # key/value pairs were duplicated inline here.
        entry = {
            'account_name': _u(acc['name']),
            'full_name': _u(person2name.get(acc['owner_id'])) or u'(not set)',
            'disk_path': _u(acc2disk.get(acc['account_id'])) or u'(not set)',
        }
        entry.update(_row_to_quar(quar))
        yield entry
Пример #40
0
def process(check_trait, set_trait, days, phone_types, message, only_aff):
    """Send a reminder SMS to accounts carrying `check_trait`.

    Accounts are skipped when: the trait is older than `days` days or newer
    than 24 hours, `set_trait` is already present (reminder already sent),
    the account is expired or quarantined, the owner lacks one of the
    `only_aff` affiliations (when given), or the password has already been
    changed.  After a successful SMS, `set_trait` is written so the account
    is not reminded again.  The module-global `commit` flag decides whether
    database changes are committed or rolled back.
    """
    logger.info("SMS-reminder started")
    if commit:
        logger.info("In commit, will send out SMS")
    else:
        logger.info("In dryrun, will not send SMS")

    limit_date = now() - days
    logger.debug('Matching only traits newer than: %s', limit_date)

    ac = Factory.get('Account')(db)
    pe = Factory.get('Person')(db)

    target_traits = set(
        t['entity_id'] for t in ac.list_traits(code=check_trait)
        if (t['date'] >= limit_date and  # Filter out old traits.
            t['date'] < (now() - 1)))  # Filter out traits from
    # the last 24 hours.
    logger.debug('Found %d traits of type %s from last %d days to check',
                 len(target_traits), check_trait, days)
    set_traits = set(t['entity_id'] for t in ac.list_traits(code=set_trait)
                     if t['date'] >= limit_date)
    logger.debug('Found %d already set traits of type %s from last %d days',
                 len(set_traits), set_trait, days)
    # Accounts that already got the reminder need no second SMS.
    target_traits.difference_update(set_traits)
    logger.debug('Then %d traits of type %s remains to be checked',
                 len(target_traits), check_trait)

    # Optional affiliation filter: collect the person ids that qualify.
    pe_affs = set()
    if only_aff:
        for a in only_aff:
            pe_affs.update(r['person_id']
                           for r in pe.list_affiliations(affiliation=a))
        logger.debug('Found %d person affiliations to filter by', len(pe_affs))
    else:
        logger.debug('No only_aff specified, so no filtering on affiliation')

    processed = 0

    for account_id in target_traits:
        ac.clear()
        try:
            ac.find(account_id)
        except Errors.NotFoundError:
            logger.error("Could not find user with entity_id: %s, skipping",
                         account_id)
            continue

        # Skip expired, quarantined and non-matching accounts.
        if ac.is_expired():
            logger.info("Account %s is expired, skipping", ac.account_name)
            continue
        if QuarantineHandler.check_entity_quarantines(
                db, ac.entity_id).is_locked():
            logger.info("Account %s is quarantined, skipping", ac.account_name)
            continue
        if pe_affs and ac.owner_id not in pe_affs:
            logger.info(
                'Account %s without given person affiliation, skipping',
                ac.account_name)
            continue

        # Check password changes for the user
        if have_changed_password(ac):
            logger.info("Account %s already changed password, skipping",
                        ac.account_name)
            continue

        # Everything ready, should send the SMS
        if send_sms(ac, pe, phone_types, message=message):
            # Record that the reminder was sent by stamping `set_trait`.
            ac.populate_trait(code=set_trait, date=now())
            ac.write_db()
            if commit:
                db.commit()
            else:
                db.rollback()
            logger.debug("Trait set for %s", ac.account_name)
            processed += 1
        else:
            logger.warn('Failed to send SMS to %s', ac.account_name)

    logger.info("SMS-reminder done, %d accounts processed" % processed)
Пример #41
0
    def get_person_to_authn_level_map(self):
        """ Creates a mapping from person_id to (feide_id, level).

        Reads the feide_service_authn_level table; level entries bound to a
        person are used directly, while entries bound to a group are expanded
        (recursively) to the group's person and account members, with
        accounts mapped back to their owning person.

        :return dict: person_id -> set of (feide_id, level) tuples.
        """
        gr = Factory.get('Group')(self._db)
        co = Factory.get('Constants')(self._db)

        authn_level_query = """
        SELECT DISTINCT fsal.service_id AS service_id,
                        fsi.feide_id AS feide_id,
                        fsal.entity_id AS entity_id,
                        fsal.level AS level,
                        ei.entity_type AS entity_type
        FROM [:table schema=cerebrum name=feide_service_authn_level] fsal,
             [:table schema=cerebrum name=feide_service_info] fsi,
             [:table schema=cerebrum name=entity_info] ei
        WHERE fsal.entity_id=ei.entity_id
        AND fsal.service_id=fsi.service_id"""

        def account_ids_to_person_ids(account_ids):
            """ Takes a sequence of account IDs and returns their owners ID
            if the owner is a person. """
            if not account_ids:
                return []
            binds = {}
            where = [
                argument_to_sql(account_ids, 'ai.account_id', binds, int),
                'ai.owner_id=ei.entity_id',
                argument_to_sql(co.entity_person, 'ei.entity_type', binds, int)
            ]
            where_str = " AND ".join(where)
            sql = """
            SELECT DISTINCT ai.owner_id
            FROM [:table schema=cerebrum name=account_info] ai,
                 [:table schema=cerebrum name=entity_info] ei
            WHERE {}""".format(where_str)
            return [x['owner_id'] for x in self.query(sql, binds)]

        def make_entry(data):
            # Normalize a query row to the (feide_id, level) tuple stored
            # per person.
            return (data['feide_id'], data['level'])

        groups = []
        persons = {}
        # Fetch authentication levels for groups and persons
        for authn in self.query(authn_level_query):
            # Persons can be added directly
            if authn['entity_type'] == co.entity_person:
                persons.setdefault(authn['entity_id'],
                                   set()).add(make_entry(authn))
            # ...while groups require extra processing
            elif authn['entity_type'] == co.entity_group:
                groups.append(authn)

        for group in groups:
            # We flatten group memberships and only fetch persons and accounts
            members = gr.search_members(
                group_id=group['entity_id'],
                indirect_members=True,
                member_type=[co.entity_person, co.entity_account])
            account_ids = []
            for member in members:
                # Persons can be added directly
                if member['member_type'] == co.entity_person:
                    persons.setdefault(member['member_id'],
                                       set()).add(make_entry(group))
                # ...while accounts require extra processing
                elif member['member_type'] == co.entity_account:
                    account_ids.append(member['member_id'])
            # Map account IDs to person IDs
            for person_id in account_ids_to_person_ids(account_ids):
                persons.setdefault(person_id, set()).add(make_entry(group))
        return persons
Пример #42
0
        ])
    except getopt.GetoptError, e:
        print e
        usage(1)

    debug = 0
    do_drop = False
    stage = None
    extra_files = []
    db_user = cereconf.CEREBRUM_DATABASE_CONNECT_DATA['table_owner']
    if db_user is None:
        db_user = cereconf.CEREBRUM_DATABASE_CONNECT_DATA['user']
        if db_user is not None:
            print "'table_owner' not set in CEREBRUM_DATABASE_CONNECT_DATA."
            print "Will use regular 'user' (%s) instead." % db_user
    db = Factory.get('Database')(user=db_user)
    db.cl_init(change_program="makedb")

    # Force all Constants-writing to use the same db-connection
    # as CREATE TABLE++
    # TDB: could _CerebrumCode have a classmethod to do this, and
    # also empty all cached constants?
    from Cerebrum.Constants import _CerebrumCode
    _CerebrumCode.sql.fset(None, db)

    meta = Metainfo.Metainfo(db)
    for opt, val in opts:
        if opt in ('-h', '--help'):
            usage()
        if opt in ('-d', '--debug'):
            debug += 1
Пример #43
0
    def bofhd_login(self, uname, password):
        """ Authenticate and create session.

        :param string uname: The username
        :param string password: The password, preferably in latin-1

        :return string:
            If authentication is successful, a session_id registered in
            BofhdSession is returned. This session_id can be used to run
            commands that requires authentication.

        :raise CerebrumError: If the user is not allowed to log in.

        """
        # Every outcome below is counted via statsd under "bofhd.login".
        stats_client = statsd.make_client(self.server.stats_config,
                                          prefix="bofhd.login")

        account = Factory.get('Account')(self.db)
        with stats_client.pipeline() as stats:
            # Deliberately report the same generic error for unknown user
            # and bad password, so callers can't probe for valid usernames.
            try:
                account.find_by_name(uname)
            except Errors.NotFoundError:
                stats.incr('deny-creds')
                self.logger.info(
                    'Failed login for %r from %r: unknown username', uname,
                    format_addr(self.client_address))
                raise CerebrumError("Unknown username or password")

            if not account.verify_auth(password):
                stats.incr('deny-creds')
                self.logger.info(
                    'Failed login for %r from %r: password mismatch', uname,
                    format_addr(self.client_address))
                raise CerebrumError("Unknown username or password")

            # Check quarantines
            quarantines = self._get_quarantines(account)
            if quarantines:
                stats.incr('deny-quarantine')
                self.logger.info('Failed login for %r from %r: quarantines %s',
                                 uname, format_addr(self.client_address),
                                 quarantines)
                raise CerebrumError(
                    'User has active quarantines, login denied: %s' %
                    ', '.join(quarantines))

            # Check expire_date
            if account.is_expired():
                stats.incr('deny-expire')
                self.logger.info(
                    'Failed login for %r from %r: account expired', uname,
                    format_addr(self.client_address))
                raise CerebrumError('User is expired, login denied')

            # All checks passed: persist the session and commit; any failure
            # here rolls the session registration back.
            try:
                self.logger.info('Successful login for %r from %r', uname,
                                 format_addr(self.client_address))
                session = BofhdSession(self.db, self.logger)
                session_id = session.set_authenticated_entity(
                    account.entity_id, self.client_address[0])
                self.db_commit()
                self.server.sessions[session_id] = str(account.entity_id)
                stats.incr('allow')
                return session_id
            except Exception:
                stats.incr('deny-error')
                self.db_rollback()
                raise
Пример #44
0
import getopt
import cereconf

from Cerebrum import Errors
from Cerebrum import Account
from Cerebrum import Group
from Cerebrum.Utils import Factory

from Cerebrum.modules.dns import ARecord
from Cerebrum.modules.dns import CNameRecord
from Cerebrum.modules.dns import DnsOwner
from Cerebrum.modules.dns import HostInfo
from Cerebrum.modules.dns import IPNumber
from Cerebrum.modules.dns import Utils

# Module-level database handle; changes are logged as 'import_dns'.
db = Factory.get('Database')()
db.cl_init(change_program='import_dns')
co = Factory.get('Constants')(db)
# HACK: forces DEBUG log level by appending to sys.argv before the logger is
# created -- looks like a debugging leftover; consider removing for
# production runs.
sys.argv.extend(["--logger-level", "DEBUG"])
logger = Factory.get_logger("cronjob")
# DNS API objects shared by the rest of the module.
ipnumber = IPNumber.IPNumber(db)
arecord = ARecord.ARecord(db)
cname = CNameRecord.CNameRecord(db)
dnsowner = DnsOwner.DnsOwner(db)
host = HostInfo.HostInfo(db)
mx_set = DnsOwner.MXSet(db)

# logger.setLevel(logger.debug)
# Regex matching the marker line that separates hand-edited zone file content
# from the generated part below it.
header_splitter = r'^; AUTOGENERATED: do not edit below this line'

class Netgroups(object):
Пример #45
0
import cereconf

import sys
import getopt

from collections import defaultdict

from Cerebrum import Errors
from Cerebrum.Utils import Factory
from Cerebrum.modules.xmlutils.system2parser import system2parser
from Cerebrum.modules.xmlutils.object2cerebrum import XML2Cerebrum
from Cerebrum.modules.xmlutils.xml2object import DataEmployment
from Cerebrum.modules.xmlutils.xml2object import DataOU
from Cerebrum.modules.xmlutils.xml2object import DataAddress

# Module-level Cerebrum objects for the HR import; changes are logged as
# 'import_HR'.
db = Factory.get('Database')()
db.cl_init(change_program='import_HR')
const = Factory.get('Constants')(db)
group = Factory.get("Group")(db)
person = Factory.get("Person")(db)
logger = Factory.get_logger("cronjob")

# Cache of sko lookups, filled by get_sko() (see its docstring).
ou_cache = {}


def get_sko((fakultet, institutt, gruppe), system):
    """Lookup the information on a sko, and cache it for later.

    :Parameters:
      fakultet, institutt, gruppe : basestring or number
        sko designation.
Пример #46
0
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Generates a JSON file with events."""

import cereconf
getattr(cereconf, 'linter', 'must be silent')

from Cerebrum.Utils import Factory

logger = Factory.get_logger('cronjob')


def _parse_selection_criteria(db, criteria):
    """Parse criterias for selection.

    I.e: the string spread:add@account,person will be converted to:
    (co.spread_add, [co.entity_account, co.entity_person])

    :param Cerebrum.Database db: A Database object.
    :param basestring criteria: The criteria to parse.
    :rtype: tuple(Cerebrum.Constants._ChangeTypeCode,
                  list(Cerebrum.Constants._EntityTypeCode,))
    :return: A tuple consisting of the change type, and a list of entity types
        this should be filtered on."""
    t = criteria.split('@')
Пример #47
0
       
    Example:
      Show top-20 users whos paid qouta was reduced from june to august:
      quota_stats.py --from 2004-06-01 --to 2004-08-05 --sort-user-by paid \\
         --user-rows 20 --top-user

      Show usage by faculty:
      quota_stats.py --from 2004-06-01 --to 2004-08-05 --sted-level fak --printjobs

      Show payment statistics:
      quota_stats.py --from 2004-06-01 --to 2004-08-05 --payments

"""

# Module-level database handles used by the statistics functions below.
db = Factory.get('Database')()
co = Factory.get('Constants')(db)
ppq = PaidPrinterQuotas.PaidPrinterQuotas(db)


def sort_by_numjobs(a, b):
    """Sorting function for use by 'user_print_stats'

    Python 2 cmp-style comparator: orders two entries by descending absolute
    value of the column selected by the module-global ``stat_sort_key``
    (presumably set by 'user_print_stats' before sorting -- confirm).
    """
    return cmp(abs(b[stat_sort_key]), abs(a[stat_sort_key]))


def user_print_stats(from_date, to_date, sort_by='jobs', num=10):
    """Prints information about single user's printing.

    The top X users, determined by a given criterium are displayed,
    ordered by that criterium. Default is top 10 users by total number
    of pages printed. Other critera are 'jobs', 'free' and 'paid'.
Пример #48
0
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
List statistical information about ephorte persons. 


"""

import sys
import getopt
import cerebrum_path
from Cerebrum.Utils import Factory

# Module-level logger and database handles for the report functions below.
logger = Factory.get_logger("console")
db = Factory.get('Database')()
const = Factory.get('Constants')(db)

# Report options; presumably overridden by command line parsing elsewhere in
# this script -- defaults: summary report to stdout.
options = {"detailed_reports": False, "output": sys.stdout}


def present_multi_results(output_stream,
                          result,
                          topic,
                          header=(),
                          line_format=None,
                          line_sep=" " * 3):
    """Generic method for presenting results about multiple data to
    the user.
Пример #49
0
def process_person_callback(person_info):
    """Called when we have fetched all data on a person from the xml
    file.  Updates/inserts name, address and affiliation
    information.

    :param person_info: dict keyed on FS data type ('aktiv',
        'fagperson', 'evu', ...); each value is a list of rows with the
        data of that type for this person.
    """
    global no_name
    try:
        # Build the fnr string before validating it, so that the
        # except-clause below can log it.  Previously `fnr` was unbound
        # (NameError) when validation of the raw input failed.
        fnr = "%06d%05d" % (int(person_info['fodselsdato']),
                            int(person_info['personnr']))
        fnr = fodselsnr.personnr_ok(fnr)
        logger.info("Process %s " % (fnr))
        (year, mon, day) = fodselsnr.fodt_dato(fnr)
        if (year < 1970
                and getattr(cereconf, "ENABLE_MKTIME_WORKAROUND", 0) == 1):
            # Seems to be a bug in time.mktime on some machines
            year = 1970
    except fodselsnr.InvalidFnrError:
        logger.warn("Ugyldig fødselsnr: %s" % fnr)
        return

    gender = co.gender_male
    if fodselsnr.er_kvinne(fnr):
        gender = co.gender_female

    etternavn = fornavn = None
    studentnr = None
    affiliations = []
    address_info = None
    aktiv_sted = []

    # Iterate over all person_info entries and extract relevant data
    if person_info.has_key('aktiv'):
        for row in person_info['aktiv']:
            if studieprog2sko[row['studieprogramkode']] is not None:
                aktiv_sted.append(int(studieprog2sko[row['studieprogramkode']]))

    for dta_type in person_info.keys():
        x = person_info[dta_type]
        p = x[0]
        if isinstance(p, str):
            continue
        # Get name
        if dta_type in ('fagperson', 'evu', 'aktiv'):
            etternavn = p['etternavn']
            fornavn = p['fornavn']
        if p.has_key('studentnr_tildelt'):
            studentnr = p['studentnr_tildelt']

        # Get affiliations
        if dta_type in ('fagperson',):
            _process_affiliation(co.affiliation_tilknyttet,
                                 co.affiliation_status_tilknyttet_fagperson,
                                 affiliations, _get_sko(p, 'faknr',
                                 'instituttnr', 'gruppenr', 'institusjonsnr'))
        elif dta_type in ('aktiv', ):
            for row in x:
                # aktiv_sted is necessary in order to avoid different
                # affiliation statuses to the same 'stedkode' being
                # overwritten, i.e. if a person has both affiliation
                # status 'evu' and 'aktiv' to a single stedkode we want
                # to register the status 'aktiv' in cerebrum.
                if studieprog2sko[row['studieprogramkode']] is not None:
                    aktiv_sted.append(
                        int(studieprog2sko[row['studieprogramkode']]))
                    _process_affiliation(
                        co.affiliation_student,
                        co.affiliation_status_student_aktiv,
                        affiliations,
                        studieprog2sko[row['studieprogramkode']])
        elif dta_type in ('evu',):
            # NOTE(review): `row` here is whatever was last bound by an
            # earlier loop, not an 'evu' row; presumably the 'evu' rows
            # themselves should be iterated -- confirm against upstream
            # import_FS before changing.
            subtype = co.affiliation_status_student_evu
            if studieprog2sko[row['studieprogramkode']] in aktiv_sted:
                subtype = co.affiliation_status_student_aktiv
            _process_affiliation(co.affiliation_student,
                                 subtype, affiliations,
                                 studieprog2sko[row['studieprogramkode']])

    if etternavn is None:
        logger.debug("Ikke noe navn på %s" % fnr)
        no_name += 1
        return

    # TODO: If the person already exist and has conflicting data from
    # another source-system, some mechanism is needed to determine the
    # superior setting.

    new_person = Factory.get('Person')(db)
    if fnr2person_id.has_key(fnr):
        new_person.find(fnr2person_id[fnr])

    new_person.populate(mx.DateTime.Date(year, mon, day), gender)

    new_person.affect_names(co.system_fs, co.name_first, co.name_last)
    new_person.populate_name(co.name_first, fornavn)
    new_person.populate_name(co.name_last, etternavn)

    if studentnr is not None:
        new_person.affect_external_id(co.system_fs,
                                      co.externalid_fodselsnr,
                                      co.externalid_studentnr)
        new_person.populate_external_id(co.system_fs, co.externalid_studentnr,
                                        studentnr)
    else:
        new_person.affect_external_id(co.system_fs,
                                      co.externalid_fodselsnr)
    new_person.populate_external_id(co.system_fs, co.externalid_fodselsnr, fnr)

    ad_post, ad_post_private, ad_street = _calc_address(person_info)
    for address_info, ad_const in ((ad_post, co.address_post),
                                   (ad_post_private, co.address_post_private),
                                   (ad_street, co.address_street)):
        # TBD: Should we delete any existing address when it is None?
        if address_info is not None:
            logger.debug("Populating address...")
            new_person.populate_address(co.system_fs, ad_const, **address_info)

    # if this is a new Person, there is no entity_id assigned to it
    # until written to the database.
    op = new_person.write_db()

    for a in filter_affiliations(affiliations):
        ou, aff, aff_status = a
        new_person.populate_affiliation(co.system_fs, ou, aff, aff_status)
        if include_delete:
            # Mark this (person, ou, aff) as still present so it is not
            # removed by rem_old_aff() later.
            key_a = "%s:%s:%s" % (new_person.entity_id, ou, int(aff))
            if old_aff.has_key(key_a):
                old_aff[key_a] = False

    register_cellphone(new_person, person_info)

    op2 = new_person.write_db()
    if op is None and op2 is None:
        logger.info("**** EQUAL ****")
    elif op == True:
        logger.info("**** NEW ****")
    else:
        logger.info("**** UPDATE ****")

    register_fagomrade(new_person, person_info)

    # Reservations
    if gen_groups:
        should_add = False
        if person_info.has_key('nettpubl'):
            for row in person_info['nettpubl']:
                if (row.get('akseptansetypekode', "") == "NETTPUBL"
                        and row.get('status_svar', "") == "J"):
                    should_add = True

        if should_add:
            # The student has explicitly given us permission to be
            # published in the directory.
            _add_res(new_person.entity_id)
        else:
            # The student either hasn't registered an answer to
            # the "Can we publish info about you in the directory"
            # question at all, or has given an explicit "I don't
            # want to appear in the directory" answer.
            _rem_res(new_person.entity_id)

    db.commit()
Пример #50
0
`Group' core class.  The DistributionGroup-subclass implements group
attributes necessary for establishing distribution groups in Exchange
(as of 2013 version).

Note that distribution groups come in two flavors, based on what kind
of members they accept. For now only accounts and rooms are allowed."""

import cereconf

from Cerebrum.Utils import Factory
from Cerebrum.Constants import _LanguageCode
from Cerebrum.modules import Email
from Cerebrum import Errors
from .mixins import SecurityGroupMixin, DistributionGroupMixin

# Base group class; it must already include the Exchange mixins below,
# otherwise this module cannot provide security/distribution group
# functionality.
Group_class = Factory.get("Group")

assert issubclass(Group_class, SecurityGroupMixin)
assert issubclass(Group_class, DistributionGroupMixin)


# make ready for adding new functionality specific for
# security groups in exchange (i.e. mail enabled sec groups etc).
class SecurityGroup(Group_class):
    """Security group in Exchange.

    Currently a pure placeholder; functionality specific to security
    groups (e.g. mail-enabled security groups) goes here.
    """
    # nothing to do here for now
    pass


class DistributionGroup(Group_class):
    """
    The DistributionGroup module implements a specialisation of the `Group'
Пример #51
0
 def co(self):
     """ Constants accessor, bound to this object's database. """
     constants_cls = Factory.get('Constants')
     return constants_cls(self.db)
Пример #52
0
def main():
    global verbose, ou, logger, fnr2person_id, gen_groups, group
    global old_aff, include_delete, no_name
    verbose = 0
    include_delete = False
    logger = Factory.get_logger("cronjob")
    opts, args = getopt.getopt(sys.argv[1:], 'vp:s:e:gdf', [
        'verbose', 'person-file=', 'studieprogram-file=',
        'emne-file=', 'generate-groups','include-delete', ])

    personfile = default_personfile
    studieprogramfile = default_studieprogramfile
    emnefile = default_emnefile
    for opt, val in opts:
        if opt in ('-v', '--verbose'):
            verbose += 1
        elif opt in ('-p', '--person-file'):
            personfile = val
        elif opt in ('-s', '--studieprogram-file'):
            studieprogramfile = val
        elif opt in ('-e', '--emne-file'):
            emnefile = val
        elif opt in ('-g', '--generate-groups'):
            gen_groups = True
        elif opt in ('-d', '--include-delete'):
            include_delete = True
    if "system_fs" not in cereconf.SYSTEM_LOOKUP_ORDER:
        print "Check your config, SYSTEM_LOOKUP_ORDER is wrong!"
        sys.exit(1)
    logger.info("Started")
    ou = Factory.get('OU')(db)

    group = Factory.get('Group')(db)
    try:
        group.find_by_name(group_name)
    except Errors.NotFoundError:
        group.clear()
        ac = Factory.get('Account')(db)
        ac.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
        group.populate(ac.entity_id, co.group_visibility_internal,
                       group_name, group_desc)
        group.write_db()
    if getattr(cereconf, "ENABLE_MKTIME_WORKAROUND", 0) == 1:
        logger.warn("Warning: ENABLE_MKTIME_WORKAROUND is set")

    for s in StudentInfo.StudieprogDefParser(studieprogramfile):
        studieprog2sko[s['studieprogramkode']] = \
            _get_sko(s, 'faknr_studieansv', 'instituttnr_studieansv',
                     'gruppenr_studieansv')

    for e in StudentInfo.EmneDefParser(emnefile):
        emne2sko[e['emnekode']] = \
            _get_sko(e, 'faknr_reglement', 'instituttnr_reglement',
                     'gruppenr_reglement')
        
    # create fnr2person_id mapping, always using fnr from FS when set
    person = Factory.get('Person')(db)
    if include_delete:
        old_aff = _load_cere_aff()
    fnr2person_id = {}
    for p in person.list_external_ids(id_type=co.externalid_fodselsnr):
        if co.system_fs == p['source_system']:
            fnr2person_id[p['external_id']] = p['entity_id']
        elif not fnr2person_id.has_key(p['external_id']):
            fnr2person_id[p['external_id']] = p['entity_id']
    StudentInfo.StudentInfoParser(personfile, process_person_callback, logger)
    if include_delete:
        rem_old_aff()
    db.commit()
    logger.info("Found %d persons without name." % no_name)
    logger.info("Completed")
def write_mail_dns():
    """ Gather data and dump to ldif.

    Builds one LDIF entry per email domain and per remaining host.
    Aborts (raises CerebrumError) if any email domain lacks an
    MX record, since that must be fixed manually.
    """
    logger = Factory.get_logger('cronjob')

    hosts, cnames, lower2host, hosts_only_mx = get_hosts_and_cnames()

    # email domains (lowercase -> domain), in alphabetical order
    domains = OrderedDict((d.lower(), d) for d in sorted(get_email_domains()))

    domain_wo_mx = set()
    for domain in domains:
        # Verify that domains have an MX-record.
        for arg in cereconf.LDAP_MAIL_DNS['dig_args']:
            zone = arg[0]
            if domain.endswith(zone) and not (domain in hosts_only_mx
                                              or domain in hosts):
                logger.error("email domain without MX defined: %s" % domain)
                domain_wo_mx.add(domain.lower())
        # Valid email domains only requires MX
        if domain in hosts_only_mx:
            hosts_only_mx.remove(domain)

    # Anything left in hosts_only_mx has an MX but is neither a real
    # host nor a valid email domain -- flag it.
    for host in hosts_only_mx:
        logger.warn(
            "MX defined but no A/AAAA record or valid email domain: %s" % host)

    if domain_wo_mx:
        cause = "{0:d} email domains without mx".format(len(domain_wo_mx))
        logger.error("{0}, this must be rectified manually!".format(cause))
        raise CerebrumError(cause)

    def handle_domain_host(entry, host):
        # Attach the host's canonical name to the entry and consume the
        # host (and its non-domain cnames) from the work dicts, so they
        # are not emitted again in the host loop below.
        entry["host"] = (lower2host[host], )
        for cname in hosts[host]:
            if cname not in domains:
                entry["cn"].add(lower2host[cname])
                del cnames[cname]
        del hosts[host]

    lw = LDIFutils.LDIFWriter('MAIL_DNS', filename=None)
    dn_suffix = lw.getconf('dn')
    lw.write_container()

    # One entry per email domain; merge in host/cname data when the
    # domain itself is a known cname or host.
    for domain, output in domains.items():
        dn = "cn=%s,%s" % (output, dn_suffix)
        entry = {"cn": set((output, )), "objectClass": ("uioHost", )}
        try:
            if domain in cnames:
                # This fails `if domain not in hosts`
                entry["cn"].add(lower2host[cnames[domain]])
                handle_domain_host(entry, cnames[domain])
            elif domain in hosts:
                handle_domain_host(entry, domain)
        except Exception:
            # Log enough state to diagnose which lookup blew up, then
            # re-raise.
            logger.error(
                "domain=%r, cnames[domain]=%r, "
                "in hosts=%r, in cnames=%r", domain, cnames.get(domain), domain
                in hosts, domain in cnames)
            raise
        lw.write_entry(dn, entry)

    # Hosts not consumed by handle_domain_host() get their own entries.
    for host in sorted(hosts.keys()):
        l2h = lower2host[host]
        names = set(lower2host[cname] for cname in hosts[host])
        names.add(l2h)
        lw.write_entry("host=%s,%s" % (l2h, dn_suffix), {
            "host": (l2h, ),
            "cn": names,
            "objectClass": ("uioHost", )
        })

    lw.close()
Пример #54
0
 def clconst(self):
     """ CLConstants accessor, bound to this object's database. """
     clconstants_cls = Factory.get('CLConstants')
     return clconstants_cls(self.db)
Пример #55
0
def build_employee_cache(db, sysname, filename):
    """Build a mapping of primary account names for employees to their
    employment status.

    Employment status in this case is a pair of booleans, that tell whether
    the person with that primary account has tilsettinger and bilag that we
    need.

    :Parameters:
      db : a Database instance
        DB connection to Cerebrum.
      sysname : basestring
        Name of the authoritative system whence the data comes
      filename : basestring
        XML file name (source file)
    """
    logger.debug("Building employee cache")

    # Cache *all* primary accounts, so we can bind a primary account to
    # an fnr (or passport number) found in the XML data.
    person = Factory.get("Person")(db)
    const = Factory.get("Constants")(db)
    logger.debug("Fetching all fnr->account mappings...")
    fnr2uname = person.getdict_external_id2primary_account(
        const.externalid_fodselsnr)
    logger.debug("... done (%d mappings)", len(fnr2uname))
    logger.debug("Fetching all passport-nr->account mappings...")
    pnr2uname = person.getdict_external_id2primary_account(
        const.externalid_pass_number)
    logger.debug("... done (%d mappings)", len(pnr2uname))

    parser = system2parser(sysname)(filename, logger, False)
    # uname -> (has active employments?, has recent bilag?)
    employee_cache = dict()
    for xmlperson in parser.iter_person():
        fnr = xmlperson.get_id(xmlperson.NO_SSN)
        passport_nr = xmlperson.get_id(xmlperson.PASSNR)

        if not fnr and not passport_nr:
            logger.debug("Person %s has no fnr or passport-nr in XML source",
                         list(xmlperson.iterids()))
            continue

        # Everyone with bilag more recent than 180 days old is eligible.
        cutoff = Date(*time.localtime()[:3]) - DateTimeDeltaFromDays(180)
        bilag = [emp for emp in xmlperson.iteremployment()
                 if emp.kind == emp.BILAG
                 and ((not emp.end) or emp.end >= cutoff)]

        # Add to cache, if found in Cerebrum either by fnr or
        # passport-nr.  We only record *that* active tilsetting/bilag
        # exist, not what they are.
        status = (xmlperson.has_active_employments(), bool(bilag))
        if fnr in fnr2uname:
            employee_cache[fnr2uname[fnr]] = status
        elif passport_nr in pnr2uname:
            employee_cache[pnr2uname[passport_nr]] = status
        else:
            logger.debug("Cerebrum failed to find primary account for person "
                         "with fnr: %s, passport-nr: %s.", fnr, passport_nr)

    # Release the (potentially large) lookup tables early.
    del fnr2uname
    del pnr2uname
    logger.debug("employee_cache has %d uname->employment status mappings",
                 len(employee_cache))
    return employee_cache
Пример #56
0
def do_sillydiff(dirname, oldfile, newfile, outfile):
    """Write to outfile the records of newfile that are new or changed
    relative to oldfile.

    Records are keyed on their first 12 characters; old records that
    vanished are re-emitted with names refreshed from Cerebrum.
    """
    today = time.strftime("%d.%m.%Y")
    try:
        old_in = open(os.path.join(dirname, oldfile), "r")
        line = old_in.readline().rstrip()
    except IOError:
        logger.warn("Warning, old file did not exist, assuming first run ever")
        os.link(os.path.join(dirname, newfile), os.path.join(dirname, outfile))
        return

    # Map key (first 12 chars) -> list of data parts from the old file.
    # NB: scanning stops at the first blank line, as before.
    old_dict = dict()
    while line:
        old_dict.setdefault(line[0:12], list()).append(line[13:])
        line = old_in.readline().rstrip()
    old_in.close()

    out = AtomicFileWriter(os.path.join(dirname, outfile), 'w')
    new_in = open(os.path.join(dirname, newfile))

    for raw in new_in:
        raw = raw.rstrip()
        pnr = raw[0:12]
        data = raw[13:]
        if pnr not in old_dict:
            # completely new entry, output unconditionally
            out.write(raw + "\n")
            continue
        if data in old_dict[pnr]:
            old_dict[pnr].remove(data)
        else:
            # Some change, want to update with new values.
            out.write(raw + "\n")
        # If nothing else is left, delete the key from the dictionary
        if not old_dict[pnr]:
            del old_dict[pnr]

    # Now, there is one problem left: we cannot output the old data blindly,
    # as people's names might have changed. So, we force *every* old record to
    # the current names in Cerebrum. This may result in the exactly same
    # record being output twice, but it should be fine.
    person = Factory.get("Person")(db)
    const = Factory.get("Constants")(db)
    logger.debug("%d old records left", len(old_dict))
    for leftpnr in old_dict:
        # FIXME: it is unsafe to assume that this will succeed
        first, last = fnr2names(person, const, leftpnr[:-1])
        if not (first and last):
            logger.warn(
                "No name information for %s is available. %d "
                "entry(ies) will be skipped", leftpnr[:-1],
                len(old_dict[leftpnr]))
            continue

        for entry in old_dict[leftpnr]:
            fields = entry.split(";")
            fields[2] = first
            fields[3] = last
            fields[13] = today
            fields[17] = ""
            out.write("%s;%s\n" % (leftpnr, ";".join(fields)))

    out.close()
    new_in.close()
Пример #57
0
def quicksync_roles_and_perms(client, selection_spread, config, commit):
    """Quick sync for roles and permissions.

    :type client: EphorteWS
    :param client: The client used to talk to ePhorte

    :type selection_spread: Spread
    :param selection_spread: A person must have this spread to be synced

    :type config: Config
    :param config: Configuration

    :type commit: bool
    :param commit: Commit confirmed events?
    """
    from Cerebrum.modules import CLHandler
    clh = CLHandler.CLHandler(db)
    pe = Factory.get('Person')(db)

    change_types_roles = (co.ephorte_role_add, co.ephorte_role_rem,
                          co.ephorte_role_upd)
    change_types_perms = (co.ephorte_perm_add, co.ephorte_perm_rem)
    change_types = change_types_roles + change_types_perms

    event_selector = select_events_by_person(clh=clh,
                                             config=config,
                                             change_types=change_types,
                                             selection_spread=selection_spread)

    for person_id, events in event_selector:
        if not sanity_check_person(person_id=person_id,
                                   selection_spread=selection_spread):
            continue

        pe.clear()
        pe.find(person_id)

        if not update_person_info(pe, client):
            continue

        try:
            if update_person_roles(pe, client, remove_superfluous=True):
                # Only confirm the role events once the update succeeded.
                for event in events:
                    if event['change_type_id'] in change_types_roles:
                        clh.confirm_event(event)
        # Catch Exception only: a bare `except:` would also swallow
        # KeyboardInterrupt/SystemExit and make the job unkillable.
        except Exception:
            logger.warn(u'Failed to update roles for person_id:%s',
                        person_id,
                        exc_info=True)
        else:
            if commit:
                clh.commit_confirmations()

        try:
            if update_person_perms(pe, client, remove_superfluous=True):
                for event in events:
                    if event['change_type_id'] in change_types_perms:
                        clh.confirm_event(event)
        except Exception:
            logger.warn(u'Failed to update permissions for person_id:%s',
                        person_id,
                        exc_info=True)
        else:
            if commit:
                clh.commit_confirmations()

    if commit:
        clh.commit_confirmations()
LDAP_PERSON['entitlements_pickle_file'] from Cerebrum's configuration.
Manual specification should be used for testing only.
"""

import cerebrum_path
import cereconf

import getopt
import pickle
import os
import sys
from collections import defaultdict

from Cerebrum.Utils import Factory

# Module-level Cerebrum handles shared by the functions below.
logger = Factory.get_logger("cronjob")
db = Factory.get('Database')()
ac = Factory.get('Account')(db)
gr = Factory.get('Group')(db)
co = Factory.get('Constants')(db)


def usage(exitcode=0):
    """Print the module usage string (__doc__) and exit with `exitcode`."""
    print __doc__
    sys.exit(exitcode)


def get_groups_with_entitlement():
    groups_with_entitlement = {}
    for group in gr.list_traits(co.trait_group_entitlement):
        groups_with_entitlement[group['entity_id']] = group['strval']
Пример #59
0
import argparse
from collections import defaultdict

import cerebrum_path
import cereconf

cerebrum_path, cereconf  # Satisfy the linters.

from Cerebrum.Utils import Factory
from Cerebrum import Errors

from Cerebrum.modules.no.uio.Ephorte import EphorteRole
from Cerebrum.modules.no.uio.EphorteWS import EphorteWSError
from Cerebrum.modules.no.uio.Ephorte import EphortePermission

# DB connection; ePhorte data is unicode, hence the explicit encoding.
db = Factory.get('Database')(client_encoding='utf-8')

logger = Factory.get_logger("cronjob")
co = Factory.get('Constants')(db)
ou = Factory.get('OU')(db)
ephorte_role = EphorteRole(db)

# Caches; filled lazily by helper functions elsewhere in this module.
_ou_to_sko = {}
_person_to_user_id = {}
_ephorte_ous = None
_perm_codes = None
_valid_ephorte_ous = None


def get_email_address(pe):
Пример #60
0
def disable_users(client, selection_spread):
    logger.info('Fetching all users from ePhorte... go grab some coffee.')
    start = time.time()
    all_users = client.get_all_users()
    logger.info('Fetched all users in %s secs', int(time.time() - start))

    ac = Factory.get('Account')(db)
    pe = Factory.get('Person')(db)
    at_institution = '@' + cereconf.INSTITUTION_DOMAIN_NAME

    def should_be_disabled(user_id):
        """Takes a fully qualified user id and considers
        whether it should be disabled or not.

        :type user_id: str
        :param user_id: ePhorte user id, including domain

        :rtype: bool
        :returns: Disable?
        """
        user_id = user_id.lower()

        if not user_id.endswith(at_institution):
            logger.warn(u'No %s in user_id:%s, ignoring', at_institution,
                        user_id)
            return False

        account_name = user_id.split(at_institution)[0]

        try:
            ac.clear()
            ac.find_by_name(account_name)
        except Errors.NotFoundError:
            # logger.info(u'No such account:%s, user should be disabled',
            #             account_name)
            # return True
            logger.info(u'No such account:%s, ignoring user', account_name)
            return False

        try:
            pe.clear()
            pe.find(ac.owner_id)
        except Errors.NotFoundError:
            # logger.warn(
            #     u'No such person_id:%s when '
            #     u'looking for owner of account:%s, user should be disabled',
            #     ac.owner_id, account_name)
            # return True
            logger.info(
                u'No such person_id:%s when '
                u'looking for owner of account:%s, ignoring user', ac.owner_id,
                account_name)
            return False

        primary_account_id = pe.get_primary_account()

        if not primary_account_id:
            logger.info(
                u'Owner of account:%s, person_id:%s, '
                u'has no primary account, user should be disabled',
                account_name, ac.owner_id)
            return True

        ac.clear()
        ac.find(primary_account_id)
        primary_account = ac.account_name

        if not pe.has_spread(spread=selection_spread):
            logger.info(
                u'Owner of account:%s, person_id:%s, '
                u'has no ePhorte spread, user should be disabled',
                account_name, ac.owner_id)
            return True

        if account_name != primary_account:
            logger.info(
                u'Owner of account:%s, person_id:%s, has a different primary '
                u'account (%s), user should be disabled', account_name,
                ac.owner_id, primary_account)
            return True

        return False

    def is_disabled(user_id):
        user_details = client.get_user_details(user_id)
        # consider user as disabled if number of roles + permissions is zero
        disabled = (len(user_details[1]) + len(user_details[2])) == 0
        logger.debug(u'User %s disabled? %s', user_id, disabled)
        return disabled

    start = time.time()
    disabled_previously = 0
    disabled_now = 0
    failed = 0

    for eph_user_id in all_users.keys():
        logger.debug(u'Considering user_id:%s', eph_user_id)

        if should_be_disabled(eph_user_id):
            try:
                if not is_disabled(eph_user_id):
                    client.disable_user(eph_user_id)
                    logger.info(u'Successfully disabled user %s', eph_user_id)
                    disabled_now += 1
                else:
                    logger.info(u'User %s is already disabled', eph_user_id)
                    disabled_previously += 1
            except EphorteWSError, e:
                logger.warn(u'Could not disable user %s: %s',
                            eph_user_id,
                            unicode(e),
                            exc_info=True)
                failed += 1