Code example #1
def init_globals():
    global db, const, logger, fnr2account_id
    global dump_dir, dryrun, immediate_evu_expire

    # Handle upper- and lowercasing of strings that contain Norwegian
    # characters.
    locale.setlocale(locale.LC_CTYPE, ('en_US', 'iso88591'))

    dump_dir = cereconf.FS_DATA_DIR
    dryrun = False
    logger = Factory.get_logger("cronjob")
    immediate_evu_expire = False

    opts, rest = getopt.getopt(sys.argv[1:],
                               "d:r",
                               ["dump-dir=", "dryrun",
                                "immediate-evu-expire",])
    for option, value in opts:
        if option in ("-d", "--dump-dir"):
            dump_dir = value
        elif option in ("-r", "--dryrun"):
            dryrun = True
        elif option in ("--immediate-evu-expire",):
            immediate_evu_expire = True
        # fi
    # od

    db = Factory.get("Database")()
    db.cl_init(change_program='pop_extern_grps')
    const = Factory.get("Constants")(db)

    fnr2account_id = {}
    prefetch_primaryusers()
Code example #2
File: test_core_Account.py Project: unioslo/cerebrum
    def setUpClass(cls):
        """
        Set up this TestCase module.

        This sets up objects shared between the tests. It is done *once*
        before any of the tests in this class are run.
        """

        # TODO: We might want this basic class setup in other TestCases. Maybe
        #       set up a generic TestCase class to inherit common stuff from?
        cls._db = Factory.get('Database')()
        cls._db.cl_init(change_program='nosetests')
        cls._db.commit = cls._db.rollback  # Let's try not to screw up the db

        cls._ac = Factory.get('Account')(cls._db)
        cls._co = Factory.get('Constants')(cls._db)

        # Data sources
        cls.account_ds = BasicAccountSource()
        cls.person_ds = BasicPersonSource()

        # Tools for creating and destroying temporary db items
        cls.db_tools = DatabaseTools(cls._db)
        cls.db_tools._ac = cls._ac
Code example #3
def compare_names(db, logger, args):
    """ Generates an XML report for missing names. """
    co = Factory.get(b'Constants')(db)
    pe = Factory.get(b'Person')(db)
    variants = [co.PersonName(t[0]) for t in pe.list_person_name_codes()]

    logger.debug("Fetching names from {!s}".format(args.check_system))
    to_check = dict()
    for name in get_names(db, args.check_system, variants):
        to_check.setdefault(name.pid, dict())[name.variant] = name

    logger.debug("Fetching names for {:d} persons from {!s}".format(
        len(to_check), args.source_system))
    diff = dict()
    for name in get_names(
            db, args.source_system, variants, pid=to_check.keys()):
        if name.variant not in to_check[name.pid]:
            continue
        if to_check[name.pid][name.variant].value != name.value:
            diff.setdefault(name.pid, []).append(
                (name, to_check[name.pid][name.variant]))

    logger.debug("Generating report ({:d} names)".format(len(diff)))
    report = generate_report('Names', diff)
    logger.debug("Done generating report")
    return report
Code example #4
def main(args=None):
    ENCODING = 'utf-8'
    logger = Factory.get_logger('cronjob')
    db = Factory.get(b'Database')()
    co = Factory.get(b'Constants')(db)

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--output', default='/tmp/report.html')
    commands = parser.add_subparsers(help="available commands")

    # name
    name_command = commands.add_parser(
        'name',
        help="Generate report on differences in names.")
    name_command.set_defaults(func=compare_names)
    name_command.set_defaults(check_system=co.system_sap)
    name_command.add_argument(
        'source_system',
        type=partial(argparse_const, db, co.AuthoritativeSystem))

    args = parser.parse_args(args)
    command = args.func
    del args.func

    # Other commands?
    logger.info('Generating report ({!s})'.format(args.output))
    af = AtomicFileWriter(args.output)

    report = command(db, logger, args)
    report.find('head/meta[@charset]').set('charset', ENCODING)
    af.write("<!DOCTYPE html>\n")
    af.write(ElementTree.tostring(report, encoding=ENCODING))

    af.close()
    logger.info('Done')
Code example #5
def main():
    global db, constants, account
    global logger, outfile, person

    outfile = None
    logger = Factory.get_logger("cronjob")
    
    try:
        opts, args = getopt.getopt(sys.argv[1:],
                                   'f:',
                                   ['file='])
    except getopt.GetoptError:
        usage()

    dryrun = False
    for opt, val in opts:
        if opt in ('-f', '--file'):
            outfile = val

    if outfile is None:
        outfile = '/cerebrum/var/cache/MAIL/mail_data.dat'

    db = Factory.get('Database')()
    constants = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    person = Factory.get('Person')(db)

    email_data = generate_email_data()
    write_email_file(email_data, outfile)
Code example #6
    def __init__(self, db):
        co = Factory.get("Constants")(db)
        pe = Factory.get("Person")(db)

        # observed ous, affs
        affs = set()
        stat = set()
        ous = set()

        data = defaultdict(list)

        for row in pe.list_affiliations():
            affs.add(row['affiliation'])
            stat.add(row['status'])
            ous.add(row['ou_id'])

            # record person by aff, status, ou
            for key in (
                (None,               None,          None),
                (row['affiliation'], None,          None),
                (None,               row['status'], None),
                (None,               None,          row['ou_id']),
                (row['affiliation'], None,          row['ou_id']),
                (None,               row['status'], row['ou_id']),
            ):
                data[key].append(row['person_id'])

        self._data = dict(data)
        self.ous = ous
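        # Wrap the observed affiliation/status codes in Cerebrum constant objects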
        self.types = tuple((co.PersonAffiliation(a) for a in affs))
        self.subtypes = tuple((co.PersonAffStatus(s) for s in stat))
Code example #7
def main():
    global db, constants, account_init, group, posixgroup
    global default_creator_id
    global dryrun, logger

    logger = Factory.get_logger("console")
    
    try:
        opts, args = getopt.getopt(sys.argv[1:],
                                   'f:d',
                                   ['file=',
                                    'dryrun'])
    except getopt.GetoptError:
        usage()

    dryrun = False
    for opt, val in opts:
        if opt in ('-d', '--dryrun'):
            dryrun = True
        elif opt in ('-f', '--file'):
            infile = val

    db = Factory.get('Database')()
    db.cl_init(change_program='import_groups')
    constants = Factory.get('Constants')(db)
    account_init = Factory.get('Account')(db)
    account_init.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    default_creator_id = account_init.entity_id
    group = Factory.get('Group')(db)
    posixgroup = PosixGroup.PosixGroup(db)

    process_line(infile)
Code example #8
def write_fnrupdate_info(outfile):
    """Lager fil med informasjon om alle fødselsnummerendringer"""
    stream = AtomicFileWriter(outfile, 'w')
    writer = xmlprinter.xmlprinter(stream,
                                   indent_level = 2,
                                   # Human-readable output
                                   data_mode = True,
                                   input_encoding = "latin1")
    writer.startDocument(encoding = "iso8859-1")

    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)

    writer.startElement("data", {"source_system" : str(const.system_fs)})

    data = fs.person.list_fnr_endringer()
    for row in data:
        # Make the format resemble the corresponding FS output as close as
        # possible.
        attributes = { "type" : str(const.externalid_fodselsnr), 
                       "new"  : "%06d%05d" % (row["fodselsdato_naverende"],
                                              row["personnr_naverende"]),
                       "old"  : "%06d%05d" % (row["fodselsdato_tidligere"],
                                              row["personnr_tidligere"]),
                       "date" : str(row["dato_foretatt"]),
                     }
        
        writer.emptyElement("external_id", attributes)
    # od

    writer.endElement("data")
    writer.endDocument()
    stream.close()
Code example #9
    def cacheAccounts(self, account_names):
        """ Cache data for a list of account names, efficiently.

        This function has a bit of an overhead, because it looks up all users
        in the db. It is, however, a lot more efficient than looking up
        individual accounts when there's a lot of L{account_names}.

        @type account_names: set
        @param account_names:
            An iterable (ideally a set) of account names to cache data for.

        """
        ac = Factory.get('Account')(self.db)
        pe = Factory.get('Person')(self.db)

        # Save some time
        if not account_names:
            return

        # Fetch all accounts. ...would be nice to filter by names in the query
        all_accounts = ac.search(owner_type=self.co.entity_person)

        # self.accounts - Account and owner id for all candidates. Dict map:
        #   account_name -> {account_id -> , owner_id -> ,}
        filtered_accounts = filter(lambda a: a['name'] in account_names,
                                   all_accounts)
        self.accounts = dict((a['name'], {
            'account_id': a['account_id'],
            'owner_id': a['owner_id']}) for a in filtered_accounts)

        # self.mobiles - Look up the mobile phone number (from FS) for all
        # candidates. Dict mapping:
        #   person_id -> mobile number
        owners = set([a['owner_id'] for a in self.accounts.values()])
        if owners:
            self.mobiles = dict((mob['entity_id'], mob['contact_value']) for mob in
                    pe.list_contact_info(source_system=self.co.system_fs,
                                         contact_type=self.co.contact_mobile_phone,
                                         entity_type=self.co.entity_person, 
                                         entity_id=owners))

        # self.spreads - The spreads of all candidates. List of tuples: 
        #   (account_id, spread_code)
        account_ids = set([a['account_id'] for a in self.accounts.values()])
        for s in cereconf.DIGEKS_CANDIDATE_SPREADS:
            spread = self.co.Spread(s)
            spreads = filter(lambda s: s['entity_id'] in account_ids, ac.list_all_with_spread(spread))
            self.spreads.extend(spreads)

        # Quarantines
        quarantines = []
        if len(account_ids) > 0:
            quarantines = ac.list_entity_quarantines(
                    entity_types=self.co.entity_account,
                    entity_ids=account_ids, 
                    only_active=False)
        for q in quarantines:
            if q['entity_id'] not in self.quarantined.keys():
                self.quarantined[q['entity_id']] = []
            self.quarantined[q['entity_id']].append(str(self.co.Quarantine(q['quarantine_type'])))
Code example #10
    def __init__(self, subjects, year, version=None, typecode=None, timecode=None):
        self.db = Factory.get('Database')()
        self.db.cl_init(change_program='proc-digeks')
        self.co = Factory.get('Constants')(self.db)
        self.fs = make_fs()

        # TODO: Describe the structure here
        self.exams = set()
        self.candidates = set()

        # FIXME: We shouldn't need to specify subject/semester/...
        if not isinstance(subjects, (list,set,tuple)):
            raise Exception('Subjects must be a (list,set,tuple)')

        self.subjects = subjects
        self.year = year
        self.typecode = typecode # vurdkombkode
        self.timecode = timecode # vurdtidkode
        self.version  = version  # versjonskode

        # Start processing
        #
        self.process_exams()

        all_candidates = set([c.username for c in self.candidates])
        logger.debug('Caching candidate data for %d unique candidates...' % len(all_candidates))
        self.cache = CandidateCache(self.db, all_candidates)
Code example #11
def main():
    global logger, const, cerebrum_db, xmlwriter
    logger = Factory.get_logger("cronjob")
    logger.info("generating a new XML for export_ACL")

    cerebrum_db = Factory.get("Database")()
    const = Factory.get("Constants")(cerebrum_db)

    opts, rest = getopt.getopt(sys.argv[1:], "f:",
                               ["out-file="])
    filename = None
    for option, value in opts:
        if option in ("-f", "--out-file"):
            filename = value
        # fi
    # od

    _cache_id_types()
    stream = AtomicFileWriter(filename)
    xmlwriter = xmlprinter.xmlprinter(stream,
                                      indent_level = 2,
                                      # Human-readable output
                                      data_mode = True,
                                      input_encoding = "latin1")
    generate_report()
    stream.close()
Code example #12
File: dump_to_UA.py Project: unioslo/cerebrum
def generate_output(stream, do_employees, sysname, person_file):
    """
    Create dump for UA
    """
    db_person = Factory.get("Person")(db)
    ou = Factory.get("OU")(db)
    const = Factory.get("Constants")(db)

    if do_employees:
        logger.info("Extracting employee info from %s", person_file)

        source_system = getattr(const, sysname)
        parser = system2parser(sysname)(person_file, logger, False)

        # Go through all persons in person_info_file
        for xml_person in parser.iter_person():
            try:
                fnr = xml_person.get_id(xml_person.NO_SSN)
                if fnr is None:
                    sapnr = xml_person.get_id(xml_person.SAP_NR)
                    logger.warn('Employee %s has no fnr', sapnr)
                    continue
                db_person.find_by_external_id(const.externalid_fodselsnr, fnr,
                                              source_system=source_system)
            except Errors.NotFoundError:
                logger.warn("Couldn't find person with fnr %s in db", fnr)
                continue

            process_employee(db_person, ou, const, xml_person, fnr, stream)
            db_person.clear()
Code example #13
File: list_disk_quotas.py Project: unioslo/cerebrum
def list_disk_quotas(f, disk_id, spread):
    account = Factory.get("Account")(db)
    disk = Factory.get("Disk")(db)
    disk.find(disk_id)
    default_quota = disk.get_default_quota()
    if default_quota is False:
        logger.debug("Skipping %s, no quotas on disk" % disk.path)
        return

    logger.debug("Listing quotas on %s" % disk.path)

    if default_quota is None:
        default_quota = '' # Unlimited
        all_users=False
    else:
        all_users=True

    now = mx.DateTime.now()
    dq = DiskQuota(db)
    for row in dq.list_quotas(spread=spread, disk_id=disk.entity_id,
                              all_users=all_users):
        quota = row['quota']
        if row['override_expiration'] and row['override_expiration'] > now:
            quota = row['override_quota']
        if quota is None:
            quota = default_quota
        home=account.resolve_homedir(account_name=row['entity_name'],
                                     home=row['home'], disk_path=row['path'])
        f.write("%s:%s:%s\n" % (row['entity_name'], home, quota))
Code example #14
File: process_requests.py Project: unioslo/cerebrum
    def set_fnr2move_student(self, rows):
        # Fetch each person's national identity number (fødselsnummer)
        # and account_id
        self.fnr2move_student = {}
        account = Factory.get('Account')(self.db)
        person = Factory.get('Person')(self.db)
        for r in rows:
            if not is_valid_request(self.br, r['request_id']):
                continue
            account.clear()
            account.find(r['entity_id'])
            person.clear()
            person.find(account.owner_id)
            fnr = person.get_external_id(
                id_type=self.co.externalid_fodselsnr,
                source_system=self.co.system_fs
            )
            if not fnr:
                logger.warn("No student fnr for: %i" % account.entity_id)
                self.br.delete_request(request_id=r['request_id'])
                self.db.commit()
                continue
            fnr = fnr[0]['external_id']
            self.fnr2move_student.setdefault(fnr, []).append(
                (int(account.entity_id),
                 int(r['request_id']),
                 int(r['requestee_id'])))
Code example #15
File: ifi_auto.py Project: unioslo/cerebrum
def main():
    global db, co, logger, group_creator, dryrun

    db = Factory.get('Database')()
    db.cl_init(change_program='ifi_auto')
    co = Factory.get('Constants')(db)
    logger = Factory.get_logger("cronjob")
    dryrun = False

    try:
        opts, args = getopt.getopt(sys.argv[1:], '?',
                                   ['dryrun', 'help'])
    except getopt.GetoptError:
        usage()
    for opt, val in opts:
        if opt == '--dryrun':
            dryrun = True
        if opt in ('-?', '--help'):
            usage(0)

    supergroup = "internal:uio.no:fs:{autogroup}"
    fg_supergroup = "internal:uio.no:fs:{ifi_auto_fg}"
    group_creator = get_account(cereconf.INITIAL_ACCOUNTNAME).entity_id
    process_groups(supergroup, fg_supergroup)
    if not dryrun:
        logger.debug("commit...")
        db.commit()
    logger.info("All done")
Code example #16
def output_text(output_file):
    """
    Initialize data structures and start generating the output.
    """

    output_stream = MinimumSizeWriter(output_file, "w")
    # 1MB is the minimum allowed size for the portal dump.
    # The number is somewhat magic, but it seems sensible
    output_stream.min_size = 1024*1024
    db_cerebrum = Factory.get("Database")()
    logger.debug(cereconf.DB_AUTH_DIR)
    
    logger.debug(Database.__file__)
    db = Database.connect(user="******",
                          service="FSPROD.uio.no",
                          DB_driver=cereconf.DB_DRIVER_ORACLE)
    db_fs = FS(db)
    
    db_person = Factory.get("Person")(db_cerebrum)
    db_account = Factory.get("Account")(db_cerebrum)
    constants = Factory.get("Constants")(db_cerebrum)

    # FS is first. This is intentional.
    lookup_order = [constants.system_fs]
    for authoritative_system_name in cereconf.SYSTEM_LOOKUP_ORDER:
        lookup_order.append(getattr(constants, authoritative_system_name))
    
    rows = db_fs.portal.list_eksmeld()
    logger.debug("Fetched portal information from FS")
    for row in rows:
        output_row(row, output_stream,
                   db_person, db_account, constants,
                   lookup_order)

    output_stream.close()
Code example #17
def main():
    global db, logger, const, emailsrv

    logger = Factory.get_logger("console")
    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    db.cl_init(change_program="email_dom")
    creator = Factory.get("Account")(db)
    creator.clear()
    creator.find_by_name('bootstrap_account')
    infile = None
    emailsrv = False
    disk_in = host_in = False

    try:
        opts, args = getopt.getopt(sys.argv[1:],
                                   'f:h:d:e',
                                   ['file=',
                                    'disk=',
                                    'host=',
                                    'email-server',
                                    'help',
                                    'dryrun'])
    except getopt.GetoptError, e:
        print e
        usage(1)
Code example #18
def main():
    """Main driver for the file generation."""

    global xmlwriter, db, const, logger

    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    logger = Factory.get_logger("cronjob")

    try:
        opts, args = getopt.getopt(sys.argv[1:], "o:",
                                   ["out-file="])
    except getopt.GetoptError:
        usage(1)

    filename = None
    for opt, val in opts:
        if opt in ('-o', '--out-file'):
            filename = val
    if not filename:
        usage(1)    

    stream = AtomicFileWriter(filename)
    xmlwriter = xmlprinter.xmlprinter(stream,
                                      indent_level=2,
                                      # human-friendly output
                                      data_mode=True,
                                      input_encoding="UTF-8")
    # Get information about persons
    persons = fetch_person_data()
    # Get information about courses (kurs)
    courses = fetch_course_data()
    # Generate and write document
    generate_document(persons, courses)
    stream.close()
Code example #19
File: pq.py Project: unioslo/cerebrum
def get_authorized_hosts(machine_list):
    db = Factory.get('Database')()
    gr = Factory.get('Group')(db)
    co = Factory.get('Constants')(db)

    def lookup_gids(groups):
        l = []
        for group in groups:
            gr.clear()
            try:
                gr.find_by_name(group[1:])
            except Errors.NotFoundError:
                continue
            l.append(gr.entity_id)
        return l

    groups = filter(lambda x: x.startswith('@'), machine_list)
    machines = set(machine_list) - set(groups)

    machines.update(map(lambda x: x['member_name'],
                        gr.search_members(group_id=lookup_gids(groups),
                                          indirect_members=True,
                                          member_type=co.entity_dns_owner,
                                          include_member_entity_name=True)))

    return map(lambda x: socket.gethostbyname(x), machines)
Code example #20
def fill_account(acct, db, co, data):
    """Look up account and fill data

    data should contain:
    old = key → set of group ids
    new = set of group ids
    owner = owner id
    """
    g = Factory.get('Group')(db)
    a = Factory.get('Account')(db)
    a.find(acct)
    ats = a.get_account_types()
    own = data['person'].get(a.owner_id)
    if own is None:
        own = fill_person(a.owner_id, db, co, data)
    ret = {
        'old': {},
        'new': set(),
        'owner': own,
        'ignoregroups': data['ignoregroups'],
    }
    for at in ats:
        grps = g.list_ou_groups_for(at['ou_id'], affiliation=at['affiliation'],
                                    member_types=co.virtual_group_ou_accounts,
                                    indirect=False)
        k = key(at['ou_id'], at['affiliation'])
        dct = ret['old'][k] = set()
        for gid in (int(x['group_id']) for x in grps):
            if gid not in data['ignoregroups']:
                dct.add(gid)
                ret['new'].add(gid)
    data['account'][acct] = ret
    return ret
Code example #21
    def __init__(self, db_conn, pe_cls=None, ac_cls=None, gr_cls=None,
                 ou_cls=None, co_cls=None):
        """ Initialize with a Cerebrum.Database object. """
        self._db = db_conn
        self._db.commit = self._db.rollback

        if pe_cls is None:
            pe_cls = Factory.get('Person')
        self._pe = pe_cls(self._db)

        if ac_cls is None:
            ac_cls = Factory.get('Account')
        self._ac = ac_cls(self._db)

        if gr_cls is None:
            gr_cls = Factory.get('Group')
        self._gr = gr_cls(self._db)

        if ou_cls is None:
            ou_cls = Factory.get('OU')
        self._ou = ou_cls(self._db)

        if co_cls is None:
            co_cls = Factory.get('Constants')
        self._co = co_cls(self._db)

        self._init_account_id = None
        self._init_group_id = None

        self.constants = []
        self.account_ids = set()
        self.person_ids = set()
        self.group_ids = set()
        self.ou_ids = set()
Code example #22
def main():
    global verbose, f, db, co, ldap, auth, start

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', "--verbose", action="count", default=0)
    parser.add_argument('-m', "--mail-file")
    parser.add_argument('-s', "--spread", default=ldapconf('MAIL', 'spread', None))
    parser.add_argument('-i', "--ignore-size", dest="max_change", action="store_const", const=100)
    parser.add_argument('-a', "--no-auth-data", dest="auth", action="store_false", default=True)
    args = parser.parse_args()

    verbose = args.verbose
    auth = args.auth

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    start = now()
    curr = now()

    if verbose:
        logger.debug("Loading the EmailLDAP module...")
    ldap = Factory.get('EmailLDAP')(db)
    if verbose:
        logger.debug("  done in %d sec." % (now() - curr))

    spread = args.spread
    if spread is not None:
        spread = map_spreads(spread, int)

    f = ldif_outfile('MAIL', args.mail_file, max_change=args.max_change)
    get_data(spread)
    end_ldif_outfile('MAIL', f)
Code example #23
    def main(self):
        self.parse_options()

        self.db = Factory.get('Database')()
        self.co = Factory.get('Constants')(self.db)
        self.group = Factory.get('Group')(self.db)
        self.posix_user = Factory.get('PosixUser')(self.db)
        self.posix_group = PosixGroup.PosixGroup(self.db)
        self._namecachedtime = mx.DateTime.now()

        self._num = 0
        self.posix_users = []
        self.e_id2name = {}
        self.p_id2name = {}
        self.auth_data = {}
        self.disk_tab = {}
        self.shell_tab = {}
        self.quarantines = {}
        self.filegroups = {}
        self.netgroups = {}
        self.host_netgroups = {}
        self.account2def_group = {}
        self.g_id2gid = {}
        self.a_id2owner = {}
        self.a_id2home = {}
        self._names = set()

        self.setup()
        self.generate_files()
Code example #24
File: QuarantineHandler.py Project: unioslo/cerebrum
def _test():
    # TODO: This should use the unit-testing framework, and use common
    # constants (which we currently don't have for spreads)
    cereconf.QUARANTINE_RULES = {
        'nologin': {'lock': 1, 'shell': 'nologin-shell', 'sort_num': 10},
        'system': [{'lock': 1, 'shell': 'nologin-shell2', 'sort_num': 2},
                   {'spread': 'AD_account', 'shell': 'ad-shell', 'sort_num': 3}]
    }
    from Cerebrum.Utils import Factory
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    # Check with old cereconf syntax
    qh = QuarantineHandler(db, (co.quarantine_nologin,))
    print "nolgin: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())

    # New cereconf syntax, not spread-specific
    qh = QuarantineHandler(db, (co.quarantine_system,))
    print "system: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())

    # spread-specific quarantine action, should not be locked
    qh = QuarantineHandler(db, (co.quarantine_system,),
                           spreads=(co.spread_uio_ad_account,))
    print "system & AD: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())

    # spread-specific quarantine action and another quarantine that
    # requires lock
    qh = QuarantineHandler(db, (co.quarantine_system, co.quarantine_nologin),
                           spreads=(co.spread_uio_ad_account,))
    print "system & AD & L: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())

    qh = QuarantineHandler.check_entity_quarantines(db, 67201)
    print "An entity: L=%i, S=%s" % (qh.is_locked(), qh.get_shell())
Code example #25
File: ProcHandler.py Project: unioslo/cerebrum
    def ac_type_del(self, account_id, affiliation, ou_id):
        """Deletes an account from special groups which represent an
        affiliation at an OU. Delete the group if no members are present."""
        ou = Factory.get("OU")(self.db)
        ou.find(ou_id)

        # Look up the group
        grp_name = "%s %s" % (self._get_ou_acronym(ou), affiliation)
        if not self._group:
            self._group = Factory.get('Group')(self.db)
        try:
            self._group.clear()
            self._group.find_by_name(grp_name)
            self.logger.debug("ac_type_del: Group '%s' found." % grp_name)
            if self._group.has_member(account_id):
                self._group.remove_member(account_id)
                self._group.write_db()
                self.logger.info(
                    "ac_type_del: Account '%s' deleted from group '%s'." %
                    (account_id, grp_name))
            # Deal with empty groups as well
            if len(list(self._group.search_members(
                    group_id=self._group.entity_id,
                    indirect_members=True,
                    member_type=self._co.entity_account))) == 0:
                self._group.delete()
                self._group.write_db()
        except Errors.NotFoundError:
            self.logger.debug(
                "ac_type_del: Group '%s' not found. Nothing to do" % grp_name)
Code example #26
def mangle(from_server, to_server, commit):
    db = Factory.get('Database')()
    et = Factory.get('EmailTarget')(db)
    db.cl_init(change_program='update_email_target_server')
    # Yes yes yes, it is quite pretty
    es = EmailServer(db)

    es.clear()
    es.find_by_name(from_server)
    from_server_id = es.entity_id

    es.clear()
    es.find_by_name(to_server)
    to_server_id = es.entity_id

    for row in et.list_email_server_targets():
        if row.has_key('server_id') and row['server_id'] == from_server_id:
            et.clear()
            et.find(row['target_id'])
            old_sid = et.email_server_id
            et.email_server_id = to_server_id
            et.write_db()
            print('Moved %d from %d to %d' % \
                    (et.entity_id, old_sid, to_server_id))

    if commit:
        db.commit()
        print 'Committed all changes'
    else:
        db.rollback()
        print 'Rolled back all changes'
Code example #27
def main():
    global zone
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'Z:i:o:zrH:', [
            'help',])
    except getopt.GetoptError:
        usage(1)
    if not opts:
        usage(1)

    header_splitter=default_header_splitter
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    for opt, val in opts:
        if opt in ('--help', ):
            usage()
        elif opt in ('-i',):
            infile = val
        elif opt in ('-o',):
            outfile = val
        elif opt in ('-H',):
            header_splitter = val
        elif opt in ('-z',):
            strip_zone_file(infile, outfile, zone, header_splitter=header_splitter)
        elif opt in ('--zone-def', '-Z'):
            zone = co.DnsZone(val)
            int(zone) # Trigger error if missing
        elif opt in ('-r',):
            strip_reverse_file(infile, outfile, zone)
Code example #28
def init_globals(args):
    global db, const, group, ou, person
    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    group = Factory.get("Group")(db)
    person = Factory.get("Person")(db)
    ou = Factory.get("OU")(db)
Code example #29
File: reaper.py Project: unioslo/cerebrum
def enforce_user_constraints(db):
    """ Check a number of business rules for our users. """
    account = Factory.get("Account")(db)
    const = Factory.get("Constants")()
    for row in account.list(filter_expired=False):
        # We check FA/VA only
        if row["np_type"] not in (const.fedaccount_type,
                                  const.virtaccount_type):
            continue

        account.clear()
        account.find(row["entity_id"])
        # Expiration is not set -> force it to default
        if row["expire_date"] is None:
            logger.warn("Account %s (id=%s) is missing expiration date.",
                        account.account_name,
                        account.entity_id)
            account.expire_date = now() + account.DEFAULT_ACCOUNT_LIFETIME
            account.write_db()

        # Expiration is too far in the future -> force it to default
        if row["expire_date"] - now() > account.DEFAULT_ACCOUNT_LIFETIME:
            logger.warn("Account %s (id=%s) has expire date too far in the"
                        " future.", account.account_name, account.entity_id)
            account.expire_date = now() + account.DEFAULT_ACCOUNT_LIFETIME
            account.write_db()
Code example #30
def get_person_info(db, person, ssn_type, source_system,
                    telephone_types):
    """Collect information about `person`.

    :param Cerebrum.database.Database db: DB connection object.
    :param Cerebrum.Constants._EntityExternalIdCode ssn_type: External id type
        to filter by.
    :param Cerebrum.Constants._AuthoritativeSystemCode source_system: Source
        system to filter by.
    :param Cerebrum.Constants._ContactInfoCode telephone_types: Filter
        telephone entries by type."""
    if isinstance(person, (int, long)):
        pe = Factory.get('Person')(db)
        pe.find(person)
    else:
        pe = person

    co = Factory.get('Constants')(db)

    return {
        'firstname': pe.get_name(source_system, co.name_first),
        'lastname': pe.get_name(source_system, co.name_last),
        'title': 'Mr' if pe.gender == co.gender_male else 'Ms',
        'feide_id': _construct_feide_id(db, pe),
        'email_address': _get_primary_emailaddress(db, pe),
        'phone': _get_phone(db, pe, source_system, telephone_types),
        'ssn': _get_ssn(db, pe, ssn_type, source_system)
    }
Code example #31
def main():
    global db, co, ac, group, person, qua, logger
    global server, ou, child_ou
    c_data = {}
    ad_data = {}

    db = Factory.get('Database')()
    db.cl_init(change_program="adusync")
    co = Factory.get('Constants')(db)
    ac = Factory.get('Account')(db)
    ou = Factory.get("OU")(db)
    child_ou = Factory.get("OU")(db)
    group = Factory.get('Group')(db)
    person = Factory.get('Person')(db)
    qua = Entity.EntityQuarantine(db)
    logger = Factory.get_logger("cronjob")

    passwd = db._read_password(cereconf.AD_SERVER_HOST,
                               cereconf.AD_SERVER_UNAME)

    # Connect to AD-service at NMH
    #
    server = xmlrpclib.Server(
        "https://%s@%s:%i" %
        (passwd, cereconf.AD_SERVER_HOST, cereconf.AD_SERVER_PORT))
    try:
        opts, args = getopt.getopt(sys.argv[1:], '', ['help', 'dry_run'])
    except getopt.GetoptError:
        usage(1)

    dry_run = False

    for opt, val in opts:
        if opt == '--help':
            usage(1)
        elif opt == '--dry_run':
            dry_run = True

    c_data = get_cerebrum_data()

    # Fetch AD-data. Catch ProtocolError and don't write xpe.url to log
    # since it may contain a password.
    try:
        ad_data = get_ad_data()
    except xmlrpclib.ProtocolError, xpe:
        logger.critical("Error connecting to AD service. Giving up!: %s %s" %
                        (xpe.errcode, xpe.errmsg))
        return
Code example #32
    def _new_personal_group(self, creator_id):
        group = Factory.get('PosixGroup')(self._db)

        def get_available_dfg_name(basename):
            group = Factory.get('Group')(self._db)

            def alternatives(base):
                # base -> base, base1, base2, ... base9
                yield base
                if len(base) >= 8:
                    base = base[:-1]
                for i in range(1, 10):
                    yield base + str(i)

            for name in alternatives(basename):
                try:
                    group.find_by_name(name)
                    group.clear()
                    continue
                except Errors.NotFoundError:
                    return name
            # TODO: Better exception?
            raise Errors.NotFoundError(
                "Unable to find a group name for {!s}".format(basename))

        # Find an available name for this user's personal file group.
        personal_dfg_name = get_available_dfg_name(self.account_name)

        group.populate(
            creator_id=creator_id,
            visibility=self.const.group_visibility_all,
            name=personal_dfg_name,
            description='Personal file group for {}'.format(self.account_name),
            group_type=self.const.group_type_personal,
        )

        # Intermediate write, to get an entity_id
        group.write_db()

        yield group

        group.populate_trait(self.const.trait_personal_dfg,
                             target_id=self.entity_id)

        group.write_db()
Code example #33
File: job_runner.py Project: Narvik-kommune/cerebrum
def run_daemon(jr_socket, jobs, quiet=False, thread=True):
    """ Try to start a new job runner daemon. """
    sock = SocketServer(jr_socket=jr_socket)

    # Abstract Action to get a lockfile
    # TODO: Couldn't we just use the socket to see if we're running?
    lock = LockFile('master_jq_lock')

    try:
        if sock.ping_server():
            raise SystemExit(int(quiet) or "Server already running")
        try:
            lock.acquire()
        except LockExists:
            logger.error(
                "%s: Master lock exists, but jr-socket didn't respond to "
                "ping. This should be a very rare error!",
                lock.filename)
            raise SystemExit(1)
    except SocketTimeout:
        # Assuming that previous run aborted without removing socket
        logger.warn("Socket timeout, assuming server is dead")
        try:
            os.unlink(jr_socket)
        except OSError:
            pass
        pass

    # TODO: Why don't we re-acquire the lock here?

    queue = JobQueue(jobs, Factory.get('Database')())
    runner = JobRunner(queue)

    if thread:
        socket_thread = threading.Thread(
            target=sock.start_listener,
            args=(runner, ))
        socket_thread.setDaemon(True)
        socket_thread.setName("socket_thread")
        socket_thread.start()

    runner.run_job_loop()
    logger.debug("bye")
    sock.cleanup()
    lock.release()
Code example #34
def remove_traits(leftover_traits):
    """Remove traits from Cerebrum to synchronise the information.

    L{load_old_traits} builds a cache data structure that keeps track of all
    traits assigned to people in Cerebrum. Other functions update that cache
    and remove entries that should be considered up to date. When this
    function is called, whatever is left in cache is considered to be traits
    that have been assigned to people, but which should no longer exist, since
    the data from the authoritative source system says so.

    So, this function sweeps through leftover_traits and removes the traits
    from Cerebrum.

    :type leftover_traits: dict (see L{load_old_traits})
    :param leftover_traits:
      Cache of no longer relevant traits that should be removed.
    """

    logger.debug("Removing old traits (%d person objects concerned)",
                 len(leftover_traits))
    # Technically, EntityTrait and Person are different objects, but trait
    # auto administration in this context assumes person objects, so we can
    # safely ask for a 'Person' rather than an EntityTrait.
    person = Factory.get("Person")(db)
    for person_id, traits in leftover_traits.iteritems():
        try:
            person.clear()
            person.find(person_id)
        except Errors.NotFoundError:
            logger.warn(
                "Person id=%s is in cache, but not in Cerebrum. "
                "Another job removed it from the db?", person_id)
            continue

        for trait in traits:
            try:
                person.delete_trait(trait)
                logger.info("Person id=%s lost trait %s", person_id,
                            const.EntityTrait(trait))
            except Errors.NotFoundError:
                logger.warn("Trait %s for person %s has already been deleted.",
                            const.EntityTrait(trait), person_id)

        person.write_db()
    logger.debug("Deleted all old traits")
Code example #35
def main(args=None):
    """Main script runtime.

    This parses arguments and handles the database transaction.
    """
    import argparse
    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument('--commit',
                        default=False,
                        action='store_true',
                        help='Commit changes.')
    parser.add_argument('--username',
                        dest='username',
                        metavar='<username>',
                        help='Force provisioning of person related to '
                        '<username>')
    args = parser.parse_args(args)

    logger.info("START %s with args: %s", parser.prog, args.__dict__)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog.split('.')[0])

    collector = tuple()
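    # If a username was given, limit event generation to the owner of that
    # account (uname2pid presumably resolves the username to a person id)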
    if args.username:
        import functools
        collector = (functools.partial(uname2pid,
                                       **{'username': args.username}), )

    try:
        generate_events(db, *collector)
    except Exception:
        logger.error("Unexpected exception", exc_info=1)
        db.rollback()
        raise

    if args.commit:
        logger.info("Commiting changes")
        db.commit()
    else:
        logger.info("Rolled back changes")
        db.rollback()

    logger.info("DONE %s", parser.prog)
Code example #36
File: tag_student_disks.py Project: chrnux/cerebrum
def ensure_untagged(disk_id):
    """Makes sure that the disk in question is not tagged.

    @param disk_id:
      Entity ID of the disk that is to be processed
    @type disk_id:
      int
      
    """
    disk = Factory.get("Disk")(db)
    disk.find(disk_id)
    if disk.get_trait(constants.trait_student_disk):
        logger.info("Disk '%s' (%s) tagged => untagging" % (disk.path, disk_id))
        disk.delete_trait(constants.trait_student_disk)
        disk.write_db()
    else:
        logger.debug("Disk '%s' (%s) is not tagged, as it shouldn't be" %
                     (disk.path, disk_id))
Code example #37
    def _get_membership_info(self, users):
        """Collect group memberships information."""
        group = Factory.get("Group")(self.db)
        self.logger.debug("Collecting user membership information")

        # crap. this is going to be VERY expensive...
        for row in group.search_members(member_type=self.const.entity_account):
            group_id = row["group_id"]
            if group_id not in self.groups:
                continue

            account_id = row["member_id"]
            if account_id not in users:
                continue

            gname = self._gname2dn(self.groups[group_id]["name"])
            users[account_id].setdefault("uioMemberOf", list()).append(gname)
        return users
Code example #38
    def get_gecos(self):
        """Returns the gecos string of this object.

        If self.gecos is not set, the gecos is a washed version of the
        person's cached full name."""
        default_gecos_name = getattr(self.const, cereconf.DEFAULT_GECOS_NAME)
        if self.gecos is not None:
            return self.gecos
        if self.owner_type == int(self.const.entity_group):
            return self.simplify_name("%s user" % self.account_name,
                                      as_gecos=1)
        assert self.owner_type == int(self.const.entity_person)
        p = Factory.get("Person")(self._db)
        p.find(self.owner_id)
        try:
            ret = p.get_name(self.const.system_cached, default_gecos_name)
            return self.simplify_name(ret, as_gecos=1)
        except Errors.NotFoundError:
            pass
        return "Unknown"  # Raise error?
Code example #39
File: IPUtils.py Project: chrnux/cerebrum
    def same_subnet(s1, s2):
        from Cerebrum.Utils import Factory
        from Cerebrum.modules.dns.Errors import SubnetError
        from Cerebrum.modules.dns.Subnet import Subnet
        db = Factory.get('Database')()
        sub = Subnet(db)
        try:
            sub.find(s1)
            tmp = sub.subnet_ip
            sub.clear()
            sub.find(s2)
        except SubnetError:
            return False

        if tmp == sub.subnet_ip:
            return True
        else:
            return False
Code example #40
    def __init__(self, db, logger, default_zone):
        self.logger = logger
        self.db = db
        self.const = Factory.get('Constants')(self.db)
        # TBD: This pre-allocating may interfere with multi-threaded bofhd
        self._arecord = ARecord.ARecord(self.db)
        self._aaaarecord = AAAARecord.AAAARecord(self.db)
        self._host = HostInfo.HostInfo(self.db)
        self._dns_owner = DnsOwner.DnsOwner(self.db)
        self._ip_number = IPNumber.IPNumber(self.db)
        self._ipv6_number = IPv6Number.IPv6Number(self.db)
        self._cname = CNameRecord.CNameRecord(self.db)
        self._validator = IntegrityHelper.Validator(self.db, default_zone)
        self._update_helper = IntegrityHelper.Updater(self.db)
        self._mx_set = DnsOwner.MXSet(self.db)
        self.default_zone = default_zone
        self._find = Utils.Find(self.db, default_zone)
        self._parser = Utils.DnsParser(self.db, default_zone)
0
    def find_ansattnr(self, person):
        """Find a person's ansattnr."""
        if self.ansattnr_cache:
            if person.entity_id in self.ansattnr_cache:
                a = self.ansattnr_cache[person.entity_id]
                return a
            return None

        pe = Factory.get('Person')(self.db)
        for row in pe.search_external_ids(
                entity_id=person.entity_id,
                source_system=self.authoritative_system,
                id_type=self.ansattnr_code,
                fetchall=False):
            if 'external_id' in row.keys():
                return row['external_id']
        # No ansattnr found
        return None
Code example #42
def make_ou_to_parent_map(perspective, db):
    """
    Returns a dictionary mapping ou_ids to their parent ids (or None, if no
    parent exists) in a given PERSPECTIVE (FS, LT, etc.)
    """

    ou = Factory.get("OU")(db)
    result = dict()

    for item in ou.get_structure_mappings(perspective):
        if item["parent_id"] is not None:
            parent_id = int(item["parent_id"])
        else:
            parent_id = None
        result[int(item["ou_id"])] = parent_id

    logger.debug("%d ou -> parent mappings", len(result))
    return result
Code example #43
File: NISUtils.py Project: Narvik-kommune/cerebrum
    def __init__(self,
                 namespace,
                 member_type,
                 group_spread,
                 member_spread,
                 tmp_group_prefix='x'):
        self._namecachedtime = mx.DateTime.now()
        self._member_spread = member_spread
        self._group_spread = group_spread
        self._member_type = member_type
        self._exported_groups = {}
        self._tmp_group_prefix = tmp_group_prefix
        self._group = Factory.get('Group')(db)
        for row in self._group.search(spread=group_spread):
            self._exported_groups[int(row['group_id'])] = row['name']
        self._num = 0
        self._entity2name = self._build_entity2name_mapping(
            [namespace, co.group_namespace])
Code example #44
    def find_primary_sko(self, primary_ou_id):
        """
        Find the sko corresponding to a primary_ou_id.

        In the simplest case, this is just ou.find(). However, if the sko
        found is NOT known to FS (that happens), then we follow the
        OU-hierarchy until we find a parent that *is* known to FS.
        """
        ou = Factory.get("OU")(self.db)
        try:
            ou.find(primary_ou_id)
            if self.fs.info.get_ou(ou.fakultet, ou.institutt, ou.avdeling,
                                   ou.institusjon):
                return ou.institusjon, ou.fakultet, ou.institutt, ou.avdeling
            # go up 1 level to the parent
            return self.find_primary_sko(ou.get_parent(self.ou_perspective))
        except Errors.NotFoundError:
            return None
Code example #45
def make_account_name_lookup(db):
    """
    :return callable:
        Returns a function that maps account entity ids to account names.
    """
    ac = Factory.get('Account')(db)
    logger.debug("caching account names...")
    cache = dict()
    for row in ac.search(expire_start=None):
        cache[row['account_id']] = row['name']
    logger.debug("done caching account names")

    def get_account_name(entity_id):
        if entity_id in cache:
            return cache[entity_id]
        return '<id:{:d}>'.format(entity_id)

    return get_account_name
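
A minimal usage sketch for the closure above (hypothetical entity ids; assumes `db` is the connected database object that was passed to make_account_name_lookup):

    lookup = make_account_name_lookup(db)
    lookup(1234)        # cached account name, if that id was found
    lookup(999999999)   # unknown ids fall back to '<id:999999999>'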
Code example #46
def output_account_info(writer, person_db):
    """Output primary account and e-mail informatino for person_db."""

    primary_account = person_db.get_primary_account()
    if primary_account is None:
        logger.info("Person %s has no accounts", person_db.entity_id)
        return

    account_db = Factory.get("Account")(cerebrum_db)
    account_db.find(primary_account)
    output_element(writer, account_db.get_account_name(), "brukernavn")

    try:
        primary_email = account_db.get_primary_mailaddress()
        output_element(writer, primary_email, "epost")
    except Errors.NotFoundError:
        logger.info("person %s has no primary e-mail address",
                    person_db.entity_id)
Code example #47
File: import_SAP_person.py Project: chrnux/cerebrum
def clean_person_data(processed_persons):
    """Removes information from person objects.

    :param set processed_persons: Person ids which information should not be
        removed from."""
    person = Factory.get('Person')(database)
    existing_persons = set(map(lambda x: x['person_id'],
                               person.list_persons()))
    for person_id in existing_persons - processed_persons:
        logger.info('Clearing contact info, addresses and title '
                    'for person_id:{}'.format(person_id))
        person.clear()
        person.find(person_id)
        person.populate_contact_info(const.system_sap)
        person.populate_address(const.system_sap)
        person.delete_name_with_language(name_variant=const.personal_title,
                                         name_language=const.language_nb)
        person.write_db()
Code example #48
File: pq_update.py Project: chrnux/cerebrum
def main():
    global db, verbose
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'uv', ['dryrun'])
    except getopt.GetoptError:
        usage(1)
    dryrun = verbose = 0
    db = Factory.get('Database')()
    db.cl_init(change_program='pq_update')
    for opt, val in opts:
        if opt == '--dryrun':
            dryrun = 1
        elif opt == '-v':
            verbose += 1
        elif opt == '-u':
            update_quotas(dryrun)
    if not opts:
        usage(1)
Code example #49
    def get_owner(self, guestname):
        """
        Find owner for the given guest account.

        @param guestname: uname of guest account
        @type guestname: str

        @rtype: int
        @return: entity_id of owner
        """
        ac = Factory.get('Account')(self.db)
        ac.find_by_name(guestname)
        owner = ac.get_trait(self.co.trait_uio_guest_owner)
        if not owner:
            raise Errors.NotFoundError("Not a guest account.")
        if not owner['target_id']:
            raise GuestAccountException("Already available.")
        return int(owner['target_id'])
Code example #50
    def get_account_list(self, person):
        """Return a list of a person's accounts and a short status. The accounts
        are sorted by priority.

        @type  person: Cerebrum.Person instance
        @param person: A Person instance, set with the person to get the
                       accounts from.
        """
        account = Factory.get('Account')(self.db)
        accounts = dict((a['account_id'], 9999999)
                        for a in account.list_accounts_by_owner_id(
                            owner_id=person.entity_id, filter_expired=False))
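        # Track the best (lowest) priority per account; 9999999 is a sentinel
        # for accounts without any account_type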
        for row in account.get_account_types(all_persons_types=True,
                                             owner_id=person.entity_id,
                                             filter_expired=False):
            if accounts[row['account_id']] > int(row['priority']):
                accounts[row['account_id']] = int(row['priority'])
        ret = list()
        for (ac_id, pri) in accounts.iteritems():
            account.clear()
            try:
                account.find(ac_id)
            except Errors.NotFoundError:
                log.error("Couldn't find account with id %s" % ac_id)
                continue
            status = 'status_inactive'
            if not (account.is_expired() or account.is_deleted()):
                status = 'status_active'
                accepted_quars = [
                    int(getattr(self.co, q))
                    for q in cereconf.INDIVIDUATION_ACCEPTED_QUARANTINES
                ]
                if any(q['quarantine_type'] not in accepted_quars
                       for q in account.get_entity_quarantine(
                           only_active=True)):
                    status = 'status_inactive'
            ret.append({
                'uname': account.account_name,
                'priority': pri,
                'status': status
            })
        # Sort by priority
        ret.sort(key=lambda x: x['priority'])
        return ret
Code example #51
File: LDIFHelper.py Project: chrnux/cerebrum
    def _get_contact_info(self, users):
        """Update users with name and e-mail data."""

        account = Factory.get("Account")(self.db)
        contact2tag = {
            self.const.virthome_contact_email: "mail",
            self.const.human_first_name: "givenName",
            self.const.human_last_name: "sn",
        }

        self.logger.debug("Collecting email/name info for LDAP export")
        for eci in account.list_contact_info(
                source_system=self.const.system_virthome,
                contact_type=tuple(contact2tag)):
            account_id = eci["entity_id"]
            if account_id not in users:
                continue

            contact_type = int(eci["contact_type"])
            contact_value = eci["contact_value"]
            tag = contact2tag[contact_type]
            users[account_id][tag] = contact_value

        self.logger.debug("Calculating cn and adjusting VA names")
        suffix = " (unverified)"
        for account_id in users:
            vals = users[account_id]
            first = vals.get("givenName") or ""
            last = vals.get("sn") or ""
            if not first and not last:
                full = vals["uname"]
            else:
                full = " ".join((first, last))

            if vals["np_type"] == self.const.virtaccount_type:
                first = first + suffix
                last = last + suffix
                full = full + suffix

            vals["givenName"] = first
            vals["sn"] = last
            vals["cn"] = full

        return users
Code example #52
    def can_add_group_member(self, op_id, src_entity, member_type, dest_group):
        """
        Check if the operator has permission to add members to the given
        group. TSD requires that members added by group moderators are
        affiliated with the same project as the group they are being added
        to.

        @type op_id: int
        @param op_id: The entity_id of the user performing the operation.

        @type dest_group: EntityType Group
        @param dest_group: The group to add/remove members to/from.
        """

        self.can_alter_group(op_id, dest_group)

        # If not a superuser, ensure that dest_group is a project group,
        # and that src_entity is affiliated with the same project as
        # dest_group.
        if not self.is_superuser(op_id):
            try:
                proj_id = dest_group.get_trait('project_group')['target_id']
            except Exception:
                raise CerebrumError('Destination group is not a project '
                                    'group.')
            ou = Factory.get('OU')(self._db)
            ou.find(proj_id)
            proj_name = ou.get_project_name()
            if member_type in ("group", self.const.entity_group):
                try:
                    group_trait = src_entity.get_trait('project_group')
                except Exception:
                    raise PermissionDenied(
                        'Group to be added is not a project group.')
                if not group_trait['target_id'] == proj_id:
                    raise PermissionDenied(
                        'Group %s is not affiliated with %s' %
                        (src_entity.group_name, proj_name))
            elif member_type in ("account", self.const.entity_account):
                if not src_entity.get_tsd_project_id() == proj_id:
                    raise PermissionDenied(
                        'Account %s is not affiliated with %s.' %
                        (src_entity.account_name, proj_name))
        return True
Code example #53
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Generate a group tree for LDAP", )
    parser.add_argument(
        '--ldiffile',
        help='Write groups to the ldif-file %(metavar)s',
        metavar='file',
    )
    parser.add_argument(
        '--picklefile',
        help='Write group memberships to the pickle-file %(metavar)s',
        metavar='file',
    )
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    if not any((args.ldiffile, args.picklefile)):
        parser.error('Must use --ldiffile or --picklefile')

    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    ldiffile = args.ldiffile
    picklefile = args.picklefile

    db = Factory.get('Database')()
    dn = ldapconf('GROUP', 'dn')

    logger.info('Generating LDIF...')
    destfile = ldif_outfile('GROUP', ldiffile)
    destfile.write(container_entry_string('GROUP'))
    mbr2grp = dump_ldif(db, dn, destfile)
    end_ldif_outfile('GROUP', destfile)
    logger.info('Wrote LDIF to %r', ldiffile)

    logger.info('Generating pickle dump...')
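    # Dump to a temporary file and rename it into place, so readers never
    # see a partially written pickle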
    tmpfname = picklefile + '.tmp'
    pickle.dump(mbr2grp, open(tmpfname, 'wb'), pickle.HIGHEST_PROTOCOL)
    os.rename(tmpfname, picklefile)
    logger.info('Wrote pickle file to %r', picklefile)

    logger.info('Done %s', parser.prog)
Code example #54
def main(inargs=None):
    parser = argparse.ArgumentParser(description=__doc__)
    parser = add_commit_args(parser, default=False)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    db.cl_init(change_program='account_affiliation_cleaner')

    clean_acc_affs(db)

    if args.commit:
        db.commit()
        logger.info("Committed all changes")
    else:
        db.rollback()
        logger.info("Dryrun, rolled back changes")
Code example #55
def main(inargs=None):
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('tee', args)
    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)

    fix_version(db, args.module_name, args.version, args.table,
                force=args.force)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()

    logger.info('Done with script %s', parser.prog)
Code example #56
def generate_group_statistics(db, presenter, details=False):
    co = Factory.get("Constants")(db)

    presenter.write_header("Reports dealing with group entities")

    # Groups without any members at all, directly or indirectly
    result = db.query("""
        SELECT group_id
        FROM [:table schema=cerebrum name=group_info]
        EXCEPT
        SELECT group_id
        FROM [:table schema=cerebrum name=group_member]
        """)
    presenter.write_entity("Number of groups without any members at all",
                           result)

    # TODO 2007-01-17 amveha: Groups without any members at all,
    # directly or indirectly.

    # Groups without spread
    result = db.query(
        """
        SELECT group_id
        FROM [:table schema=cerebrum name=group_info]
        EXCEPT
        SELECT entity_id
        FROM [:table schema=cerebrum name=entity_spread]
        WHERE entity_type = :entity_type_group
        """, {"entity_type_group": int(co.entity_group)})

    presenter.write_entity("Number of groups without spread", result)

    # Groups without descriptions
    result = db.query("""
        SELECT group_id
        FROM [:table schema=cerebrum name=group_info]
        WHERE
            -- No description set...
            description is NULL OR
            -- ... or description is empty
            description LIKE ''
        """)
    presenter.write_entity("Number of groups without description", result)
Code example #57
File: converters.py Project: chrnux/cerebrum
def entity_cinfo(msg, subject, *args):
    """Convert address type and source constants."""
    c = Factory.get('Constants')(args[-1])

    x = c.ContactInfo(msg['data']['type'])
    attr = {
        c.contact_phone: 'phone',
        c.contact_phone_private: 'privatePhone',
        c.contact_fax: 'fax',
        c.contact_email: 'externalEmail',
        c.contact_url: 'homePage',
        c.contact_mobile_phone: 'cellPhone',
        c.contact_private_mobile: 'cellPhone',
        c.contact_private_mobile_visible: 'cellPhone'
    }.get(x) or str(x).capitalize()

    return scim.Event(scim.MODIFY,
                      subject=subject,
                      attributes=[attr])
Code example #58
def get_account(ident, database):
    """Locate and return an account.

    If nothing is found, return None.
    """
    account = Factory.get("Account")(database)
    try:
        if (isinstance(ident, (int, long))
                or isinstance(ident, str) and ident.isdigit()):
            account.find(int(ident))
        else:
            account.find_by_name(ident)

        return account
    except Errors.NotFoundError:
        logger.warn("Cannot locate account associated with: %s", ident)
        return None

    assert False, "NOTREACHED"
Code example #59
def get_group(ident, database):
    """Locate and return a group.

    If nothing suitable is found, return None.
    """
    group = Factory.get("Group")(database)
    try:
        if (isinstance(ident, (int, long))
                or isinstance(ident, str) and ident.isdigit()):
            group.find(int(ident))
        else:
            group.find_by_name(ident)

        return group
    except Errors.NotFoundError:
        logger.warn("Cannot locate group associated with: %s", ident)
        return None

    assert False, "NOTREACHED"
Code example #60
File: expire_notifier.py Project: chrnux/cerebrum
def get_account(ident, database):
    """Try to locate an account associated with L{ident}.

    @return:
      An account proxy associated with whatever ident points to, or None, if
      no account match is possible.
    """

    account = Factory.get("Account")(database)
    try:
        if (isinstance(ident, (int, long))
                or isinstance(ident, str) and ident.isdigit()):
            account.find(int(ident))
        else:
            account.find_by_name(ident)

        return account
    except Errors.NotFoundError:
        return None