def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Set initial group_type values for known groups",
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('tee', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()

    logger.info("Setting group type for internal groups")
    count = update_internal_groups(db)
    logger.info("Set group type for %d groups", count)

    if has_module(db, 'sqlmodule_virtual_group'):
        logger.info("Setting group type for 'sqlmodule_virtual_group'")
        count = update_virtualgroups(db)
        logger.info("Set group type for %d groups", count)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done %s', parser.prog)
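
# Every script in this collection calls a shared ``add_commit_args`` helper
# before parsing.  The sketch below is a hypothetical, minimal stand-in (not
# the project's real ``argutils.add_commit_args``) illustrating the assumed
# behaviour: it adds a --commit flag so scripts can choose between db.commit()
# and db.rollback(), and returns the parser so callers may reassign it.
def add_commit_args_sketch(parser, default=False, commit_desc=None):
    parser.add_argument(
        '--commit',
        dest='commit',
        action='store_true',
        default=default,
        help=commit_desc or 'commit changes to the database',
    )
    return parser
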
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Import deceased dates from Paga", )
    parser.add_argument(
        '-f',
        '--file',
        dest='filename',
        # default=default_person_file,
        help='Read and import deceased dates from %(metavar)s',
        metavar='csv-file',
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)
    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)
    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)

    source = build_deceased_cache(read_csv_file(args.filename))
    process_deceased(db, source)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
def make_parser():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        default=DEFAULT_ENCODING,
        type=codec_type,
        help="html encoding, defaults to %(default)s")
    parser.add_argument(
        '-t', '--template-folder',
        default=DEFAULT_TEMPLATE_FOLDER,
        help='Path to the template folder'
    )
    limits = parser.add_argument_group(
        "Limits",
        "Time limits for first and second warnings"
    )
    limits.add_argument('--limit1',
                        required=True,
                        type=int,
                        help='Days until expire date for the first warning')
    limits.add_argument('--limit2',
                        required=True,
                        type=int,
                        help='Days until expire date for the second warning')
    test_group = parser.add_argument_group('Testing',
                                           'Arguments useful when testing')
    test_group.add_argument(
        '-p', '--print-messages',
        action='store_true',
        help='Print messages to console'
    )
    add_commit_args(parser, commit_desc='Send emails to group owners')
    return parser
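
# ``codec_type`` above is used as an argparse ``type=`` converter for the
# --encoding option.  A minimal hypothetical version (an assumption, not the
# project's actual helper) could validate the name via the codecs module:
import argparse
import codecs


def codec_type_sketch(name):
    try:
        return codecs.lookup(name)
    except LookupError:
        raise argparse.ArgumentTypeError("invalid encoding: %r" % (name,))
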
def main(inargs=None):
    parser = argparse.ArgumentParser(description=__doc__)
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    const = Factory.get('Constants')(db)
    person = Factory.get('Person')(db)
    account = Factory.get('Account')(db)
    db.cl_init('set_account_type')

    # get list of all accounts in the database
    logger.info("getting accounts...")
    all_accounts = account.list(filter_expired=False, fetchall=True)

    # get list of all accounts missing account_type
    logger.info("getting accounts missing type...")
    accounts_missing_type = process_accounts(all_accounts, const, account)

    # set account type for accounts that have none
    logger.info("setting account type...")
    set_account_type(accounts_missing_type, person, account)

    # commit or rollback
    if args.commit:
        db.commit()
        logger.info("Committing all changes to DB")
    else:
        db.rollback()
        logger.info("Dryrun, rollback changes")
def main(inargs=None):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-n',
        '--names',
        action='append',
        type=six.text_type,
        help='Which rules from the config should be processed?',
        required=True)
    add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)
    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)

    co = Factory.get('Constants')(db)
    gr = Factory.get('Group')(db)
    rules = load_rules(co, args.names)
    process_spreads(gr, rules)
    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done with %s', parser.prog)
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Process accounts for SITO employees")

    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)

    process_mail(db)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        db.rollback()
        logger.info('Rolling back changes')
    logger.info('Done %s', parser.prog)
def main(inargs=None):
    """Find moderatorless groups, make a nice table and send it to drift"""
    try:
        import argparse
    except ImportError:
        from Cerebrum.extlib import argparse
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-r',
                        '--recipient',
                        dest='recipient',
                        default=None,
                        help='Recipient of the report')
    logutils.options.install_subparser(parser)

    argutils.add_commit_args(parser, default=False)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.info('START %s', parser.prog)
    logger.info('Extracting adminless groups')
    abandoned_manual_groups = get_abandoned_manual_groups()
    logger.info('Creating table')
    table = make_table(abandoned_manual_groups)
    if args.recipient:
        logger.info('Sending email to %s', args.recipient)
        email.sendmail(args.recipient, '*****@*****.**', get_title(),
                       table)
    else:
        logger.info('No recipient provided; not sending report')
    logger.info('DONE %s', parser.prog)
def main():
    global uit_addresses_in_use

    parser = argparse.ArgumentParser(description=__doc__)
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args()
    Cerebrum.logutils.autoconf('cronjob', args)
    logger.info("Start %s", parser.prog)
    db = Factory.get('Database')()
    db.cl_init(change_program='process_uit_email')

    starttime = datetime.datetime.now()

    uit_addresses_in_use = get_existing_emails(db)
    process_mail(db)

    if args.commit:
        db.commit()
        logger.info("Committing all changes to DB")
    else:
        db.rollback()
        logger.info("Dryrun, rollback changes")

    running_time = datetime.datetime.now() - starttime
    logger.info("Done %s in %s", parser.prog, str(running_time))
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Import guest user data from SYSTEM-X", )
    parser.add_argument(
        'filename',
        help='Read and import SYSTEM-X guests from %(metavar)s',
        metavar='<file>',
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program='import_SYSX')

    stats = process_sysx_persons(db, args.filename)

    logger.info("Stats: %r", dict(stats))

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done %s', parser.prog)
def main(inargs=None):
    # Parse arguments
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-p',
                        '--pagafile',
                        metavar='filename',
                        help='Read national identities from given Paga file',
                        required=True)
    add_commit_args(parser)

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    try:
        file_obj = open(args.pagafile, 'r')
    except IOError:
        sys.exit("Could not open file: '{}'".format(args.pagafile))
    reader = csv.reader(file_obj, delimiter=';')

    row_fetched = 0
    row_fetch_max = 1000
    row_count = sum(1 for row in file_obj)
    file_obj.seek(0)
    mobile_count = 0

    # Initialize database
    db = Factory.get('Database')()
    db.cl_init(change_program='fetch_mobile')

    logger.info("Updating BAS with ICE numbers from Difi's"
                "'Kontakt- og reservasjonssregister'.")

    # Dummy username
    token = read_password('difi', 'oppslag.uit.no')

    while row_fetched < row_count:
        # GET all national identities
        national_identies = get_national_identies(reader, row_fetch_max)
        # GET all mobile phone numbers
        mobile_phones = get_mobile_list(token, national_identies)
        # UPDATE BAS
        mobile_count += update_bas(db, mobile_phones)

        row_fetched += row_fetch_max

    logger.debug("############")
    logger.debug("Lines in pagafile: %s" % row_count)

    if mobile_count > 0:
        logger.info("%s new ICE numbers added to BAS." % mobile_count)
        if args.commit:
            db.commit()
            logger.info("Committed all changes.")
        else:
            db.rollback()
            logger.info("Dryrun. Rollback changes.")
    else:
        logger.info("No new ICE numbers found.")
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Sync student subject groups", )
    parser.add_argument(
        '--include',
        type=re.compile,
        action='append',
        help='only create groups for edu units that match the given regex',
    )
    parser.add_argument(
        '--activity-groups',
        dest='act_file',
        help='build activity sub-groups from an '
        'undervisningsaktiviteter.xml file',
    )
    parser.add_argument(
        'unit_file',
        help='an undervisningenheter.xml file',
    )

    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)
    parser.set_defaults(logger_level=DEFAULT_LOG_LEVEL)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf(DEFAULT_LOG_PRESET, args)

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)

    with timer('reading FS data', logging.INFO):
        units = get_units(args.unit_file,
                          activity_file=args.act_file,
                          filter_units=args.include)
        nested = bool(args.act_file)
        edu_groups = build_group_idents(units)

    with timer('preparing Cerebrum data', logging.INFO):
        db = Factory.get("Database")()
        db.cl_init(change_program=parser.prog)
        builder = EduGroupBuilder(db, nested)

    total = len(edu_groups)
    with timer('updating groups', logging.INFO):
        for n, ident in enumerate(sorted(edu_groups), 1):
            unit = edu_groups[ident]
            logger.debug('processing groups for %s %r', ident, unit)
            builder.sync_edu_group_students(ident, unit)
            builder.sync_edu_group_educators(ident, unit)
            if n % 100 == 0:
                logger.debug('processed %d/%d units', n, total)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()

    logger.info('Done with script %s', parser.prog)
def main(inargs=None):
    """
    Find all direct and indirect members of a group and create a new and
    flatter version.
    """
    try:
        import argparse
    except ImportError:
        from Cerebrum.extlib import argparse
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-t',
                        '--target-group',
                        dest='target_group',
                        required=True,
                        default=None,
                        help='Group to be flattened')
    parser.add_argument('-d',
                        '--destination-group',
                        dest='destination_group',
                        required=True,
                        default=None,
                        help='Flattened group')
    logutils.options.install_subparser(parser)

    argutils.add_commit_args(parser, default=False)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.info('START %s', parser.prog)
    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    co = Factory.get('Constants')(db)
    gr = Factory.get('Group')(db)
    try:
        gr.find_by_name(args.target_group)
    except Errors.NotFoundError:
        parser.error('Target group {} does not exist.'.format(
            args.target_group))
    logger.info('Searching group %s for direct and indirect members',
                args.target_group)
    flattened = gr.search_members(group_id=gr.entity_id,
                                  indirect_members=True,
                                  member_type=co.entity_account)
    logger.info('Preparing flattened group %s', args.destination_group)
    prepare_empty(db, args)
    logger.info('Updating: Group %s to contain %i members',
                args.destination_group, len(flattened))
    update_flattened_derivative(db, flattened, args.destination_group)

    if args.commit:
        logger.info("Committing changes")
        db.commit()
    else:
        db.rollback()
        logger.info("Changes rolled back (dryrun)")

    logger.info('DONE %s', parser.prog)
def main(inargs=None):
    # --firstname name      : person's first name
    # --lastname name       : person's last name
    # --account name        : account name. The owner of the account is changed
    parser = argparse.ArgumentParser(
        description="Set person name (override)", )
    parser.add_argument(
        '--firstname',
        required=True,
        type=nonempty_type,
    )
    parser.add_argument(
        '--lastname',
        required=True,
        type=nonempty_type,
    )
    parser.add_argument(
        '--account',
        dest='username',
        required=True,
        type=nonempty_type,
        metavar='username',
    )
    add_commit_args(parser.add_argument_group('Database'))
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    # TODO: Change to cronjob? Raise default log level?
    Cerebrum.logutils.autoconf('tee', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)

    person = get_person(db, args.username)
    change_person_name(db, person, args.firstname, args.lastname)

    dryrun = not args.commit
    if not dryrun:
        is_sure = ask_yn("Do you want to store these changes?")
        logger.info('Prompt response=%r', is_sure)
        dryrun = not is_sure

    if dryrun:
        logger.info('Rolling back changes')
        db.rollback()
        print("Abort (use --commit and answer Y to commit changes)")
    else:
        logger.info('Committing changes')
        db.commit()
        print("Changes committed")
    logger.info('Done %s', parser.prog)
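
# ``nonempty_type`` is used above as an argparse ``type=`` converter for the
# name and account options.  A minimal hypothetical version (an assumption,
# not the original helper) that rejects blank values:
import argparse


def nonempty_type_sketch(value):
    value = value.strip()
    if not value:
        raise argparse.ArgumentTypeError("value must not be empty")
    return value
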
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Sync passwords from Caesar to current database", )
    what = parser.add_mutually_exclusive_group(required=True)
    what.add_argument(
        '-a',
        '--all',
        action='store_true',
        default=False,
        help='Sync all users in current database',
    )
    what.add_argument(
        '-f',
        '--file',
        dest='filename',
        type=existing_file,
        help='Sync usernames from file %(metavar)s',
        metavar='FILE',
    )
    what.add_argument(
        '-n',
        '--name',
        dest='username',
        help='Sync a single username %(metavar)s',
        metavar='USER',
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program='sync_passwords')

    with AccountBridge() as bridge:
        if args.username:
            sync_one_account(bridge, args.username)
        elif args.filename:
            sync_many(db, bridge, read_usernames(args.filename))
        elif args.all:
            sync_many(db, bridge, fetch_usernames(db))

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        db.rollback()
        logger.info('Rolling back changes')

    logger.info('Done %s', parser.prog)
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Import SITO XML files into the Cerebrum database")

    parser.add_argument(
        '-p', '--person-file',
        help='Read and import persons from %(metavar)s',
        metavar='xml-file',
    )
    parser.add_argument(
        '-o', '--ou-file',
        help='Read and import org units from %(metavar)s',
        metavar='xml-file',
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)

    if args.ou_file:
        logger.info('Fetching OUs from %r', args.ou_file)
        ou_list = list(generate_ous(args.ou_file))
        logger.info('Importing %d OUs', len(ou_list))
        import_ous(db, ou_list)
        logger.info('OU import done')

    if args.person_file:
        logger.info('Loading existing affiliations')
        aff_set = load_sito_affiliations(db)
        logger.info('Fetching persons from %r', args.person_file)
        person_list = list(generate_persons(args.person_file))
        logger.info('Importing %d persons', len(person_list))
        # Note: import_person updates the aff_set
        import_persons(db, person_list, aff_set)
        logger.info('Cleaning old affiliations')
        remove_old_affiliations(db, aff_set)
        logger.info('Person import done')

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        db.rollback()
        logger.info('Rolling back changes')
    logger.info('Done %s', parser.prog)
def main(inargs=None):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-e',
                        '--encoding',
                        dest='codec',
                        default=DEFAULT_ENCODING,
                        type=codec_type,
                        help="html encoding, defaults to %(default)s")
    parser.add_argument('-t',
                        '--template-folder',
                        default=DEFAULT_TEMPLATE_FOLDER,
                        help='Path to the template folder')
    parser.add_argument(
        '-d',
        '--dates',
        type=lambda date: datetime.datetime.strptime(date, '%d-%m'),
        default=None,
        action='append',
        help='Check date before running the script. Yearly dates to run the '
        'script, in the format <day>-<month>. The script runs normally if'
        ' no date is given.')
    test_group = parser.add_argument_group('Testing',
                                           'Arguments useful when testing')
    test_group.add_argument('-p',
                            '--print-messages',
                            action='store_true',
                            help='Print messages to console')
    test_mutex = test_group.add_mutually_exclusive_group()
    test_mutex.add_argument(
        '-o',
        '--only-owner',
        default=None,
        help='Only search for groups owned by the given account')
    test_mutex.add_argument('--ten',
                            action='store_true',
                            help='Only process 10 group owners')
    add_commit_args(parser, commit_desc='Send emails to group owners')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)

    if check_date(args.dates):
        db = Factory.get('Database')()
        send_mails(db, args)
    else:
        logger.info('Today is not in the given list of dates')

    logger.info('Done with script %s', parser.prog)
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="This utility creates an exchange email for a user", )
    parser.add_argument(
        '-n',
        '--noprimary',
        action='store_false',
        dest='primary',
        default=True,
        help='Do not set primary address',
    )

    parser.add_argument(
        'account',
        help='Set email address for account name %(metavar)s',
        metavar='<account>',
    )
    parser.add_argument(
        'email',
        type=email_type,
        help='Set email address to %(metavar)s',
        metavar='<email>',
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('tee', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)

    try:
        set_mail(db, args.account, args.email[0], args.email[1], args.primary)
    except ValueError as e:
        logger.error('Unable to set email: %s', e)
        raise SystemExit(1)
    except Exception as e:
        logger.error('Unhandled error', exc_info=True)
        raise SystemExit(2)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done %s', parser.prog)
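
# Since set_mail() above is called with args.email[0] and args.email[1], the
# ``email_type`` converter apparently yields an indexable (local-part, domain)
# pair.  A hypothetical stand-in (an assumption, not the original helper):
import argparse


def email_type_sketch(value):
    local, sep, domain = value.partition('@')
    if not (local and sep and domain):
        raise argparse.ArgumentTypeError("invalid email address: %r" % (value,))
    return (local, domain)
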
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Import Paga XML files into the Cerebrum database")

    parser.add_argument(
        '-p',
        '--person-file',
        required=True,
        help='Read and import persons from %(metavar)s',
        metavar='xml-file',
    )
    parser.add_argument(
        '--delete',
        '--include-delete',
        dest='delete',
        action='store_true',
        default=False,
        help='Delete old affiliations',
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)

    if args.delete:
        old_affs = load_paga_affiliations(db)
    else:
        old_affs = None

    person_callback = PersonProcessor(db, old_affs=old_affs)
    PagaDataParserClass(args.person_file, person_callback)

    if args.delete:
        remove_old_affiliations(db, old_affs)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done %s', parser.prog)
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Import guest user data from SYSTEM-X", )
    parser.add_argument(
        '--no-email',
        dest='send_email',
        action='store_false',
        default=True,
        help='Omit sending email to new users',
    )
    parser.add_argument(
        'filename',
        help='Process SYSTEM-X guests imported from %(metavar)s',
        metavar='<file>',
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    # Do *NOT* send e-mail if running in dryrun mode
    send_email = args.send_email if args.commit else False

    db = Factory.get('Database')()
    db.cl_init(change_program='process_systemx')

    logger.info('Reading file=%r ...', args.filename)
    sysx = SYSX(args.filename)
    sysx.list()

    logger.info('Fetching sysx cerebrum data...')
    persons, accounts = get_existing_accounts(db)

    logger.info('Processing sysx accounts...')
    build = Build(db, sysx, persons, accounts, send_email)
    build.process_all()

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done %s', parser.prog)
def main(inargs=None):

    # Default values
    # FIXME: Get this from somewhere sensible instead of hardcoding it
    default_perspective = 'FS'

    # Parse arguments
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-p', '--perspective',
        help='filter processing on the given perspective code, e.g. FS',
        default=default_perspective)
    parser = add_commit_args(parser, default=False)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    db.cl_init(change_program='process_ou_groups.py')

    processor = OuGroupProcessor(db, args.perspective)
    processor.process()

    if args.commit:
        db.commit()
        logger.info("Committing all changes to DB")
    else:
        db.rollback()
        logger.info("Dryrun, rolling back changes")
def main(inargs=None):
    """Start consuming messages."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-c',
                        '--config',
                        dest='configfile',
                        metavar='FILE',
                        default=None,
                        help='Use a custom configuration file')
    parser = add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    prog_name = parser.prog.rsplit('.', 1)[0]  # program name without extension
    database = Factory.get('Database')()
    database.cl_init(change_program=parser.prog)
    config = load_config(filepath=args.configfile)

    if not args.commit:
        # Dryrun: alias commit to rollback so the consumer callback can call
        # database.commit() without persisting anything.
        database.commit = database.rollback

    logger.info('Starting %s', prog_name)
    consumer = get_consumer(functools.partial(callback, database),
                            prog_name,
                            config=config.consumer)
    with consumer:
        try:
            consumer.start()
        except KeyboardInterrupt:
            consumer.stop()
        consumer.close()
    logger.info('Stopping %s', prog_name)
def main(inargs=None):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-r',
        '--role_file',
        required=True,
        help='Import role groups from %(metavar)s',
        metavar='<file>',
    )
    parser = add_commit_args(parser)

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    RolesXmlParser(args.role_file, rolle_helper)

    # ``db`` is assumed to be a module-level database connection shared with
    # the rolle_helper callback.
    if args.commit:
        logger.info("Committing changes")
        db.commit()
    else:
        logger.info("Dryrun, rollback changes")
        db.rollback()

    logger.info('Done %s', parser.prog)
def main(inargs=None):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--user_spread')
    parser.add_argument('--url', required=True)
    parser = add_commit_args(parser, default=True)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    if args.user_spread:
        user_spread = getattr(co, args.user_spread)
    else:
        user_spread = co.spread_uit_ad_account

    logger.info("Trying to connect to %s", args.url)
    ADquickUser = ADquickSync(db, co, logger, url=args.url)
    try:
        ADquickUser.quick_sync(user_spread, not args.commit)
    except socket.error as m:
        if m[0] == 111:
            logger.critical(
                "'%s' while connecting to %s, sync service stopped?", m[1],
                args.url)
        else:
            logger.error("ADquicksync failed with socket error: %s", m)
    except Exception as m:
        logger.error("ADquicksync failed: %s", m)
def main(inargs=None):
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="Send expire date notification emails",
        epilog=epilog,
    )
    parser.add_argument(
        '--cache',
        dest='cache_file',
        required=True,
        help='Use cache %(metavar)s for comparison between runs',
        metavar='<cache-file>',
    )
    parser.add_argument(
        '--generate-info',
        dest='report_file',
        help='Write a HTML report on expired users to %(metavar)s',
        metavar='<report-file>',
    )
    commit_group = parser.add_argument_group(
        'Committing',
        'Unless --commit is provided, the script will run in dryrun mode. '
        'In this mode, no emails will be sent.')
    add_commit_args(commit_group)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info("Start %s", parser.prog)
    logger.debug("args: %s", repr(args))

    db = Factory.get('Database')()

    expire_util = UserExpireUtil(db, not args.commit)

    cache = load_cache(args.cache_file)
    expire_util.check_users(cache)
    dump_cache(cache, args.cache_file)

    if args.report_file:
        logger.info("Generating report")
        write_report(cache, args.report_file)
        logger.info("Report written to %r", args.report_file)

    logger.info("Done %s", parser.prog)
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-o',
        '--outfile',
        required=True,
        metavar='<filename>',
    )
    parser.add_argument(
        '-t',
        '--test',
        help='Use test data',
        action='store_true',
    )
    add_commit_args(parser)

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args()
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.info('args=%r', args)

    logger.info('Fetching display name changes from %r (test=%r)', URL,
                args.test)
    changes = get_changes(URL, test=args.test)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    const = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    person = Factory.get('Person')(db)

    logger.info('Updating %d display names', len(changes))
    change_names(changes, args.outfile, const, account, person, db)

    if args.commit:
        db.commit()
        logger.info("Committed changes to DB")
    else:
        db.rollback()
        logger.info("Dryrun, rollback changes")

    logger.info('Done %s', parser.prog)
def main(inargs=None):
    parser = argparse.ArgumentParser(description="Delete accounts")
    what = parser.add_mutually_exclusive_group(required=True)
    what.add_argument(
        '-f',
        '--file',
        dest='filename',
        help="Delete account_ids found in %(metavar)s",
        metavar='filename',
    )
    what.add_argument(
        '-a',
        '--account',
        dest='account_id',
        type=int,
        help="Delete account with %(metavar)s",
        metavar='account_id',
    )
    add_commit_args(parser, default=False)

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('tee', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()

    if args.filename:
        for account_id in read_integers(args.filename):
            process_account(db, account_id)
    else:
        process_account(db, args.account_id)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done %s', parser.prog)
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description='Update automatic groups so that they reflect the '
        'OU-structure of an organization')
    parser.add_argument(
        '--perspective',
        type=six.text_type,
        help='Set the system perspective to fetch the OU structure from, '
        'e.g. SAP or FS.',
        required=True)
    parser.add_argument(
        '--prefix',
        type=six.text_type,
        action='append',
        default=[],
        help='Prefix for the automatic groups this script creates',
        required=True)
    add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)
    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    co = Factory.get('Constants')(db)

    perspective = get_constant(db, parser, co.OUPerspective, args.perspective)
    ou = Factory.get('OU')(db)
    gr = Factory.get('Group')(db)

    logger.info('Caching OUs')
    ou_id2sko = cache_stedkoder(ou)
    for prefix in args.prefix:
        process_prefix(db, ou, gr, co, prefix, perspective, ou_id2sko)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done with %s', parser.prog)
def main():
    date_tmp = time.localtime()
    default_dump_file = get_dump_file(date_tmp)
    default_date = get_date(date_tmp)

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-d', '--dump_file',
        default=default_dump_file,
        help='Removed data will be dumped into this file.'
    )
    parser.add_argument(
        '-D', '--date',
        default=default_date,
        help='All entries listed in change_program and change_type, older than'
             ' this date, will be deleted. Format: YYYY-MM-DD'
    )
    parser.add_argument(
        '-c', '--change_program',
        default=None,
        help='Comma-separated list. All entries from these scripts will be '
             'deleted'
    )
    parser = add_commit_args(parser, default=True)

    args, _rest = parser.parse_known_args()
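    # A first pass with parse_known_args() determines whether --change_program
    # was given; --change_type below is then only required when it was not.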
    parser.add_argument(
        '-C', '--change_type',
        required=args.change_program is None,
        default=None,
        help='Comma-separated list. All entries of these change_types will be'
             ' deleted.'
    )

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args()
    Cerebrum.logutils.autoconf("cronjob", args)

    change_type_list = None
    change_program_list = None
    db = Factory.get('Database')()

    if not args.commit:
        db.commit = db.rollback

    if args.change_type:
        change_type_list = args.change_type.split(",")
    if args.change_program:
        change_program_list = args.change_program.split(",")

    log = AccessLog(args.dump_file, db, change_type_list)
    id_list = log.get_change_ids(args.date, change_program_list,
                                 change_type_list)
    log.delete_change_ids(id_list)
def main(inargs=None):
    parser = argparse.ArgumentParser(
        description="Process accounts for SITO employees")

    parser.add_argument(
        '-p',
        '--person-file',
        default=default_person_file,
        help='Process persons from %(metavar)s',
        metavar='xml-file',
    )
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program='process_sito')
    builder = Build(db)

    logger.info('Fetching cerebrum data')
    builder.persons, builder.accounts = get_existing_accounts(db)

    logger.info('Reading persons from %r', args.person_file)
    source_data = generate_persons(args.person_file)

    logger.info('Processing persons')
    builder.process(source_data)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        db.rollback()
        logger.info('Rolling back changes')
    logger.info('Done %s', parser.prog)
def main(inargs=None):
    logger = logging.getLogger(__name__)

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--filename',
                        default=default_source_file,
                        dest='source_file',
                        help='path to XML file with System Accounts')
    parser = add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.info('Starting to cache system accounts from %s', args.source_file)
    sys_acc_parser = SystemAccountsParser(args.source_file,
                                          system_account_callback, logger)
    logger.info('Finished caching system accounts')

    # Get default values
    logger.info("Caching default values to use in account processing...")
    db = Factory.get('Database')()
    db.cl_init(change_program='activate_account')

    ac = Factory.get('Account')(db)
    co = Factory.get('Constants')(db)

    ac.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    default_creator_id = ac.entity_id
    default_owner_id = ac.owner_id
    default_owner_type = ac.owner_type
    default_source_system = co.system_sysacc
    valid_contact_types = {
        co.contact_email.str: co.contact_email,
        co.contact_url.str: co.contact_url
    }
    logger.info("Finished caching default values.")

    logger.info('Starting to process accounts')
    for account in sys_acc_parser.system_accounts_cache:
        process_account(db, account, logger, default_owner_id,
                        default_owner_type, default_creator_id,
                        default_source_system, valid_contact_types)
    logger.info('Finished processing accounts')

    if args.commit:
        logger.info("Commiting changes!")
        db.commit()

    else:
        logger.info("Rolling back changes!")
        db.rollback()
def main():
    # parsing
    parser = argparse.ArgumentParser()
    parser = add_commit_args(parser, default=True)
    parser.add_argument(
        '-v', '--verbose',
        action='count')
    parser.add_argument(
        '-p', '--person-file',
        dest='personfile',
        default=pj(cereconf.FS_DATA_DIR, "merged_persons.xml"))
    parser.add_argument(
        '-s', '--studieprogram-file',
        dest='studieprogramfile',
        default=pj(cereconf.FS_DATA_DIR, "studieprog.xml"))
    parser.add_argument(
        '-g', '--generate-groups',
        dest='gen_groups',
        action='store_true')
    parser.add_argument(
        '-d', '--include-delete',
        dest='include_delete',
        action='store_true')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args()
    Cerebrum.logutils.autoconf('cronjob', args)

    source = 'system_lt'
    rules = [
        ('tilbud', ('_kontakt', '_hjemsted', None)),
        ('aktiv', ('_semadr', '_hjemsted', None)),
        ('evu', ('_job', '_hjem', None)),
        ('privatist_studieprogram', ('_semadr', '_hjemsted', None)),
    ]
    adr_map = {
        '_arbeide': ('adrlin1_arbeide', 'adrlin2_arbeide', 'adrlin3_arbeide',
                     'postnr_arbeide', 'adresseland_arbeide'),
        '_hjemsted': ('adrlin1_hjemsted', 'adrlin2_hjemsted',
                      'adrlin3_hjemsted', 'postnr_hjemsted',
                      'adresseland_hjemsted'),
        '_semadr': ('adrlin1_semadr', 'adrlin2_semadr', 'adrlin3_semadr',
                    'postnr_semadr', 'adresseland_semadr'),
        '_job': ('adrlin1_job', 'adrlin2_job', 'adrlin3_job', 'postnr_job',
                 'adresseland_job'),
        '_hjem': ('adrlin1_hjem', 'adrlin2_hjem', 'adrlin3_hjem',
                  'postnr_hjem', 'adresseland_hjem'),
        '_kontakt': ('adrlin1_kontakt', 'adrlin2_kontakt', 'adrlin3_kontakt',
                     'postnr_kontakt', 'adresseland_kontakt'),
        '_besok_adr': ('institusjonsnr', 'faknr', 'instituttnr', 'gruppenr')
    }
    reservation_query = ('tilbud', 'aktiv', 'privatist_studieprogram', 'evu',)
    fs_importer = FsImporterUia(args.gen_groups,
                                args.include_delete, args.commit,
                                args.studieprogramfile, source, rules, adr_map,
                                reservation_query=reservation_query)

    StudentInfo.StudentInfoParser(args.personfile,
                                  fs_importer.process_person_callback,
                                  logger)

    if args.include_delete:
        fs_importer.rem_old_aff()

    if args.commit:
        fs_importer.db.commit()
        logger.info('Changes were committed to the database')
    else:
        fs_importer.db.rollback()
        logger.info('Dry run. Changes to the database were rolled back')

    logger.info("Found %d persons without name.", fs_importer.no_name)
    logger.info("Completed")
def main():
    parser = argparse.ArgumentParser(
        description='Merges all information about a person identified by '
                    'entity_id into the new person, without overwriting '
                    'existing values in the new person. The old person '
                    'entity is permanently removed from the database.')

    # Add commit/dryrun arguments
    parser = add_commit_args(parser)

    parser.add_argument(
        '--old',
        help='Old entity_id',
        required=True,
        type=int)
    parser.add_argument(
        '--new',
        help='New entity_id',
        required=True,
        type=int)
    parser.add_argument(
        '--pq-uio',
        dest='with_uio_pq',
        help="Transfer uio-printerquotas",
        action='store_true')
    parser.add_argument(
        '--pq-uia',
        dest='with_uia_pq',
        help='Transfer uia-printerquotas',
        action='store_true')
    parser.add_argument(
        '--ephorte-uio',
        dest='with_uio_ephorte',
        help='transfer uio-ephorte roles',
        action='store_true')
    parser.add_argument(
        '--voip-uio',
        dest='with_uio_voip',
        help='transfer voip objects',
        action='store_true')

    logutils.options.install_subparser(parser)
    args = parser.parse_args()
    logutils.autoconf('tee', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)

    #
    # Initialize globals
    #
    db = Factory.get('Database')()
    db.cl_init(change_program="join_persons")

    old_person = Factory.get('Person')(db)
    old_person.find(args.old)
    new_person = Factory.get('Person')(db)
    new_person.find(args.new)
    person_join(old_person, new_person, args.with_uio_pq, args.with_uia_pq,
                args.with_uio_ephorte, args.with_uio_voip, db)
    old_person.delete()

    if args.commit:
        db.commit()
        logger.info('Changes were committed to the database')
    else:
        db.rollback()
        logger.info('Dry run. Changes to the database were rolled back')

    logger.info('Done with script %s', parser.prog)