コード例 #1
0
def main(inargs=None):
    """Process spread rules for groups; commit only when requested."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-n', '--names',
        action='append',
        type=six.text_type,
        required=True,
        help='Which rules from the config should be processed?')
    add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    co = Factory.get('Constants')(db)
    gr = Factory.get('Group')(db)

    # Load only the rules named on the command line and apply them.
    process_spreads(gr, load_rules(co, args.names))

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done with %s', parser.prog)
コード例 #2
0
def main(inargs=None):
    """Remove external ids from a source system and write a CSV report."""
    parser = argparse.ArgumentParser(description=__doc__)
    # Keep the returned actions: they are passed to argutils.get_constant()
    # so constant-lookup errors can reference the offending argument.
    source_arg = parser.add_argument(
        '-s', '--source-system',
        default='SAP',
        metavar='SYSTEM',
        help='Source system to remove id number from')
    id_type_arg = parser.add_argument(
        '-e', '--external-id-type',
        default='NO_BIRTHNO',
        metavar='IDTYPE',
        help='External ID type')
    parser.add_argument(
        '-c', '--commit',
        action='store_true',
        dest='commit',
        default=False,
        help='Commit changes (default: log changes, do not commit)')
    parser.add_argument(
        '-o', '--output',
        metavar='FILE',
        default='-',
        help='The file to print the report to, defaults to stdout')
    parser.add_argument(
        '--codec',
        dest='codec',
        default='utf-8',
        type=argutils.codec_type,
        help="Output file encoding, defaults to %(default)s")

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    pe = Factory.get('Person')(db)
    db.cl_init(change_program='remove_src_fnrs')

    get_const = partial(argutils.get_constant, db, parser)
    ssys = get_const(co.AuthoritativeSystem, args.source_system, source_arg)
    external_id_type = get_const(co.EntityExternalId, args.external_id_type,
                                 id_type_arg)

    logutils.autoconf('cronjob', args)
    logger.info('Start of script {}'.format(parser.prog))
    logger.debug('args: {}'.format(args))
    logger.info('source_system: {}'.format(text_type(ssys)))
    logger.info('external_id_type: {}'.format(text_type(external_id_type)))

    remover = SrcExtidRemover(co, pe, ssys, external_id_type)
    remover.remover()
    remover.get_output_stream(args.output, args.codec)
    remover.write_csv_report()
    logger.info('Report written to %s', remover.stream.name)

    if args.commit:
        db.commit()
        logger.debug('Committed all changes')
    else:
        db.rollback()
        logger.debug('Rolled back all changes')
    logger.info('Script %s is done', parser.prog)
コード例 #3
0
def main():
    """Process queued bofhd requests of the selected types."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--type',
                        dest='types',
                        action='append',
                        choices=['sympa'],
                        required=True)
    parser.add_argument('-m', '--max',
                        dest='max_requests',
                        type=int,
                        default=999999,
                        help='Perform up to this number of requests')
    parser.add_argument('-p', '--process',
                        dest='process',
                        action='store_true',
                        help='Perform the queued operations')

    logutils.options.install_subparser(parser)
    args = parser.parse_args()
    logutils.autoconf('bofhd_req', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)

    # Guard clause: without --process there is nothing to do.
    if not args.process:
        return
    processor = process_requests.RequestProcessor(db, const)
    processor.process_requests(operations_map, args.types, args.max_requests)
コード例 #4
0
def main(inargs=None):
    """Run an AD quick-sync for users with the given (or default) spread.

    :param inargs: optional argument list for the parser (defaults to argv)
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--user_spread')
    parser.add_argument('--url', required=True)
    parser = add_commit_args(parser, default=True)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    # Resolve the spread constant by name, or fall back to the default.
    if args.user_spread:
        user_spread = getattr(co, args.user_spread)
    else:
        user_spread = co.spread_uit_ad_account

    logger.info("Trying to connect to %s", args.url)
    ad_quick_user = ADquickSync(db, co, logger, url=args.url)
    try:
        # NOTE(review): add_commit_args() normally provides args.commit;
        # confirm that it also defines args.dry_run as used here.
        ad_quick_user.quick_sync(user_spread, args.dry_run)
    except socket.error as m:
        # Bug fix: exceptions are not subscriptable on Python 3, so use the
        # errno/strerror attributes instead of m[0]/m[1].
        if m.errno == 111:  # 111 == ECONNREFUSED
            logger.critical(
                "'%s' while connecting to %s, sync service stopped?",
                m.strerror, args.url)
        else:
            logger.error("ADquicksync failed with socket error: %s", m)
    except Exception as m:
        logger.error("ADquicksync failed: %s", m)
コード例 #5
0
def main():
    """Entry point: handle queued sympa bofhd requests."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-t', '--type',
        dest='types', action='append', choices=['sympa'], required=True)
    parser.add_argument(
        '-m', '--max',
        dest='max_requests', default=999999, type=int,
        help='Perform up to this number of requests')
    parser.add_argument(
        '-p', '--process',
        dest='process', action='store_true',
        help='Perform the queued operations')

    logutils.options.install_subparser(parser)
    args = parser.parse_args()
    logutils.autoconf('bofhd_req', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)

    # Only act when --process was given; otherwise this is a dry parse/log.
    if args.process:
        process_requests.RequestProcessor(db, const).process_requests(
            operations_map, args.types, args.max_requests)
コード例 #6
0
def main(inargs=None):
    """Find moderatorless groups, make a nice table and send it to drift"""
    try:
        import argparse
    except ImportError:
        from Cerebrum.extlib import argparse
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-r',
                        '--recipient',
                        dest='recipient',
                        default=None,
                        help='Recipient of the report')
    logutils.options.install_subparser(parser)

    argutils.add_commit_args(parser, default=False)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.info('START %s', parser.prog)
    logger.info('Extracting adminless groups')
    abandoned_manual_groups = get_abandoned_manual_groups()
    logger.info('Creating table')
    table = make_table(abandoned_manual_groups)
    # Bug fix: removed a redundant second parser.parse_args(inargs) call
    # that re-parsed the arguments for no reason.
    if args.recipient:
        logger.info('Sending email to %s', args.recipient)
        email.sendmail(args.recipient, '*****@*****.**', get_title(),
                       table)
    else:
        logger.info('No email provided')
    logger.info('DONE %s', parser.prog)
コード例 #7
0
def main(inargs=None):
    """Process OU groups for a perspective; commit only on request."""
    # FIXME: Get this from somewhere sensible instead of hardcoding it
    default_perspective = 'FS'

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-p', '--perspective',
        default=default_perspective,
        help='filter process on determined perspective code e.g FS')
    parser = add_commit_args(parser, default=False)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    db.cl_init(change_program='process_ou_groups.py')

    OuGroupProcessor(db, args.perspective).process()

    if args.commit:
        db.commit()
        logger.info("Committing all changes to DB")
    else:
        db.rollback()
        logger.info("Dryrun, rolling back changes")
コード例 #8
0
def main(inargs=None):
    """Start consuming messages.

    :param inargs: optional argument list for the parser (defaults to argv)
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-c',
                        '--config',
                        dest='configfile',
                        metavar='FILE',
                        default=None,
                        help='Use a custom configuration file')
    parser = add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    # Bug fix: rsplit() returns a list -- take the first element so the
    # program name (without extension) is used, not the whole list.
    prog_name = parser.prog.rsplit('.', 1)[0]
    database = Factory.get('Database')()
    database.cl_init(change_program=parser.prog)
    config = load_config(filepath=args.configfile)

    if not args.commit:
        # Dryrun: make commit() a no-op by aliasing it to rollback().
        database.commit = database.rollback

    logger.info('Starting %s', prog_name)
    consumer = get_consumer(functools.partial(callback, database),
                            prog_name,
                            config=config.consumer)
    with consumer:
        try:
            consumer.start()
        except KeyboardInterrupt:
            consumer.stop()
        consumer.close()
    logger.info('Stopping %s', prog_name)
コード例 #9
0
def main(inargs=None):
    """
    Find all direct and indirect members of a group and create a new and
    flatter version.
    """
    try:
        import argparse
    except ImportError:
        from Cerebrum.extlib import argparse
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-t',
                        '--target-group',
                        dest='target_group',
                        required=True,
                        default=None,
                        help='Group to be flattened')
    parser.add_argument('-d',
                        '--destination-group',
                        dest='destination_group',
                        required=True,
                        default=None,
                        help='Flattened group')
    logutils.options.install_subparser(parser)

    argutils.add_commit_args(parser, default=False)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.info('START %s', parser.prog)
    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    co = Factory.get('Constants')(db)
    gr = Factory.get('Group')(db)
    try:
        gr.find_by_name(args.target_group)
    except Errors.NotFoundError:
        parser.error('Target group {} does not exist.'.format(
            args.target_group))
    # Bug fix: the original log call had a %s placeholder but no argument.
    logger.info('Searching group %s for direct and indirect members',
                args.target_group)
    flattened = gr.search_members(group_id=gr.entity_id,
                                  indirect_members=True,
                                  member_type=co.entity_account)
    logger.info('Preparing flattened group %s', args.destination_group)
    prepare_empty(db, args)
    logger.info('Updating: Group %s to contain %i members',
                args.destination_group, len(flattened))
    update_flattened_derivative(db, flattened, args.destination_group)

    if args.commit:
        logger.info("Committing changes")
        db.commit()
    else:
        db.rollback()
        logger.info("Changes rolled back (dryrun)")

    logger.info('DONE %s', parser.prog)
コード例 #10
0
def main(inargs=None):
    """Import system accounts from an XML file and process each of them."""
    logger = logging.getLogger(__name__)

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--filename',
                        dest='source_file',
                        default=default_source_file,
                        help='path to XML file with System Accounts')
    parser = add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.info('Starting to cache system accounts from %s', args.source_file)
    sys_acc_parser = SystemAccountsParser(args.source_file,
                                          system_account_callback, logger)
    logger.info('Finished caching system accounts')

    # Cache defaults used for every processed account.
    logger.info("Caching default values to use in account processing...")
    db = Factory.get('Database')()
    db.cl_init(change_program='activate_account')

    ac = Factory.get('Account')(db)
    co = Factory.get('Constants')(db)

    ac.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    creator_id = ac.entity_id
    owner_id = ac.owner_id
    owner_type = ac.owner_type
    source_system = co.system_sysacc
    contact_types = {c.str: c for c in (co.contact_email, co.contact_url)}
    logger.info("Finished caching default values.")

    logger.info('Starting to process accounts')
    for account in sys_acc_parser.system_accounts_cache:
        process_account(db, account, logger, owner_id, owner_type,
                        creator_id, source_system, contact_types)
    logger.info('Finished processing accounts')

    if args.commit:
        logger.info("Commiting changes!")
        db.commit()

    else:
        logger.info("Rolling back changes!")
        db.rollback()
コード例 #11
0
def main(inargs=None):
    """Export AD auto-groups for course units and study programs as XML."""
    parser = argparse.ArgumentParser(description=__doc__)
    # All three file arguments share the same shape; build them in a loop.
    for opt, text in (
            ('--studieprogfile',
             'Read FS study programs (studieprogrammer) from %(metavar)s'),
            ('--undenhfile',
             'Read FS course units (undervisningsenheter) from %(metavar)s'),
            ('--exportfile',
             'Write XML to %(metavar)s')):
        parser.add_argument(opt, required=True, help=text, metavar='<file>')

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.debug("setting studieprogfile to '%s'", args.studieprogfile)
    logger.debug("setting undenhfile to '%s'", args.undenhfile)
    logger.debug("setting exportfile to '%s'", args.exportfile)
    start = mx.DateTime.now()

    db = Factory.get('Database')()
    ac = Factory.get('Account')(db)
    gr = Factory.get('Group')(db)
    co = Factory.get('Constants')(db)

    db.cl_init(change_program='ad_export_autogroups_undervisning')

    get_undenh_file(xmlfile=args.undenhfile)
    get_studieprog_file(xmlfile=args.studieprogfile)
    get_ad_accounts(co, ac)
    get_undenh_groups(db, co, ac, gr)
    get_studieprogramgroups(db, co, ac, gr)
    write_xml(aggregate_studieprogram_groups(ac), args.exportfile)

    stop = mx.DateTime.now()
    logger.debug("Started %s, ended %s", start, stop)
    logger.debug("Script running time was %s",
                 (stop - start).strftime("%M minutes %S secs"))
コード例 #12
0
ファイル: job_runner.py プロジェクト: Narvik-kommune/cerebrum
def main(inargs=None):
    """Start the job-runner daemon with jobs from the configured file."""
    parser = make_parser()
    install_subparser(parser)
    args = parser.parse_args(inargs)

    autoconf('daemons', args)

    socket_path = args.socket
    logger.debug("job_runner args=%r", args)
    logger.debug("job runner socket=%r exists=%r",
                 socket_path, os.path.exists(socket_path))

    # This variant runs no one-shot commands: load a job config and daemonize.
    jobs = get_job_config(args.config)
    logger.info("Starting daemon with jobs from %r", jobs)
    run_daemon(socket_path, jobs)
コード例 #13
0
ファイル: ad_fullsync.py プロジェクト: unioslo/cerebrum
def main(inargs=None):
    """Run the requested AD user and/or group syncs."""
    parser = make_parser()
    install_subparser(parser)
    args = parser.parse_args(inargs)
    autoconf('cronjob', args)
    logger.debug("args: %r", args)

    # Refuse to run when neither sync was requested.
    if not (args.user_sync or args.group_sync):
        raise RuntimeError("nothing to do")

    db = Utils.Factory.get('Database')()
    db.cl_init(change_program="uio_ad_sync")

    # Run the user sync before the group sync, same as the original order.
    for enabled, sync in ((args.user_sync, user_sync),
                          (args.group_sync, group_sync)):
        if enabled:
            sync(db, args)
コード例 #14
0
def main(inargs=None):
    """Clean account affiliations; commit only when --commit is given."""
    parser = add_commit_args(
        argparse.ArgumentParser(description=__doc__), default=False)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    db.cl_init(change_program='account_affiliation_cleaner')

    clean_acc_affs(db)

    if args.commit:
        db.commit()
        logger.info("Committed all changes")
    else:
        db.rollback()
        logger.info("Dryrun, rolled back changes")
コード例 #15
0
def main(inargs=None):
    """Optionally write role info from the external SystemY database."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-r',
        dest='write_roles',
        action='store_true',
        default=False,
        help='write role info file',
    )
    parser.add_argument('--role-file', metavar='filename')
    # Connection parameters default to the configured SYS_Y settings.
    parser.add_argument('--db-user', default=cereconf.SYS_Y['db_user'])
    parser.add_argument('--db-host', default=cereconf.SYS_Y['db_host'])
    parser.add_argument('--db-service', default=cereconf.SYS_Y['db_service'])
    logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)

    sys_y = SystemY(user=args.db_user,
                    database=args.db_service,
                    host=args.db_host)

    if args.write_roles:
        write_role_info(sys_y, args.role_file)

    logger.info('Done %s', parser.prog)
コード例 #16
0
def main(inargs=None):
    """Maintain automatic groups that mirror the OU structure."""
    parser = argparse.ArgumentParser(
        description='Update automatic groups so that they reflect the '
        'OU-structure of an organization')
    parser.add_argument(
        '--perspective',
        required=True,
        type=six.text_type,
        help='Set the system perspective to fetch the OU structure from, '
        'e.g. SAP or FS.')
    parser.add_argument(
        '--prefix',
        required=True,
        type=six.text_type,
        action='append',
        default=[],
        help='Prefix for the automatic groups this script creates')
    add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    co = Factory.get('Constants')(db)
    perspective = get_constant(db, parser, co.OUPerspective, args.perspective)
    ou = Factory.get('OU')(db)
    gr = Factory.get('Group')(db)

    logger.info('Caching OUs')
    sko_by_ou_id = cache_stedkoder(ou)
    for prefix in args.prefix:
        process_prefix(db, ou, gr, co, prefix, perspective, sko_by_ou_id)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done with %s', parser.prog)
コード例 #17
0
ファイル: job_runner.py プロジェクト: unioslo/cerebrum
def main(inargs=None):
    """Run a one-shot job-runner command, dump jobs, or start the daemon."""
    parser = make_parser()
    install_subparser(parser)
    args = parser.parse_args(inargs)

    autoconf('cronjob', args)
    logger.debug("job_runner args=%r", args)
    logger.debug("job runner socket=%r exists=%r",
                 cereconf.JOB_RUNNER_SOCKET,
                 os.path.exists(cereconf.JOB_RUNNER_SOCKET))

    # Figure out whether a one-shot command was requested.
    command, c_args = None, []
    if args.command:
        command = args.command
    elif args.run_job:
        command, c_args = 'RUNJOB', [args.run_job, args.run_with_deps]
    elif args.show_job:
        command, c_args = 'SHOWJOB', [args.show_job, ]

    if command:
        logger.debug("job_runner running command=%r, args=%r, timeout=%r",
                     command, c_args, args.timeout)
        print(run_command(command, c_args, args.timeout))
        raise SystemExit(0)

    # Not running a command, so we'll need a config:
    scheduled_jobs = get_job_config(args.config)

    if args.dump_jobs is not None:
        print("Showing jobs in {0!r}".format(scheduled_jobs))
        dump_jobs(scheduled_jobs, args.dump_jobs)
        raise SystemExit(0)

    logger.info("Starting daemon with jobs from %r", scheduled_jobs)
    run_daemon(scheduled_jobs)
コード例 #18
0
def main(inargs=None):
    """Export skype data for entities with a spread to the output file."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--out', dest='outfile', required=True)
    parser.add_argument('-s', '--spread', default=default_spread)
    parser.add_argument('-a', '--authoritative_source_system',
                        dest='source', default=default_source)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    # Log the effective settings before doing any work.
    for fmt, value in (("outfile:%s", args.outfile),
                       ("spread: %s", args.spread),
                       ("source: %s", args.source)):
        logger.debug(fmt, value)

    db = Factory.get('Database')()
    db.cl_init(change_program='skype_export')

    write_file(get_data(db, args.spread, args.source), args.outfile)
コード例 #19
0
def main(inargs=None):
    """Parse arguments and run corresponding functions

    :param inargs: arguments to the parser
    """
    logger = logging.getLogger(__name__)

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-g', '--grace',
        type=int,
        default=0,
        help="Grace period for person affiliations in days (default: 0)")
    add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    logger.info("Starting program '%s'", parser.prog)
    logger.info('args: %r', args)

    database = Factory.get('Database')()
    database.cl_init(change_program=parser.prog)

    # Cache the default file group of each posix user, then run the cleanup.
    logger.info('Caching default file groups of users')
    remove_persons(database, logger, cache_posix_dfgs(database), args.grace)

    if args.commit:
        logger.info('Committing changes to database')
        database.commit()
    else:
        logger.info('Rolling back changes')
        database.rollback()

    logger.info("Finished program '%s'", parser.prog)
コード例 #20
0
def main(inargs=None):
    """Notify group admins about expiring groups and track notifications.

    Groups expiring in two windows (today..limit1 and limit1..limit2) are
    found; groups already notified are filtered out via an expire-notify
    trait; admins of the remaining groups are mailed; and the trait is
    removed again for groups whose expire date has since been extended.
    """
    # Parse arguments
    parser = make_parser()

    # Setup logging
    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)

    # Do the main logic of the script
    logger.info("Start %s", parser.prog)
    logger.debug("args: %s", repr(args))

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    co = Factory.get('Constants')(db)

    # Find owners of groups expiring between today and limit_1, and groups
    # expiring between limit_1 and limit_2
    today = datetime.date.today()
    limit_1 = today + datetime.timedelta(days=args.limit1)
    limit_2 = today + datetime.timedelta(days=args.limit2)
    logger.info("Finding expiring groups")
    soon_expiring, later_expiring = get_expiring_groups(
        db,
        co,
        today,
        limit_1,
        limit_2
    )

    # Filter out groups that have already been notified
    gr = Factory.get('Group')(db)

    def filter_with_trait(groups, numval=NotSet):
        # Return `groups` minus those that already carry the
        # expire-notify trait.  The NotSet default presumably matches
        # any numval in gr.list_traits() -- TODO confirm.
        notified_groups = set(
            row['entity_id'] for row in
            gr.list_traits(code=co.trait_group_expire_notify,
                           numval=numval) if
            row['entity_type'] == co.entity_group
        )
        return groups - notified_groups

    logger.info("Filtering out groups that have been notified")
    # Remove those that have gotten the second warning from the groups in the
    # today < x < limit_1 period
    soon_expiring = filter_with_trait(soon_expiring, 2)
    logger.info("Found %d groups expiring before %s",
                len(soon_expiring), str(limit_1))

    # Remove those that have gotten any warning from the groups in the
    # limit_1 < x < limit_2 period
    later_expiring = filter_with_trait(later_expiring)
    logger.info("Found %d groups expiring between %s and %s",
                len(later_expiring), str(limit_1), str(limit_2))

    # Set traits on the groups whose admins have been notified
    # (numval=2: second/final warning, numval=1: first warning).
    logger.info("Setting traits on groups to notify")
    for group_id in soon_expiring:
        gr.clear()
        gr.find(group_id)
        gr.populate_trait(co.trait_group_expire_notify, numval=2)
        gr.write_db()
        logger.debug("Set trait (numval=2) for group id %s", group_id)
    for group_id in later_expiring:
        gr.clear()
        gr.find(group_id)
        gr.populate_trait(co.trait_group_expire_notify, numval=1)
        gr.write_db()
        logger.debug("Set trait (numval=1) for group id %s", group_id)

    # Get the emails of the group admins and notify them
    if soon_expiring:
        logger.info("Finding emails to admins")
        soon = get_admins_groups_emails(db, soon_expiring)
        logger.info("Notifying admins of groups expiring before %s",
                    str(limit_1))
        send_mails(args,
                   soon,
                   TRANSLATION[DEFAULT_LANGUAGE],
                   timestamp_title(TRANSLATION[DEFAULT_LANGUAGE]['title']))

    if later_expiring:
        logger.info("Finding emails to admins")
        later = get_admins_groups_emails(db, later_expiring)
        logger.info("Notifying admins of groups expiring between %s and %s",
                    str(limit_1), str(limit_2))
        send_mails(args,
                   later,
                   TRANSLATION[DEFAULT_LANGUAGE],
                   timestamp_title(TRANSLATION[DEFAULT_LANGUAGE]['title']))

    # Remove traits for notified groups where the expire date has been
    # extended, so that the group can be notified again in the future.
    logger.info("Cleaning up traits for groups where admins have taken action")
    groups_with_expiring_trait = list(i['entity_id'] for i in
                                      gr.list_traits(
                                          code=co.trait_group_expire_notify)
                                      if i['entity_type'] == co.entity_group)
    for group_id in groups_with_expiring_trait:
        gr.clear()
        gr.find(group_id)
        if gr.expire_date > limit_2:
            gr.delete_trait(co.trait_group_expire_notify)
            logger.debug("Removed trait for group id %s", group_id)
    # Commit or rollback
    if args.commit:
        logger.info("Committing changes")
        db.commit()
    else:
        db.rollback()
        logger.info("Changes rolled back (dryrun)")

    logger.info("DONE %s", parser.prog)
コード例 #21
0
def main():
    """Process queued bofhd requests; 'move' requests need extra FS files.

    Arguments are parsed in two phases: parse_known_args() first learns
    which request types were selected, because several file arguments are
    only required when 'move' requests are to be processed.
    """
    global DEBUG

    parser = argparse.ArgumentParser()
    parser.add_argument('-d',
                        '--debug',
                        dest='debug',
                        action='store_true',
                        help='Turn on debugging')
    parser.add_argument(
        '-t',
        '--type',
        dest='types',
        action='append',
        choices=['email', 'sympa', 'move', 'quarantine', 'delete'],
        required=True)
    parser.add_argument('-m',
                        '--max',
                        dest='max_requests',
                        default=999999,
                        help='Perform up to this number of requests',
                        type=int)
    parser.add_argument('-p',
                        '--process',
                        dest='process',
                        action='store_true',
                        help='Perform the queued operations')
    # Phase one: peek at the chosen types without failing on the
    # not-yet-defined move_student options.
    args, _rest = parser.parse_known_args()

    # The FS file options below are mandatory only for 'move' requests.
    has_move_arg = 'move' in args.types
    arg_group = parser.add_argument_group('Required for move_student requests')
    arg_group.add_argument('--ou-perspective',
                           dest='ou_perspective',
                           default='perspective_fs')
    arg_group.add_argument('--emne-info-file',
                           dest='emne_info_file',
                           default=None,
                           required=has_move_arg)
    arg_group.add_argument('--studconfig-file',
                           dest='studconfig_file',
                           default=None,
                           required=has_move_arg)
    arg_group.add_argument('--studie-progs-file',
                           dest='studieprogs_file',
                           default=None,
                           required=has_move_arg)
    arg_group.add_argument('--student-info-file',
                           dest='student_info_file',
                           default=None,
                           required=has_move_arg)

    logutils.options.install_subparser(parser)
    # Phase two: full parse with all options defined.
    args = parser.parse_args()
    logutils.autoconf('bofhd_req', args)

    DEBUG = args.debug

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    if args.process:
        if has_move_arg:
            logger.info('Processing move requests')
            # Asserting that a legal value is assigned to args.ou_perspective
            args.ou_perspective = get_constant(db, parser, const.OUPerspective,
                                               args.ou_perspective)
            msp = process_requests.MoveStudentProcessor(
                db,
                const,
                args.ou_perspective,
                args.emne_info_file,
                args.studconfig_file,
                args.studieprogs_file,
                default_spread=default_spread,
            )
            # Convert move_student requests into move_user requests
            msp.process_requests(args.student_info_file)

        logger.info('Processing regular requests')
        rp = process_requests.RequestProcessor(db, const)
        rp.process_requests(operations_map, args.types, args.max_requests)

    logger.info('Done %s', parser.prog)
コード例 #22
0
ファイル: join_persons.py プロジェクト: unioslo/cerebrum
def main():
    """Join two person entities, then delete the old entity."""
    parser = argparse.ArgumentParser(
        description='''Merges all information about a person identified by 
        entity_id into the new person, not overwriting existing values in 
        new person.  The old_person entity is permanently removed from the
         database.''')

    # Add commit/dryrun arguments
    parser = add_commit_args(parser)

    parser.add_argument('--old', help='Old entity_id', required=True, type=int)
    parser.add_argument('--new', help='New entity_id', required=True, type=int)
    # The optional transfer flags all share the same shape.
    for flag, dest, text in (
            ('--pq-uio', 'with_uio_pq', "Transfer uio-printerquotas"),
            ('--pq-uia', 'with_uia_pq', 'Transfer uia-printerquotas'),
            ('--ephorte-uio', 'with_uio_ephorte',
             'transfer uio-ephorte roles'),
            ('--voip-uio', 'with_uio_voip', 'transfer voip objects')):
        parser.add_argument(flag, dest=dest, help=text, action='store_true')

    logutils.options.install_subparser(parser)
    args = parser.parse_args()
    logutils.autoconf('tee', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)

    #
    # Initialize globals
    #
    db = Factory.get('Database')()
    db.cl_init(change_program="join_persons")

    old_person = Factory.get('Person')(db)
    old_person.find(args.old)
    new_person = Factory.get('Person')(db)
    new_person.find(args.new)

    person_join(old_person, new_person, args.with_uio_pq, args.with_uia_pq,
                args.with_uio_ephorte, args.with_uio_voip, db)
    old_person.delete()

    if args.commit:
        db.commit()
        logger.info('Changes were committed to the database')
    else:
        db.rollback()
        logger.info('Dry run. Changes to the database were rolled back')

    logger.info('Done with script %s', parser.prog)
コード例 #23
0
def main():
    """Process queued bofhd requests; 'move' requests need extra FS files.

    Uses a two-phase parse: parse_known_args() first determines the
    selected types, since several file options are only required when
    'move' requests are to be processed.
    """
    global DEBUG

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-d', '--debug',
        dest='debug',
        action='store_true',
        help='Turn on debugging')
    parser.add_argument(
        '-t', '--type',
        dest='types',
        action='append',
        choices=['email', 'sympa', 'move', 'quarantine', 'delete'],
        required=True)
    parser.add_argument(
        '-m', '--max',
        dest='max_requests',
        default=999999,
        help='Perform up to this number of requests',
        type=int)
    parser.add_argument(
        '-p', '--process',
        dest='process',
        action='store_true',
        help='Perform the queued operations')
    # Phase one: peek at the chosen types before the move-specific options
    # are defined.
    args, _rest = parser.parse_known_args()

    # The FS file options below are mandatory only for 'move' requests.
    has_move_arg = 'move' in args.types
    arg_group = parser.add_argument_group('Required for move_student requests')
    arg_group.add_argument(
        '--ou-perspective',
        dest='ou_perspective',
        default='perspective_fs'
    )
    arg_group.add_argument(
        '--emne-info-file',
        dest='emne_info_file',
        default=None,
        required=has_move_arg
    )
    arg_group.add_argument(
        '--studconfig-file',
        dest='studconfig_file',
        default=None,
        required=has_move_arg
    )
    arg_group.add_argument(
        '--studie-progs-file',
        dest='studieprogs_file',
        default=None,
        required=has_move_arg
    )
    arg_group.add_argument(
        '--student-info-file',
        dest='student_info_file',
        default=None,
        required=has_move_arg
    )

    logutils.options.install_subparser(parser)
    # Phase two: full parse with all options defined.
    args = parser.parse_args()
    logutils.autoconf('bofhd_req', args)

    DEBUG = args.debug

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)

    if args.process:
        if has_move_arg:
            # Asserting that a legal value is assigned to args.ou_perspective
            args.ou_perspective = get_constant(db, parser, const.OUPerspective,
                                               args.ou_perspective)
            msp = process_requests.MoveStudentProcessor(
                db,
                const,
                args.ou_perspective,
                args.emne_info_file,
                args.studconfig_file,
                args.studieprogs_file,
                default_spread=default_spread,
            )
            # Convert move_student requests into move_user requests
            msp.process_requests(args.student_info_file)

        rp = process_requests.RequestProcessor(db, const)
        rp.process_requests(operations_map, args.types, args.max_requests)