def main():
    """Parse command line options and write a passwd/shadow dump."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--user_spread',
        dest='user_spread',
        required=True)
    parser.add_argument(
        '-p', '--passwd',
        dest='passwd_file',
        required=True,
        metavar='FILE',
        help='Write a passwd file to %(metavar)s')
    parser.add_argument(
        '-s', '--shadow',
        dest='shadow_file',
        default=None,
        metavar='FILE',
        help='Optionally split out passwords to %(metavar)s')
    parser.add_argument(
        '-a', '--auth_method',
        dest='auth_method',
        default=None,
        help=("If not given, passwords are replaced with 'x'"
              " in the password file"))
    parser.add_argument(
        '--eof',
        dest='e_o_f',
        action='store_true',
        help='End dump file with E_O_F to mark successful completion')

    Cerebrum.logutils.options.install_subparser(parser)
    opts = parser.parse_args()
    Cerebrum.logutils.autoconf('cronjob', opts)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', opts)

    database = Utils.Factory.get('Database')()
    const = Utils.Factory.get('Constants')(database)

    # Map the raw option strings to Cerebrum constants
    if opts.auth_method:
        opts.auth_method = get_constant(database, parser,
                                        const.Authentication,
                                        opts.auth_method)
    opts.user_spread = get_constant(database, parser, const.Spread,
                                    opts.user_spread)

    exporter = Passwd(opts.auth_method, opts.user_spread)
    exporter.write_passwd(opts.passwd_file, opts.shadow_file, opts.e_o_f)

    logger.info('passwd written to %s', opts.passwd_file)
    if opts.shadow_file:
        logger.info('shadow written to %s', opts.shadow_file)
    logger.info('Done %s', parser.prog)
def main(inargs=None):
    """Generate the report of accounts without affiliations."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-o', '--output',
        default='-',
        type=argparse.FileType('w'),
        metavar='FILE',
        help='The file to print the report to, defaults to stdout')
    parser.add_argument(
        '-f', '--output-format',
        default=DEFAULT_FORMAT,
        choices=FORMATS.keys(),
        help='Output file format, defaults to %(default)s')
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        type=codec_type,
        default=DEFAULT_ENCODING,
        help="Output file encoding, defaults to %(default)s")
    spread_arg = parser.add_argument(
        '-s', '--spread',
        default=DEFAULT_SPREAD,
        metavar='SPREAD',
        help='Spread to filter users by, defaults to %(default)s')
    parser.add_argument(
        '-a', '--check-accounts',
        default=False,
        action='store_true',
        help='Find accounts without affiliations, but where the owner'
             ' has affiliations.  The default is to find accounts of persons'
             ' without affiliations')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    # Set up the database connection
    database = Factory.get('Database')()
    const = Factory.get('Constants')(database)

    spread = get_constant(database, parser, const.Spread, args.spread,
                          spread_arg)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    logger.info("spread: %s", text_type(spread))

    # Collect the accounts (or persons) that lack affiliations
    no_aff = get_accs_wo_affs(database, spread, args.check_accounts)

    # Render the report in the requested output format
    render = FORMATS[args.output_format]
    render(args.output, args.codec, no_aff, args.check_accounts)
    args.output.flush()

    # Close the stream unless we are writing to stdout
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
# Beispiel #3 (score: 0) -- example separator from the code-listing export
def main(inargs=None):
    """Export host netgroups as an ldif file."""
    parser = argparse.ArgumentParser(
        description='Generate host netgroup ldif')
    parser.add_argument(
        "-l", "--ldif",
        dest="filename",
        required=True,
        metavar="<file>",
        help="Write ldif data to %(metavar)s")
    spread_arg = parser.add_argument(
        "-H", "--host-netgroup-spread",
        dest="spread",
        required=True,
        metavar="<spread>",
        help="Filter host netgroups by %(metavar)s")
    zone_arg = parser.add_argument(
        "-z", "--zone",
        dest="zone",
        required=True,
        metavar="<zone>",
        help="Zone to use for host netgroups.")

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)

    Cerebrum.logutils.autoconf('cronjob', args)
    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    database = Factory.get('Database')()
    const = Factory.get('Constants')(database)

    # Resolve option strings into Cerebrum constants
    netgroup_spread = get_constant(database, parser, const.Spread,
                                   args.spread, spread_arg)
    dns_zone = get_constant(database, parser, const.DnsZone, args.zone,
                            zone_arg)

    exporter = HostGroupExport(database)
    exporter.main(args.filename, netgroup_spread, dns_zone)

    logger.info('Done %s', parser.prog)
def main(inargs=None):
    """Build and write the person/account report."""
    parser = argparse.ArgumentParser(
        description="Generate a report on persons and accounts")

    parser.add_argument(
        '-f', '--file',
        dest='output',
        metavar='FILE',
        type=argparse.FileType('w'),
        default='-',
        help='Output file for report, defaults to stdout')
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        default=DEFAULT_ENCODING,
        type=codec_type,
        help="Output file encoding, defaults to %(default)s")

    id_source_arg = parser.add_argument(
        '-s', '--fnr-systems',
        dest='id_source_systems',
        type=lambda value: [part.strip() for part in value.split(',')],
        help='Ordered, comma-separated list of external id preference,'
             ' defaults to cereconf.SYSTEM_LOOKUP_ORDER')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)

    database = Factory.get("Database")()
    const = Factory.get("Constants")(database)

    # Fall back to the configured lookup order if no systems were given
    lookup_order = args.id_source_systems or cereconf.SYSTEM_LOOKUP_ORDER
    source_systems = [get_constant(database, parser,
                                   const.AuthoritativeSystem, name,
                                   id_source_arg)
                      for name in lookup_order]
    logger.debug("source_systems: %r", source_systems)

    account_iter = get_account_data(database, const.externalid_fodselsnr,
                                    source_systems)

    write_report(args.output, args.codec, account_iter)

    args.output.flush()
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
def main(inargs=None):
    """Entry point for the person/account report script."""
    parser = argparse.ArgumentParser(
        description="Generate a report on persons and accounts")

    parser.add_argument('-f', '--file',
                        dest='output',
                        metavar='FILE',
                        type=argparse.FileType('w'),
                        default='-',
                        help='Output file for report, defaults to stdout')
    parser.add_argument('-e', '--encoding',
                        dest='codec',
                        default=DEFAULT_ENCODING,
                        type=codec_type,
                        help="Output file encoding, defaults to %(default)s")

    id_source_arg = parser.add_argument(
        '-s', '--fnr-systems',
        dest='id_source_systems',
        type=lambda raw: [chunk.strip() for chunk in raw.split(',')],
        help='Ordered, comma-separated list of external id preference,'
             ' defaults to cereconf.SYSTEM_LOOKUP_ORDER')

    Cerebrum.logutils.options.install_subparser(parser)
    opts = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', opts)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", opts)

    db = Factory.get("Database")()
    co = Factory.get("Constants")(db)

    # Resolve each system name into an AuthoritativeSystem constant
    source_systems = []
    for value in (opts.id_source_systems or cereconf.SYSTEM_LOOKUP_ORDER):
        source_systems.append(
            get_constant(db, parser, co.AuthoritativeSystem, value,
                         id_source_arg))
    logger.debug("source_systems: %r", source_systems)

    account_iter = get_account_data(db, co.externalid_fodselsnr,
                                    source_systems)

    write_report(opts.output, opts.codec, account_iter)

    opts.output.flush()
    if opts.output is not sys.stdout:
        opts.output.close()

    logger.info('Report written to %s', opts.output.name)
    logger.info('Done with script %s', parser.prog)
def main(inargs=None):
    """Report accounts with a given spread but no primary email address."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-o', '--output',
        default='-',
        type=argparse.FileType('w'),
        metavar='FILE',
        help='output file for report, defaults to stdout')
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        type=codec_type,
        default=DEFAULT_ENCODING,
        help="output file encoding, defaults to %(default)s")
    spread_arg = parser.add_argument(
        '-s', '--spread',
        dest='spread',
        required=True,
        help='name of spread to filter accounts by')
    parser.add_argument(
        '-i', '--include-expired',
        default=False,
        action='store_true',
        help='include expired accounts')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    database = Factory.get('Database')()
    const = Factory.get('Constants')(database)

    spread = get_constant(database, parser, const.Spread, args.spread,
                          spread_arg)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    logger.info("spread: %s", text_type(spread))

    accounts = list(
        get_accounts_wo_primary_addr(database, spread, args.include_expired))

    title = ('Accounts without primary email and spread=%s'
             % text_type(spread))
    write_html_report(args.output, args.codec, title, accounts)

    args.output.flush()
    # Only close the stream when it is a real file
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
def main(inargs=None):
    """Write an HTML report of persons with a given affiliation status.

    :param inargs: argument list to parse (defaults to ``sys.argv``)
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-o', '--output',
        metavar='FILE',
        type=argparse.FileType('w'),
        default='-',
        help='output file for report, defaults to stdout')
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        default=DEFAULT_ENCODING,
        type=codec_type,
        help="output file encoding, defaults to %(default)s")
    aff_arg = parser.add_argument(
        '--aff-status',
        dest='status',
        required=True,
        help='Lists persons with this affiliation status')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    # Fix: use text component names for Factory.get() -- the rest of this
    # file uses text strings, and bytes keys are wrong under Python 3.
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    aff_status = get_constant(db, parser, co.PersonAffStatus, args.status,
                              aff_arg)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    logger.info("aff-status: %s", text_type(aff_status))

    persons = persons_with_aff_status(db, aff_status)
    # Sort by OU (sko), then account name, for a stable, readable report
    sorted_persons = sorted(persons,
                            key=lambda x: (x['ou_sko'],
                                           x['account_name']))

    write_html_report(args.output, args.codec, sorted_persons, aff_status)

    args.output.flush()
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
# Beispiel #8 (score: 0) -- example separator from the code-listing export
def main(inargs=None):
    """Write an HTML report of persons with a given affiliation status.

    :param inargs: argument list to parse (defaults to ``sys.argv``)
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o',
                        '--output',
                        metavar='FILE',
                        type=argparse.FileType('w'),
                        default='-',
                        help='output file for report, defaults to stdout')
    parser.add_argument('-e',
                        '--encoding',
                        dest='codec',
                        default=DEFAULT_ENCODING,
                        type=codec_type,
                        help="output file encoding, defaults to %(default)s")
    aff_arg = parser.add_argument(
        '--aff-status',
        dest='status',
        required=True,
        help='Lists persons with this affiliation status')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    # Fix: use text component names for Factory.get() -- the rest of this
    # file uses text strings, and bytes keys are wrong under Python 3.
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    aff_status = get_constant(db, parser, co.PersonAffStatus, args.status,
                              aff_arg)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    logger.info("aff-status: %s", text_type(aff_status))

    persons = persons_with_aff_status(db, aff_status)
    # Sort by OU (sko), then account name, for a stable, readable report
    sorted_persons = sorted(persons,
                            key=lambda x: (x['ou_sko'], x['account_name']))

    write_html_report(args.output, args.codec, sorted_persons, aff_status)

    args.output.flush()
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
# Beispiel #9 (score: 0) -- example separator from the code-listing export
def main(inargs=None):
    """Diff person data between two authoritative source systems."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-o', '--output',
        metavar='FILE',
        type=argparse.FileType('w'),
        default='-',
        help='output file for report, defaults to stdout')
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        default=DEFAULT_ENCODING,
        type=codec_type,
        help="output file encoding, defaults to %(default)s")
    diff_arg = parser.add_argument(
        'source_system',
        nargs=2,
        metavar='SYSTEM',
        help='source system to diff')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('console', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)

    database = Factory.get("Database")()
    const = Factory.get("Constants")(database)

    # Both positional arguments must name valid source systems
    systems = [get_constant(database, parser, const.AuthoritativeSystem,
                            name, diff_arg)
               for name in args.source_system]
    logger.debug("systems: %r", systems)

    output_diff(args.output, args.codec, generate_diff(database, *systems))

    args.output.flush()
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
# Beispiel #10 (score: 0) -- example separator from the code-listing export
def main(inargs=None):
    """Maintain the automatic groups mirroring the OU structure."""
    parser = argparse.ArgumentParser(
        description='Update automatic groups so that they reflect the '
                    'OU-structure of an organization')
    parser.add_argument(
        '--perspective',
        required=True,
        type=six.text_type,
        help='Set the system perspective to fetch the OU structure from, '
             'e.g. SAP or FS.')
    parser.add_argument(
        '--prefix',
        required=True,
        type=six.text_type,
        action='append',
        default=[],
        help='Prefix for the automatic groups this script creates')
    add_commit_args(parser)

    logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    logutils.autoconf('cronjob', args)
    database = Factory.get('Database')()
    database.cl_init(change_program=parser.prog)
    const = Factory.get('Constants')(database)

    perspective = get_constant(database, parser, const.OUPerspective,
                               args.perspective)
    ou = Factory.get('OU')(database)
    group = Factory.get('Group')(database)

    logger.info('Caching OUs')
    ou_to_sko = cache_stedkoder(ou)
    # Process every requested group prefix against the cached OU tree
    for prefix in args.prefix:
        process_prefix(database, ou, group, const, prefix, perspective,
                       ou_to_sko)

    # Commit only when explicitly requested; dry-run rolls back
    if args.commit:
        logger.info('Committing changes')
        database.commit()
    else:
        logger.info('Rolling back changes')
        database.rollback()
    logger.info('Done with %s', parser.prog)
def main(inargs=None):
    """Dump the requested quarantines as a JSON report."""
    parser = argparse.ArgumentParser(
        description="Export quarantines in JSON-format to file")
    parser.add_argument(
        '--outfile',
        dest='output',
        type=argparse.FileType('w'),
        default='-',
        metavar='FILE',
        help='Output file for report, defaults to stdout')
    q_arg = parser.add_argument(
        '--quarantines',
        nargs='+',
        required=True,
        help="Quarantines that should be exported (i.e. 'radius vpn')")

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    database = Factory.get('Database')()
    const = Factory.get('Constants')(database)
    # Validate every requested quarantine name up front
    quarantines = [get_constant(database, parser, const.Quarantine, name,
                                q_arg)
                   for name in args.quarantines]

    logger.info('Start of script %s', parser.prog)
    logger.debug("quarantines: %r", quarantines)

    quarantines = codes_to_human(database,
                                 get_quarantines(database, quarantines))
    write_report(args.output, quarantines)

    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
# Beispiel #12 (score: 0) -- example separator from the code-listing export
def main(inargs=None):
    """Write the selected quarantines to a JSON file."""
    parser = argparse.ArgumentParser(
        description="Export quarantines in JSON-format to file")
    parser.add_argument('--outfile',
                        dest='output',
                        type=argparse.FileType('w'),
                        default='-',
                        metavar='FILE',
                        help='Output file for report, defaults to stdout')
    q_arg = parser.add_argument(
        '--quarantines',
        nargs='+',
        required=True,
        help="Quarantines that should be exported (i.e. 'radius vpn')")

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    # Resolve each name into a Quarantine constant
    quarantines = []
    for name in args.quarantines:
        quarantines.append(get_constant(db, parser, co.Quarantine, name,
                                        q_arg))

    logger.info('Start of script %s', parser.prog)
    logger.debug("quarantines: %r", quarantines)

    quarantines = codes_to_human(db, get_quarantines(db, quarantines))
    write_report(args.output, quarantines)

    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
def main(inargs=None):
    """Report person/user and user/group mismatches.

    Builds one or both reports (accounts of persons with selected
    affiliations, and users missing from affiliation->group rules), then
    writes the result to a file and/or sends it by email.

    :param inargs: argument list to parse (defaults to ``sys.argv``)
    """
    parser = argparse.ArgumentParser(description='')

    source_args = parser.add_argument_group('Report parameters')
    ss_arg = source_args.add_argument(
        '-s', '--source-system',
        required=True)
    affs_arg = source_args.add_argument(
        '-u', '--person-user-mismatch',
        action='append',
        default=[],
        dest='user_select',
        help='Report on user accounts for persons with %(metavar)s',
        metavar='AFFILIATION')
    rule_arg = source_args.add_argument(
        '-g', '--user-group-mismatch',
        action='append',
        default=[],
        dest='group_select',
        type=group_rule,
        help='Report on users missing for the affiliation -> group rule',
        metavar='AFF:status:sko:group')

    file_args = parser.add_argument_group('Report file')
    file_args.add_argument(
        '-f', '--output-file',
        type=argparse.FileType('w'),
        default=None,
        help="Write report to file, '-' for stdout")
    file_args.add_argument(
        '-e', '--output-encoding',
        type=codec_type,
        default=DEFAULT_FILE_ENCODING,
        help="Write file using the given encoding (default: %(default)s)")

    mail_args = parser.add_argument_group('Report email')
    to_arg = mail_args.add_argument(
        '--to',
        action='append',
        default=[],
        dest='mail_to',
        type=email_address,
        help="Send an email report to %(metavar)s",
        metavar='ADDR')
    cc_arg = mail_args.add_argument(
        '--cc',
        action='append',
        default=[],
        dest='mail_cc',
        type=email_address,
        help="Also send email report to %(metavar)s",
        metavar='ADDR')
    from_arg = mail_args.add_argument(
        '--from',
        default=DEFAULT_MAIL_FROM,
        dest='mail_from',
        type=email_address,
        help="Send reports from %(metavar)s",
        metavar='ADDR')
    subj_arg = mail_args.add_argument(
        '--subject',
        dest='mail_subject',
        type=UnicodeType(),
        # Fix: help/metavar were copy-pasted from --cc; --subject takes a
        # subject line, not an address.
        help="Use %(metavar)s as subject for the email report",
        metavar='SUBJECT')
    mail_args.add_argument(
        '--encoding',
        dest='mail_codec',
        type=codec_type,
        default=DEFAULT_MAIL_ENCODING,
        # Fix: help text was missing its closing parenthesis.
        help="Charset to use for email (default: %(default)s)")

    def check_mail_args(args):
        """Validate mail options; return True if an email should be sent.

        If any mail option was set, then --to, --from and --subject must
        all have values; otherwise a parser error is raised.
        """
        args_set = [arg for arg in (to_arg, from_arg, subj_arg, cc_arg)
                    if getattr(args, arg.dest) != arg.default]
        args_missing = [arg for arg in (to_arg, from_arg, subj_arg)
                        if not bool(getattr(args, arg.dest))]
        if len(args_set) > 0 and len(args_missing) > 0:
            parser.error(argparse.ArgumentError(
                args_set[0],
                "missing {0}".format(
                    ', '.join('/'.join(m.option_strings)
                              for m in args_missing))))
        return len(args_set) > 0

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    send_mail = check_mail_args(args)

    if not any((args.user_select, args.group_select)):
        parser.error("No selections given - nothing to do")

    if not any((send_mail, args.output_file)):
        parser.error("No destination for report - nothing to do")

    db = Factory.get("Database")()
    co = Factory.get("Constants")(db)

    # Resolve all option strings into Cerebrum constants before doing work
    source_system = get_constant(db, parser, co.AuthoritativeSystem,
                                 args.source_system, ss_arg)
    user_affs = [get_constant(db, parser, co.PersonAffiliation, c, affs_arg)
                 for c in args.user_select]
    group_rules = make_rules(db, parser, args.group_select, rule_arg)

    # Start of script
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)
    logger.info("Affiliations for user report: %r", args.user_select)
    logger.info("Rules for group report: %r", args.group_select)
    logger.info("Sending email report: %r", send_mail)
    if send_mail:
        logger.debug("mail to      : %r", args.mail_to)
        logger.debug("mail cc      : %r", args.mail_cc)
        logger.debug("mail from    : %r", args.mail_from)
        logger.debug("mail subject : %r", args.mail_subject)

    reports = []

    if group_rules:
        reports.append(make_group_report(db, source_system, group_rules))

    if user_affs:
        reports.append(make_user_report(db, source_system, user_affs))

    if not reports:
        logger.warning("Nothing to report")

    report = format_report("\n".join(reports))

    if reports and send_mail:
        send_report(args, report)

    if args.output_file:
        write_report(args.output_file, report)
        if args.output_file is not sys.stdout:
            args.output_file.close()
        logger.info("Wrote report to %r", args.output_file.name)
def main(inargs=None):
    """Report persons owning multiple users, to file and optionally email."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)
    parser.add_argument(
        '-o', '--output',
        default='-',
        type=argparse.FileType('w'),
        metavar='FILE',
        help='output file for report, defaults to stdout')
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        type=codec_type,
        default=DEFAULT_ENCODING,
        help="output file encoding, defaults to %(default)s")

    parser.add_argument(
        '--min',
        dest='minimum',
        type=lambda value: abs(int(value)),
        default=1,
        metavar='MIN',
        help='Report persons with more than %(metavar)s users'
             ' (default: %(default)s)')
    parser.add_argument(
        '--max',
        dest='maximum',
        type=lambda value: abs(int(value)),
        default=None,
        metavar='MAX',
        help='Report persons with less than %(metavar)s users'
             ' (default: no limit)')

    source_arg = parser.add_argument(
        '--source_systems',
        default=DEFAULT_SOURCE,
        help="comma separated list of source systems to search through,"
             " defaults to %(default)s")

    mail_to_arg = parser.add_argument(
        '-t', '--mail-to',
        dest='mail_to',
        metavar='ADDR',
        help="Send an email report to %(metavar)s")
    mail_from_arg = parser.add_argument(
        '-f', '--mail-from',
        dest='mail_from',
        metavar='ADDR',
        help="Send reports from %(metavar)s")

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    # Require mail_to and mail_from, or neither
    if bool(args.mail_from) ^ bool(args.mail_to):
        if args.mail_to:
            apply_to, missing = mail_to_arg, mail_from_arg
        else:
            apply_to, missing = mail_from_arg, mail_to_arg
        parser.error(argparse.ArgumentError(
            apply_to,
            "Must set {0} as well".format('/'.join(missing.option_strings))))

    database = Factory.get('Database')()
    const = Factory.get('Constants')(database)

    systems = [get_constant(database, parser, const.AuthoritativeSystem,
                            code, source_arg)
               for code in args.source_systems.split(',')]

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    logger.debug("source_systems: %r", systems)

    stats = collections.defaultdict(int)
    persons = list(
        get_persons_by_sko(database, systems, args.minimum, args.maximum,
                           stats))

    write_html_report(args.output, args.codec, persons, stats, args.minimum,
                      args.maximum)

    args.output.flush()
    if args.output is not sys.stdout:
        args.output.close()
    logger.info('Report written to %s', args.output.name)

    # Optionally mail out a plain-text version of the same report
    if args.mail_to:
        subject = "Report from %s" % parser.prog
        body = make_email_report(persons, args.minimum, args.maximum, stats)
        logger.debug('Sending report to %r (%r)', args.mail_to, subject)
        sendmail(args.mail_to, args.mail_from, subject, body)

    logger.info('Done with script %s', parser.prog)
# Beispiel #15 (score: 0) -- example separator from the code-listing export
def main(inargs=None):
    """Generate the scientific employments XML file.

    Reads a person (PAGA) input file, filters the persons that qualify by
    employment type, and writes the result as XML.

    :param inargs: argument list to parse (defaults to ``sys.argv``)
    :raises IOError: if the input file does not exist
    :raises ValueError: if the output filename is empty
    """
    parser = argparse.ArgumentParser(
        description="Generate a scientific employments XML file", )
    parser.add_argument(
        '-p',
        '--person-file',
        dest='in_filename',
        required=True,
        help='Read and import persons from %(metavar)s (%(default)s)',
        metavar='<filename>',
    )
    parser.add_argument(
        '-o',
        '--out-file',
        dest='out_filename',
        required=True,
        help='Write XML file to %(metavar)s (%(default)s)',
        metavar='<filename>',
    )
    aff_status_arg = parser.add_argument(
        '-a',
        '--aff-status',
        dest='aff_status',
        action='append',
        help=('Add a person affiliation status to be included in the output.'
              'The argument can be repeated. If no aff-status arguments are '
              'given, %s will be used as a default' % (default_aff_status, )),
        metavar='<aff>',
    )

    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    aff_status = [
        get_constant(db, parser, co.PersonAffStatus, v, aff_status_arg)
        for v in (args.aff_status or (default_aff_status, ))
    ]
    # Fix: map() is lazy on Python 3, so %r would log "<map object ...>"
    # instead of the affiliation names -- use a list comprehension.
    logger.info("Affiliations: %r", [six.text_type(s) for s in aff_status])

    person_file = args.in_filename
    out_file = args.out_filename

    if not os.path.exists(person_file):
        raise IOError("Input file %r does not exist" % (person_file, ))
    if not out_file:
        raise ValueError("Invalid output filename %r" % (out_file, ))

    # generate personlist from BAS
    persons = list(get_persons(db, aff_status))

    # read paga file, grouping employment rows by fnr
    employee_data = {}
    for d in read_employee_data(person_file):
        employee_data.setdefault(d['fnr'], []).append(d)

    # Select all persons that qualify according to paga_data
    logger.info("Filtering by employment type...")
    qualified_list = list(filter_employees(persons, employee_data))
    logger.info("Found %d qualified", len(qualified_list))

    # write xml file
    write_xml(qualified_list, out_file)
    logger.info("Wrote employee groups to %r", out_file)
# Beispiel #16 (score: 0) -- example separator from the code-listing export
def main():
    """Parse command line arguments and sync HR data to FS."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter
    )
    parser.add_argument(
        '-p', '--person-affiliations',
        dest='person_affs',
        action='append',
        required=True,
        help='List of person affiliations to use. On the form <affiliation> '
             'or <affiliation>/<status>. '
             'affiliation_ansatt/affiliation_status_ansatt_vit'
    )
    parser.add_argument(
        '-f', '--fagperson-affiliation',
        dest='fagperson_affs',
        action='append',
        required=True,
        help='TODO Fagperson aff'
    )
    parser.add_argument(
        '-a', '--authoritative-system',
        dest='authoritative_system',
        required=True,
        help='TODO Authoritative system'
    )
    parser.add_argument(
        '-o', '--ou-perspective',
        dest='ou_perspective',
        required=True,
        help='TODO The OU perspective'
    )
    parser.add_argument(
        '-e', '--fagperson-fields',
        dest='fagperson_fields',
        action='append',
        choices=['work_title', 'phone', 'fax', 'mobile'],
        help='Fagperson data fields to be exported. Default is all'
    )
    parser.add_argument(
        '-n', '--no-extra-fields',
        action='store_true',
        dest='no_extra_fields',
        help='Do not export any of the "extra" fagperson fields (work_title, '
             'phone, fax, mobile)'
    )
    parser.add_argument(
        '-m', '--with-cache-email',
        action='store_true',
        dest='email_cache',
        help='Cache e-mail addresses'
    )
    parser.add_argument(
        '-c', '--commit',
        action='store_true',
        dest='commit',
        help='Write data to FS'
    )

    db = Factory.get("Database")()
    co = Factory.get("Constants")(db)
    fs = make_fs()

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args()
    Cerebrum.logutils.autoconf('cronjob', args)
    # Lazy %-formatting: the message is only rendered if INFO is enabled.
    logger.info('START %s', parser.prog)

    def parse_affiliation_string(affiliation):
        """Split '<aff>' or '<aff>/<status>' into constants.

        :return:
            An (affiliation, status) tuple (status may be None), or
            None if the string could not be mapped to known constants.
        """
        if affiliation is None:
            return None

        if len(affiliation.split("/")) == 1:
            aff, status = (
                co.human2constant(affiliation, co.PersonAffiliation),
                None)

        elif len(affiliation.split("/")) == 2:
            aff, status = affiliation.split("/")
            aff, status = (co.human2constant(aff, co.PersonAffiliation),
                           co.human2constant(status, co.PersonAffStatus))

            if aff is None or status is None:
                return None
        else:
            logger.error("Wrong syntax for affiliation %s", affiliation)
            return None

        return aff, status

    person_affs = [parse_affiliation_string(x) for x in args.person_affs]
    fagperson_affs = [parse_affiliation_string(x) for x in
                      args.fagperson_affs]

    # Abort on unknown affiliations rather than passing None entries on to
    # HR2FSSyncer (parse_affiliation_string has already logged the cause).
    if None in person_affs or None in fagperson_affs:
        logger.error('One or more invalid affiliation arguments given')
        return None

    ou_perspective = get_constant(db, parser, co.OUPerspective,
                                  args.ou_perspective)
    authoritative_system = get_constant(db, parser, co.AuthoritativeSystem,
                                        args.authoritative_system)

    if ou_perspective is None:
        logger.error('No valid OU perspective given')
        return None

    if authoritative_system is None:
        logger.error('No valid authoritative system given')
        return None

    if args.commit:
        logger.info('Changes will be committed')
    else:
        logger.info('Dryrun mode, no changes will be committed')

    valid_fagperson_fields = ['work_title', 'phone', 'fax', 'mobile']

    # Decide which optional fagperson fields to export:
    #  - --no-extra-fields: none of them
    #  - explicit --fagperson-fields: only the ones listed
    #  - otherwise: None, i.e. the syncer's default ("Default is all")
    if args.no_extra_fields:
        fagperson_fields = {x: False for x in valid_fagperson_fields}
    elif args.fagperson_fields:
        fagperson_fields = {x: False for x in valid_fagperson_fields}
        for field in args.fagperson_fields:
            if field in fagperson_fields:
                fagperson_fields[field] = True
    else:
        fagperson_fields = None

    syncer = HR2FSSyncer(person_affs, fagperson_affs, authoritative_system,
                         ou_perspective, db, fs, co,
                         fagperson_export_fields=fagperson_fields,
                         use_cache=True, email_cache=args.email_cache,
                         commit=args.commit)

    syncer.sync_to_fs()

    # FS has its own database connection; commit or roll it back explicitly.
    if args.commit:
        logger.info('Committing FS db')
        fs.db.commit()
    else:
        logger.info('Rolling back changes in the FS db')
        fs.db.rollback()
    logger.info('Done syncing to FS')
def main(inargs=None):
    """Remove expired spreads from accounts.

    :param inargs: argument list for the parser (defaults to sys.argv).
    """
    parser = argparse.ArgumentParser(description="Remove expired spreads", )
    spread_arg = parser.add_argument(
        '-s',
        '--spread',
        dest='spreads',
        action='append',
        help='Add a spread type to remove (default: all types)',
        metavar='<spread>',
    )
    parser.add_argument(
        '--days',
        dest='days',
        type=int,
        default=0,
        help='Set cutoff date to %(metavar)s days ago (default: %(default)s)',
        metavar='<days>',
    )

    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    co = Factory.get('Constants')(db)
    spread_expire = SpreadExpire(db)

    if args.spreads:
        spreads = tuple(
            get_constant(db, parser, SpreadCode, value, spread_arg)
            for value in args.spreads)
        # TODO: Throw error if spread.entity_type is wrong?
    else:
        # No -s given: default to every account spread.
        spreads = tuple(s for s in co.fetch_constants(SpreadCode)
                        if s.entity_type == co.entity_account)
    # Build a real list for logging -- a lazy ``map`` object would be
    # rendered as ``<map object at 0x...>`` on Python 3 instead of the names.
    logger.info("Spreads: %r", [six.text_type(s) for s in spreads])

    cutoff = datetime.date.today() - datetime.timedelta(days=args.days)
    logger.info("Start date: %r", cutoff)

    for spread in spreads:
        # co.spread_uit_exchange is deliberately excluded from this cleanup.
        if spread == co.spread_uit_exchange:
            logger.info("Skipping spread=%s", spread)
            continue
        elif spread.entity_type != co.entity_account:
            # Only account spreads are processed here.
            logger.info("Skipping spread=%s for entity_type=%s", spread,
                        spread.entity_type)
            continue
        else:
            logger.info("Processing spread: %s", spread)
        delete_spread(db, spread_expire, spread, cutoff)

    if args.commit:
        logger.info('Committing changes')
        db.commit()
    else:
        logger.info('Rolling back changes')
        db.rollback()
    logger.info('Done %s', parser.prog)
# ---- Beispiel #18 (example separator; stray vote-count "0" removed) ----
def main():
    """Parse command line arguments and sync HR data to FS.

    Resolves the given affiliation/constant arguments and runs an
    HR2FSSyncer against the FS database, committing only with --commit.
    """
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument(
        '-p',
        '--person-affiliations',
        dest='person_affs',
        action='append',
        required=True,
        help='List of person affiliations to use. On the form <affiliation> '
        'or <affiliation>/<status>. '
        'affiliation_ansatt/affiliation_status_ansatt_vit')
    parser.add_argument('-f',
                        '--fagperson-affiliation',
                        dest='fagperson_affs',
                        action='append',
                        required=True,
                        help='TODO Fagperson aff')
    parser.add_argument('-a',
                        '--authoritative-system',
                        dest='authoritative_system',
                        required=True,
                        help='TODO Authoritative system')
    parser.add_argument('-o',
                        '--ou-perspective',
                        dest='ou_perspective',
                        required=True,
                        help='TODO The OU perspective')
    parser.add_argument(
        '-e',
        '--fagperson-fields',
        dest='fagperson_fields',
        action='append',
        choices=['work_title', 'phone', 'fax', 'mobile'],
        help='Fagperson data fields to be exported. Default is all')
    parser.add_argument(
        '-n',
        '--no-extra-fields',
        action='store_true',
        dest='no_extra_fields',
        help='Do not export any of the "extra" fagperson fields (work_title, '
        'phone, fax, mobile)')
    parser.add_argument('-m',
                        '--with-cache-email',
                        action='store_true',
                        dest='email_cache',
                        help='Cache e-mail addresses')
    parser.add_argument('-c',
                        '--commit',
                        action='store_true',
                        dest='commit',
                        help='Write data to FS')

    # Set up Cerebrum and FS database access.
    db = Factory.get("Database")()
    co = Factory.get("Constants")(db)
    fs = make_fs()

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args()
    Cerebrum.logutils.autoconf('cronjob', args)
    logger.info('START {0}'.format(parser.prog))

    def parse_affiliation_string(affiliation):
        """Split '<aff>' or '<aff>/<status>' into constants.

        :return:
            An (affiliation, status) tuple (status may be None), or None
            if the string could not be mapped to known constants.
        """
        if affiliation is None:
            return None

        if len(affiliation.split("/")) == 1:
            # Affiliation only -- no status part given.
            aff, status = (co.human2constant(affiliation,
                                             co.PersonAffiliation), None)

        elif len(affiliation.split("/")) == 2:
            aff, status = affiliation.split("/")
            aff, status = (co.human2constant(aff, co.PersonAffiliation),
                           co.human2constant(status, co.PersonAffStatus))

            if aff is None or status is None:
                return None
        else:
            logger.error("Wrong syntax for affiliation: %r", affiliation)
            return None

        return aff, status

    # NOTE(review): entries may be None when a lookup fails -- the lists
    # are passed to HR2FSSyncer unchecked; confirm the syncer tolerates it.
    person_affs = [parse_affiliation_string(x) for x in args.person_affs]
    fagperson_affs = [parse_affiliation_string(x) for x in args.fagperson_affs]

    ou_perspective = get_constant(db, parser, co.OUPerspective,
                                  args.ou_perspective)
    authoritative_system = get_constant(db, parser, co.AuthoritativeSystem,
                                        args.authoritative_system)

    if ou_perspective is None:
        logger.error('No valid OU perspective given')
        return None

    if authoritative_system is None:
        logger.error('No valid authoritative system given')
        return None

    if args.commit:
        logger.info('Changes will be committed')
    else:
        logger.info('Dryrun mode, no changes will be committed')

    valid_fagperson_fields = ['work_title', 'phone', 'fax', 'mobile']

    # Decide which optional fagperson fields to export:
    #  - --no-extra-fields: none of them
    #  - explicit --fagperson-fields: only the ones listed
    #  - otherwise: None, i.e. the syncer's default ("Default is all")
    if args.no_extra_fields:
        fagperson_fields = {x: False for x in valid_fagperson_fields}
    elif args.fagperson_fields:
        fagperson_fields = {x: False for x in valid_fagperson_fields}
        for field in args.fagperson_fields:
            if field in fagperson_fields:
                fagperson_fields[field] = True
    else:
        fagperson_fields = None

    syncer = HR2FSSyncer(person_affs,
                         fagperson_affs,
                         authoritative_system,
                         ou_perspective,
                         db,
                         fs,
                         co,
                         fagperson_export_fields=fagperson_fields,
                         use_cache=True,
                         email_cache=args.email_cache,
                         commit=args.commit)

    syncer.sync_to_fs()

    # FS has its own database connection; commit or roll it back explicitly.
    if args.commit:
        logger.info('Committing FS db')
        fs.db.commit()
    else:
        logger.info('Rolling back changes in the FS db')
        fs.db.rollback()
    logger.info('Done syncing to FS')
def main(inargs=None):
    """Build and write the report over recently created persons.

    :param inargs: argument list for the parser (defaults to sys.argv).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--output',
                        metavar='FILE',
                        type=argparse.FileType('w'),
                        default='-',
                        help='output file for report, defaults to stdout')
    parser.add_argument('-e', '--encoding',
                        dest='codec',
                        default=DEFAULT_ENCODING,
                        type=codec_type,
                        help="output file encoding, defaults to %(default)s")

    parser.add_argument('--from',
                        dest='start_date',
                        type=ISO.ParseDate,
                        default=now() + RelativeDateTime(days=-7),
                        help='date to start searching for new persons from,'
                             ' defaults to 7 days ago')
    parser.add_argument('--to',
                        dest='end_date',
                        type=ISO.ParseDate,
                        default=now(),
                        help='date to start searching for new persons until,'
                             ' defaults to now')

    source_arg = parser.add_argument(
        '--source_systems',
        default='SAP,FS',
        help="comma separated list of source systems to search through,"
             " defaults to %(default)s")

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    # Map each comma-separated name to its AuthoritativeSystem constant.
    sources = []
    for name in args.source_systems.split(','):
        sources.append(
            get_constant(db, parser, co.AuthoritativeSystem, name, source_arg))

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    logger.debug("source_systems: %r", sources)

    # Group the matching persons by faculty, then by ou.
    hits = get_new_persons(db, sources, args.start_date, args.end_date)
    new_persons = aggregate(hits, 'faculty', 'ou')

    write_html_report(args.output, args.codec, new_persons,
                      args.start_date, args.end_date)

    args.output.flush()
    # Only close real files; stdout must stay open for later writers.
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
def main():
    """Process queued bofhd requests of the selected types."""
    global DEBUG

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-d', '--debug',
        dest='debug',
        action='store_true',
        help='Turn on debugging')
    parser.add_argument(
        '-t', '--type',
        dest='types',
        action='append',
        choices=['email', 'sympa', 'move', 'quarantine', 'delete'],
        required=True)
    parser.add_argument(
        '-m', '--max',
        dest='max_requests',
        default=999999,
        help='Perform up to this number of requests',
        type=int)
    parser.add_argument(
        '-p', '--process',
        dest='process',
        action='store_true',
        help='Perform the queued operations')
    # First pass: parse only the options registered so far, so we can decide
    # whether the move_student-specific options below must be required.
    args, _rest = parser.parse_known_args()

    has_move_arg = 'move' in args.types
    arg_group = parser.add_argument_group('Required for move_student requests')
    arg_group.add_argument(
        '--ou-perspective',
        dest='ou_perspective',
        default='perspective_fs'
    )
    arg_group.add_argument(
        '--emne-info-file',
        dest='emne_info_file',
        default=None,
        required=has_move_arg
    )
    arg_group.add_argument(
        '--studconfig-file',
        dest='studconfig_file',
        default=None,
        required=has_move_arg
    )
    arg_group.add_argument(
        '--studie-progs-file',
        dest='studieprogs_file',
        default=None,
        required=has_move_arg
    )
    arg_group.add_argument(
        '--student-info-file',
        dest='student_info_file',
        default=None,
        required=has_move_arg
    )

    logutils.options.install_subparser(parser)
    # Second pass: full parse with every argument registered.
    args = parser.parse_args()
    logutils.autoconf('bofhd_req', args)

    DEBUG = args.debug

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)

    # NOTE(review): db, const, default_spread and operations_map are
    # module-level names defined outside this function.
    # Without --process, nothing is executed beyond the logging above.
    if args.process:
        if has_move_arg:
            # Asserting that a legal value is assigned to args.ou_perspective
            args.ou_perspective = get_constant(db, parser, const.OUPerspective,
                                               args.ou_perspective)
            msp = process_requests.MoveStudentProcessor(
                db,
                const,
                args.ou_perspective,
                args.emne_info_file,
                args.studconfig_file,
                args.studieprogs_file,
                default_spread=default_spread,
            )
            # Convert move_student requests into move_user requests
            msp.process_requests(args.student_info_file)

        rp = process_requests.RequestProcessor(db, const)
        rp.process_requests(operations_map, args.types, args.max_requests)
def main():
    """Generate NSS group/netgroup files for a given spread."""
    parser = argparse.ArgumentParser(
        description='Generate a NSS groups file for a given spread', )
    parser.add_argument('-g', '--group',
                        dest='group',
                        default=None,
                        metavar='FILE',
                        help='Write a filegroups file to %(metavar)s')
    parser.add_argument('-n', '--netgroup',
                        dest='netgroup',
                        default=None,
                        metavar='FILE',
                        help='Write a netgroups file to %(metavar)s')
    parser.add_argument('-m', '--mnetgroup',
                        dest='mnetgroup',
                        default=None,
                        metavar='FILE',
                        help='Write netgroups file with hosts to %(metavar)s')
    parser.add_argument('--this-is-an-ugly-hack',
                        dest='hack',
                        default=None,
                        metavar='FILE',
                        help=('Write a netgroups file that includes the '
                              'primary account of persons to %(metavar)s'))

    parser.add_argument('--user_spread',
                        dest='user_spread',
                        help='Filter by user_spread')
    parser.add_argument('--group_spread',
                        dest='group_spread',
                        required=True,
                        help='Filter by group_spread')
    parser.add_argument('--eof',
                        dest='e_o_f',
                        action='store_true',
                        help='End dump file with E_O_F to mark successful '
                             'completion')
    parser.add_argument('-Z', '--zone',
                        dest='zone',
                        help='dns zone postfix (example: .uio.no.)')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args()
    Cerebrum.logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    if args.mnetgroup and not args.zone:
        parser.error('--mnetgroup requires --zone')

    # Every export except --mnetgroup needs a user spread.
    need_user_spread = (args.netgroup is not None
                        or args.hack is not None
                        or args.group is not None)
    if need_user_spread and not args.user_spread:
        parser.error('No --user_spread given')

    # Replace the raw string arguments with their constant objects.
    args.group_spread = get_constant(db, parser, co.Spread, args.group_spread)
    if args.user_spread:
        args.user_spread = get_constant(db, parser, co.Spread,
                                        args.user_spread)
    if args.zone:
        args.zone = get_constant(db, parser, co.DnsZone, args.zone)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    if args.group:
        logger.debug('generating filegroups...')
        filegroup_writer = FileGroup(args.group_spread, args.user_spread)
        filegroup_writer.write_filegroup(args.group, args.e_o_f)
        logger.info('filegroups written to %s', args.group)

    if args.netgroup:
        logger.debug('generating netgroups...')
        netgroup_writer = UserNetGroup(args.group_spread, args.user_spread)
        netgroup_writer.write_netgroup(args.netgroup, args.e_o_f)
        logger.info('netgroups written to %s', args.netgroup)

    if args.mnetgroup:
        logger.debug('generating host netgroups...')
        host_writer = MachineNetGroup(args.group_spread, None, args.zone)
        host_writer.write_netgroup(args.mnetgroup, args.e_o_f)
        logger.info('host netgroups written to %s', args.mnetgroup)

    if args.hack:
        logger.debug('generating netgroups that includes persons...')
        person_writer = HackUserNetGroupUIO(args.group_spread,
                                            args.user_spread)
        person_writer.write_netgroup(args.hack, args.e_o_f,
                                     include_persons=True)
        logger.info('netgroups written to %s', args.hack)

    logger.info('Done %s', parser.prog)
# ---- Beispiel #22 (example separator; stray vote-count "0" removed) ----
def main():
    """Process queued bofhd requests of the selected types."""
    global DEBUG

    parser = argparse.ArgumentParser()
    parser.add_argument('-d',
                        '--debug',
                        dest='debug',
                        action='store_true',
                        help='Turn on debugging')
    parser.add_argument(
        '-t',
        '--type',
        dest='types',
        action='append',
        choices=['email', 'sympa', 'move', 'quarantine', 'delete'],
        required=True)
    parser.add_argument('-m',
                        '--max',
                        dest='max_requests',
                        default=999999,
                        help='Perform up to this number of requests',
                        type=int)
    parser.add_argument('-p',
                        '--process',
                        dest='process',
                        action='store_true',
                        help='Perform the queued operations')
    # First pass: parse only the options registered so far, so we can decide
    # whether the move_student-specific options below must be required.
    args, _rest = parser.parse_known_args()

    has_move_arg = 'move' in args.types
    arg_group = parser.add_argument_group('Required for move_student requests')
    arg_group.add_argument('--ou-perspective',
                           dest='ou_perspective',
                           default='perspective_fs')
    arg_group.add_argument('--emne-info-file',
                           dest='emne_info_file',
                           default=None,
                           required=has_move_arg)
    arg_group.add_argument('--studconfig-file',
                           dest='studconfig_file',
                           default=None,
                           required=has_move_arg)
    arg_group.add_argument('--studie-progs-file',
                           dest='studieprogs_file',
                           default=None,
                           required=has_move_arg)
    arg_group.add_argument('--student-info-file',
                           dest='student_info_file',
                           default=None,
                           required=has_move_arg)

    logutils.options.install_subparser(parser)
    # Second pass: full parse with every argument registered.
    args = parser.parse_args()
    logutils.autoconf('bofhd_req', args)

    DEBUG = args.debug

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    # NOTE(review): db, const, default_spread and operations_map are
    # module-level names defined outside this function.
    # Without --process, nothing is executed beyond the logging.
    if args.process:
        if has_move_arg:
            logger.info('Processing move requests')
            # Asserting that a legal value is assigned to args.ou_perspective
            args.ou_perspective = get_constant(db, parser, const.OUPerspective,
                                               args.ou_perspective)
            msp = process_requests.MoveStudentProcessor(
                db,
                const,
                args.ou_perspective,
                args.emne_info_file,
                args.studconfig_file,
                args.studieprogs_file,
                default_spread=default_spread,
            )
            # Convert move_student requests into move_user requests
            msp.process_requests(args.student_info_file)

        logger.info('Processing regular requests')
        rp = process_requests.RequestProcessor(db, const)
        rp.process_requests(operations_map, args.types, args.max_requests)

    logger.info('Done %s', parser.prog)
def main(inargs=None):
    """Add and/or remove spreads for the persons listed in a file.

    :param inargs: argument list for the parser (defaults to sys.argv).
    """
    parser = argparse.ArgumentParser(
        description="Modify person spreads from a list of persons",
        epilog=("Multiple spreads can be given, e.g. --add-spread=foo "
                "--add-spread=bar --remove-spread=baz"),
    )
    parser.add_argument('filename',
                        metavar='filename',
                        help="Remove spread from person_ids in %(metavar)s")
    add_spread_arg = parser.add_argument(
        '--add-spread',
        dest='to_add',
        action='append',
        metavar='spread',
        help="Add %(metavar)s to each selected person")
    del_spread_arg = parser.add_argument(
        '--remove-spread',
        dest='to_remove',
        action='append',
        metavar='spread',
        help="Remove %(metavar)s from each selected person")
    add_commit_args(parser)
    Cerebrum.logutils.options.install_subparser(parser)

    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('tee', args)

    logger.info('Start of %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    db.cl_init(change_program=parser.prog)
    co = Factory.get('Constants')(db)

    def _lookup_spreads(values, argument):
        # Translate each spread name to its Spread constant; an empty or
        # missing option list yields an empty result.
        return [get_constant(db, parser, co.Spread, v, argument=argument)
                for v in (values or ())]

    spreads_to_add = _lookup_spreads(args.to_add, add_spread_arg)
    spreads_to_remove = _lookup_spreads(args.to_remove, del_spread_arg)

    if not spreads_to_add and not spreads_to_remove:
        parser.error("No spreads given, nothing to do")

    update_spreads(db,
                   read_integers(args.filename),
                   to_add=spreads_to_add,
                   to_remove=spreads_to_remove)

    if args.commit:
        logger.info('Commiting changes')
        db.commit()
    else:
        db.rollback()
        logger.info('Rolling back changes')
    logger.info('Done %s', parser.prog)
def main(inargs=None):
    """Report person/user and user/group mismatches to file and/or email.

    :param inargs: argument list for the parser (defaults to sys.argv).
    """
    parser = argparse.ArgumentParser(description='')

    source_args = parser.add_argument_group('Report parameters')
    ss_arg = source_args.add_argument('-s', '--source-system', required=True)
    affs_arg = source_args.add_argument(
        '-u',
        '--person-user-mismatch',
        action='append',
        default=[],
        dest='user_select',
        help='Report on user accounts for persons with %(metavar)s',
        metavar='AFFILIATION')
    rule_arg = source_args.add_argument(
        '-g',
        '--user-group-mismatch',
        action='append',
        default=[],
        dest='group_select',
        type=group_rule,
        help='Report on users missing for the affiliation -> group rule',
        metavar='AFF:status:sko:group')

    file_args = parser.add_argument_group('Report file')
    file_args.add_argument('-f',
                           '--output-file',
                           type=argparse.FileType('w'),
                           default=None,
                           help="Write report to file, '-' for stdout")
    file_args.add_argument(
        '-e',
        '--output-encoding',
        type=codec_type,
        default=DEFAULT_FILE_ENCODING,
        help="Write file using the given encoding (default: %(default)s)")

    mail_args = parser.add_argument_group('Report email')
    to_arg = mail_args.add_argument('--to',
                                    action='append',
                                    default=[],
                                    dest='mail_to',
                                    type=email_address,
                                    help="Send an email report to %(metavar)s",
                                    metavar='ADDR')
    cc_arg = mail_args.add_argument(
        '--cc',
        action='append',
        default=[],
        dest='mail_cc',
        type=email_address,
        help="Also send email report to %(metavar)s",
        metavar='ADDR')
    from_arg = mail_args.add_argument('--from',
                                      default=DEFAULT_MAIL_FROM,
                                      dest='mail_from',
                                      type=email_address,
                                      help="Send reports from %(metavar)s",
                                      metavar='ADDR')
    # Fixed copy-pasted help/metavar: this option is the subject line, not a
    # recipient address.
    subj_arg = mail_args.add_argument(
        '--subject',
        dest='mail_subject',
        type=UnicodeType(),
        help="Subject of the email report",
        metavar='SUBJECT')
    mail_args.add_argument(
        '--encoding',
        dest='mail_codec',
        type=codec_type,
        default=DEFAULT_MAIL_ENCODING,
        # Fixed: the help text was missing its closing parenthesis.
        help="Charset to use for email (default: %(default)s)")

    def check_mail_args(args):
        """Validate mail options; return True if an email should be sent.

        If any mail option differs from its default, then --to, --from and
        --subject must all be set, otherwise parser.error() aborts.
        """
        # Mail options the user explicitly set (changed from defaults)...
        args_set = [
            arg for arg in (to_arg, from_arg, subj_arg, cc_arg)
            if getattr(args, arg.dest) != arg.default
        ]
        # ...and required mail options that are still missing/empty.
        args_missing = [
            arg for arg in (to_arg, from_arg, subj_arg)
            if not bool(getattr(args, arg.dest))
        ]
        if len(args_set) > 0 and len(args_missing) > 0:
            parser.error(
                argparse.ArgumentError(
                    args_set[0],
                    "missing {0}".format(', '.join('/'.join(m.option_strings)
                                                   for m in args_missing))))
        return len(args_set) > 0

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    send_mail = check_mail_args(args)

    if not any((args.user_select, args.group_select)):
        parser.error("No selections given - nothing to do")

    if not any((send_mail, args.output_file)):
        parser.error("No destination for report - nothing to do")

    db = Factory.get("Database")()
    co = Factory.get("Constants")(db)

    # Resolve string arguments to Cerebrum constants before doing any work.
    source_system = get_constant(db, parser, co.AuthoritativeSystem,
                                 args.source_system, ss_arg)
    user_affs = [
        get_constant(db, parser, co.PersonAffiliation, c, affs_arg)
        for c in args.user_select
    ]
    group_rules = make_rules(db, parser, args.group_select, rule_arg)

    # Start of script
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)
    logger.info("Affiliations for user report: %r", args.user_select)
    logger.info("Rules for group report: %r", args.group_select)
    logger.info("Sending email report: %r", send_mail)
    if send_mail:
        logger.debug("mail to      : %r", args.mail_to)
        logger.debug("mail cc      : %r", args.mail_cc)
        logger.debug("mail from    : %r", args.mail_from)
        logger.debug("mail subject : %r", args.mail_subject)

    reports = []

    if group_rules:
        reports.append(make_group_report(db, source_system, group_rules))

    if user_affs:
        reports.append(make_user_report(db, source_system, user_affs))

    if not reports:
        logger.warning("Nothing to report")

    report = format_report("\n".join(reports))

    if reports and send_mail:
        send_report(args, report)

    if args.output_file:
        write_report(args.output_file, report)
        # Only close real files; stdout must stay usable afterwards.
        if args.output_file is not sys.stdout:
            args.output_file.close()
        logger.info("Wrote report to %r", args.output_file.name)
def main(inargs=None):
    """Main processing 'hub' of program.

    Decides which areas to generate, then generates them sequentially,
    dumping collected and relevant info to STDOUT along the way.

    :param inargs:
        Argument list for the parser (defaults to ``sys.argv[1:]``).
    """
    parser = argparse.ArgumentParser(
        description="Prints changelog summaries to stdout",
        epilog=help_epilog,
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument(
        '--affiliations',
        action='store_true',
        default=False,
        help='Break down totals by affiliation',
    )
    # Keep the argument object -- it is needed by argutils.get_constant() to
    # produce a proper parser error on invalid values.
    src_arg = parser.add_argument(
        '--source-system',
        help='Show information for given source system',
    )
    parser.add_argument(
        '--details',
        action='store_true',
        default=False,
        help=('List details about the events in question. '
              'The exact type of details will vary by event.'),
    )
    parser.add_argument(
        '--from',
        dest='from_date',
        type=mx.DateTime.ISO.ParseDate,
        default=relative_date(-7),
        help=('Start date for events to be processed. '
              'Default value is Monday of last week.'),
    )
    parser.add_argument(
        '--to',
        dest='to_date',
        type=mx.DateTime.ISO.ParseDate,
        default=relative_date(0),
        help=('End-date for events to be processed. '
              'Default value is Sunday of last week.'),
    )
    parser.add_argument(
        '--header',
        default=default_header_file,
        help=('Template-file to use as header rather than the one '
              'specified in cereconf.'),
    )
    events_arg = parser.add_argument(
        '--events',
        # NOTE: typos fixed from the original help text
        # ("Comma-seperated" and "alt events").
        help=('Comma-separated list of events to process. '
              'Default is to process all events that have handlers '
              'defined, i.e: %s' % (default_events, )),
    )
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info("Statistics for Cerebrum activities")
    logger.debug('args=%r', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    cl = Factory.get('CLConstants')(db)

    def get_change_type(value):
        """Resolve a string/int to a ChangeType constant or raise ValueError."""
        const_value = cl.resolve_constant(db, value, cl.ChangeType)
        if const_value is None:
            raise ValueError("invalid constant value: %r" % (value, ))
        return const_value

    if args.source_system:
        source_system = argutils.get_constant(db,
                                              parser,
                                              co.AuthoritativeSystem,
                                              args.source_system,
                                              argument=src_arg)
    else:
        source_system = None
    logger.debug('source_system=%r', source_system)

    # ParserContext converts ValueError into a parser error tied to --events.
    with argutils.ParserContext(parser, events_arg):
        event_types = [
            get_change_type(v) for v in (
                args.events.split(',') if args.events else default_events)
        ]
        if not event_types:
            raise ValueError('No valid event-types specified')
    logger.debug('event_types=%r', event_types)

    logger.info("Statistics for Cerebrum activities - processing started")
    logger.debug("Time period: from: '%s'; up to: '%s'",
                 str(args.from_date.date), str(args.to_date.date))

    print("")
    print("Statistics covering the period from %s up to %s" %
          (args.from_date.date, args.to_date.date))
    print(get_header(args.header))

    # Iterate over all event types, retrieve info and generate output
    # based on it.
    for current_type in event_types:
        logger.info("processing change_type=%r", current_type)
        processor = get_processor(db, current_type)
        logger.debug('using processor=%r', processor)
        processor.process_events(start_date=args.from_date,
                                 end_date=args.to_date)
        if args.affiliations:
            processor.calculate_count_by_affiliation()
        # count by source system only makes sense for person entities
        if source_system and str(current_type) == 'person:create':
            processor.calculate_count_by_source_system(source_system)
        processor.print_report(print_affiliations=args.affiliations,
                               print_source_system=bool(source_system),
                               print_details=args.details)
    print("")

    logger.info("Statistics for Cerebrum activities - done")
def main(inargs=None):
    """Report accounts with quarantines started in a given date range.

    Writes matching account names to stdout and optionally mails a report
    built from a template file.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=description,
        epilog=epilog,
    )

    # TODO: Rework so that *both* before and after can be used
    when = parser.add_mutually_exclusive_group(required=True)
    when.add_argument(
        '-a', '--after',
        metavar='DAYS',
        type=days_ago_date_type,
        help='report quarantines started after %(metavar)s days ago')
    when.add_argument(
        '-b', '--before',
        metavar='DAYS',
        type=days_ago_date_type,
        help='report quarantines started before %(metavar)s days ago')

    parser.add_argument(
        '-m', '--mail',
        metavar='FILE',
        dest='template',
        help="use template from %(metavar)s and send report by mail")
    quarantine_arg = parser.add_argument(
        '-q', '--quarantine',
        dest='quarantines',
        action='append',
        default=[],
        help="only report on quarantines of this type")
    parser.add_argument(
        '-d', '--dryrun',
        action='store_true',
        default=False,
        help="dryrun, don't send mail even if an email template is supplied")

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    # Translate quarantine names to constants; no -q options means "all"
    # (signalled by None).
    q_consts = [
        argutils.get_constant(db, parser, co.Quarantine, value, quarantine_arg)
        for value in args.quarantines
    ] or None

    logger.info("quarantines: %r", q_consts)
    logger.info("mail template: %r", args.template)
    logger.info("after: %s", args.after)
    logger.info("before: %s", args.before)

    mail_message = get_template(args.template) if args.template else None

    logger.debug("fetching quarantined accounts...")
    quarantined = unique(
        get_quarantined(db, args.after, args.before, q_types=q_consts))
    logger.info("found %d quarantined accounts to report", len(quarantined))

    # For legacy reasons -- this script has always written the account names to
    # stdout. TODO: This can probably be removed?
    for name in quarantined:
        print(name)

    if mail_message:
        logger.debug("sending report by mail")
        prepare_report(mail_message, quarantined)
        Cerebrum.utils.email.send_message(mail_message, debug=args.dryrun)

    logger.info('Done with script %s', parser.prog)
def main(inargs=None):
    """Write an HTML report of accounts with an email target but no spread.

    The report is written to the file given by ``-o`` (stdout by default).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-o', '--output',
        metavar='FILE',
        type=argparse.FileType('w'),
        default='-',
        help='output file for report, defaults to stdout')
    parser.add_argument(
        '-e', '--encoding',
        dest='codec',
        default=DEFAULT_ENCODING,
        type=codec_type,
        help="output file encoding, defaults to %(default)s")
    # The argument object is passed on to get_constant() so bad values can be
    # reported as a parser error for this specific option.
    spread_arg = parser.add_argument(
        '-s', '--spread',
        dest='spread',
        required=True,
        help='name of spread to filter accounts by')
    parser.add_argument(
        '-g', '--exclude_groups',
        dest='excluded_groups',
        help='comma-separated list of groups to be excluded from the report')
    parser.add_argument(
        '-i', '--include-expired',
        action='store_true',
        default=False,
        help='include expired accounts')

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    spread = get_constant(db, parser, co.Spread, args.spread, spread_arg)

    # Optional comma separated group names -> list of names (empty if unset).
    skip_groups = (args.excluded_groups.split(',')
                   if args.excluded_groups is not None else [])

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    logger.info("spread: %s", text_type(spread))

    accounts = list(
        get_accounts(db, spread, args.include_expired, skip_groups))

    title = ('Accounts with email_target=account, without spread=%s' %
             text_type(spread))
    write_html_report(args.output, args.codec, title, accounts)

    args.output.flush()
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)
# Beispiel #28
# 0
# NOTE(review): the two lines above appear to be scraper/aggregator residue
# ("Beispiel" = "Example"), not part of any script -- commented out so they
# no longer break the file; confirm and remove.
def main(inargs=None):
    """Write an HTML report of persons added within a given date range.

    Persons are fetched from the given source systems, aggregated by
    faculty and OU, and written to the file given by ``-o`` (stdout by
    default).

    :param inargs:
        Argument list for the parser (defaults to ``sys.argv[1:]``).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o',
                        '--output',
                        metavar='FILE',
                        type=argparse.FileType('w'),
                        default='-',
                        help='output file for report, defaults to stdout')
    parser.add_argument('-e',
                        '--encoding',
                        dest='codec',
                        default=DEFAULT_ENCODING,
                        type=codec_type,
                        help="output file encoding, defaults to %(default)s")

    parser.add_argument(
        '--from',
        dest='start_date',
        type=ISO.ParseDate,
        default=now() + RelativeDateTime(days=-7),
        help='date to start searching for new persons from, defaults to'
        ' 7 days ago')
    parser.add_argument(
        '--to',
        dest='end_date',
        type=ISO.ParseDate,
        default=now(),
        # NOTE: fixed copy-pasted help text ("date to start searching ...
        # until") -- this is the end of the search range.
        help='date to search for new persons until, defaults to now')

    source_arg = parser.add_argument(
        '--source_systems',
        default='SAP,FS',
        help="comma separated list of source systems to search through,"
        " defaults to %(default)s")

    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    # Resolve each source system name to its constant; get_constant() turns
    # unknown names into a parser error for --source_systems.
    src = [
        get_constant(db, parser, co.AuthoritativeSystem, value, source_arg)
        for value in args.source_systems.split(',')
    ]

    logger.info('Start of script %s', parser.prog)
    logger.debug("args: %r", args)
    logger.debug("source_systems: %r", src)

    # Group the new persons by faculty and OU for presentation.
    new_persons = aggregate(
        get_new_persons(db, src, args.start_date, args.end_date), 'faculty',
        'ou')

    write_html_report(args.output, args.codec, new_persons, args.start_date,
                      args.end_date)

    args.output.flush()
    if args.output is not sys.stdout:
        args.output.close()

    logger.info('Report written to %s', args.output.name)
    logger.info('Done with script %s', parser.prog)