def main():
    """Start method for this script.

    Parses command line options, runs the uio-tils/uio-ans group update
    and commits or rolls back depending on --dryrun.
    """
    # BUG FIX: the original called Factory.get_logger() without binding the
    # result, then referenced an undefined ``logger`` name below.
    global logger
    logger = Factory.get_logger("cronjob")
    logger.info("Performing uio-tils/uio-ans group updates")

    try:
        options, rest = getopt.getopt(sys.argv[1:], "dhs:",
                                      ["dryrun", "help", "source-spec="])
    except getopt.GetoptError:
        usage()
        sys.exit(1)

    dryrun = False
    for option, value in options:
        if option in ("-d", "--dryrun",):
            dryrun = True
        elif option in ("-h", "--help",):
            usage()
            sys.exit(0)
        elif option in ("-s", "--source-spec"):
            # value is "<system name>:<file name>"
            sysname, filename = value.split(":")

    db = Factory.get("Database")()
    db.cl_init(change_program="update_emp_grp")
    perform_update(db, sysname, filename)

    if dryrun:
        logger.info("updates completed. all changes rolled back")
        db.rollback()
    else:
        db.commit()
        logger.info("updates completed. all changes committed")
def main():
    """Parse command line options and run the SAP OU import."""
    global dryrun
    global logger

    logger = Factory.get_logger("cronjob")
    opts, _ = getopt.getopt(sys.argv[1:], "do:", ["dryrun", "ou-file="])

    dryrun = False
    filename = None
    for opt, val in opts:
        if opt in ("-d", "--dryrun"):
            dryrun = True
        elif opt in ("-o", "--ou-file"):
            # val is "<source system>:<file name>"
            source_system, filename = val.split(":", 1)

    if not filename:
        logger.error("Missing OU input file")
        sys.exit(1)

    db = Factory.get("Database")()
    db.cl_init(change_program="import_SAP")
    parser = system2parser(source_system)
    process_OUs(db, parser(filename, logger))
def main():
    """Start method for this script."""
    global logger
    logger = Factory.get_logger("cronjob")
    logger.info("Performing uio-tils/uio-ans group updates")

    try:
        opts, _ = getopt.getopt(sys.argv[1:], "dhs:",
                                ["dryrun", "help", "source-spec="])
    except getopt.GetoptError:
        usage()
        sys.exit(1)

    dryrun = False
    for opt, val in opts:
        if opt in ("-d", "--dryrun"):
            dryrun = True
        elif opt in ("-h", "--help"):
            usage()
            sys.exit(0)
        elif opt in ("-s", "--source-spec"):
            # val is "<system name>:<file name>"
            sysname, filename = val.split(":")

    db = Factory.get("Database")()
    db.cl_init(change_program="update_emp_grp")
    perform_update(db, sysname, filename)

    if dryrun:
        logger.info("updates completed. all changes rolled back")
        db.rollback()
    else:
        db.commit()
        logger.info("updates completed. all changes committed")
def main():
    """Write an LDIF with password entries for the configured SERVICES users."""
    logger = Factory.get_logger("cronjob")
    db = Factory.get('Database')()
    const = Factory.get("Constants")(db)
    account = Factory.get('Account')(db)

    auth_prefix = "{crypt}"
    auth_method = int(const.auth_type_md5_crypt)

    ldif = LDIFWriter('SERVICES', None)
    dn = ldif.getconf('dn')
    ldif.write_container()

    for username in ldif.getconf('users'):
        account.clear()
        try:
            account.find_by_name(username)
        except Errors.NotFoundError:
            logger.error("User '%s' not found" % username)
            sys.exit(1)

        passwd = None
        qh = QuarantineHandler.check_entity_quarantines(db, account.entity_id)
        if not (qh.should_skip() or qh.is_locked()):
            try:
                passwd = account.get_account_authentication(auth_method)
            except Errors.NotFoundError:
                logger.warn("Password not found for user %s", username)

        entry = {
            'description': "Note: The password is maintained in Cerebrum.",
            'objectClass': ('applicationProcess', 'simpleSecurityObject'),
            'userPassword': auth_prefix + (passwd or "*locked"),
        }
        ldif.write_entry("cn=%s,%s" % (username, dn), entry)

    ldif.close()
def main():
    """Generate a new XML export for export_ACL."""
    global logger, const, cerebrum_db, xmlwriter

    logger = Factory.get_logger("cronjob")
    logger.info("generating a new XML for export_ACL")
    cerebrum_db = Factory.get("Database")()
    const = Factory.get("Constants")(cerebrum_db)

    # BUG FIX: long option names passed to getopt must not include the
    # leading dashes; "--out-file=" made getopt accept "----out-file" and
    # reject the documented "--out-file".
    opts, rest = getopt.getopt(sys.argv[1:], "f:", ["out-file="])
    filename = None
    for option, value in opts:
        if option in ("-f", "--out-file"):
            filename = value

    _cache_id_types()
    stream = AtomicFileWriter(filename)
    xmlwriter = xmlprinter.xmlprinter(
        stream,
        indent_level=2,  # Human-readable output
        data_mode=True,
        input_encoding="latin1")
    generate_report()
    stream.close()
def main():
    """Dump OFK user information to a size-checked text file."""
    global db, co, ac, p, ou, et, logger

    logger = Factory.get_logger("cronjob")
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    ac = Factory.get('Account')(db)
    p = Factory.get('Person')(db)
    ou = Factory.get('OU')(db)
    et = Email.EmailTarget(db)

    # Default cache directory; may be overridden with -t/--txt-path.
    txt_path = "/cerebrum/var/cache/txt"
    opts, _ = getopt.getopt(sys.argv[1:], "t:", ["txt-path="])
    for opt, val in opts:
        if opt in ("-t", "--txt-path"):
            txt_path = val

    # Load dicts with misc info.
    get_account_info()
    get_person_contact()

    # Dump OFK info; SimilarSizeWriter refuses files that change too much.
    f = SimilarSizeWriter("%s/ofk.txt" % txt_path, "w")
    f.max_pct_change = 10
    users = process_txt_file(f)
    f.close()
def main(args=None):
    """Generate an HTML report on name differences and write it to a file."""
    ENCODING = 'utf-8'
    logger = Factory.get_logger('cronjob')
    db = Factory.get(b'Database')()
    co = Factory.get(b'Constants')(db)

    argparser = argparse.ArgumentParser(description=__doc__)
    argparser.add_argument('-o', '--output', default='/tmp/report.html')
    commands = argparser.add_subparsers(help="available commands")

    # The 'name' sub-command compares names between source systems.
    name_command = commands.add_parser(
        'name',
        help="Generate report on differences in names.")
    name_command.set_defaults(func=compare_names)
    name_command.set_defaults(check_system=co.system_sap)
    name_command.add_argument(
        'source_system',
        type=partial(argparse_const, db, co.AuthoritativeSystem))

    args = argparser.parse_args(args)
    run_command = args.func
    del args.func
    # Other commands?

    logger.info('Generating report ({!s})'.format(args.output))
    writer = AtomicFileWriter(args.output)
    report = run_command(db, logger, args)
    report.find('head/meta[@charset]').set('charset', ENCODING)
    writer.write("<!DOCTYPE html>\n")
    writer.write(ElementTree.tostring(report, encoding=ENCODING))
    writer.close()
    logger.info('Done')
def main():
    """Emit an LDIF of service users with their (possibly locked) passwords."""
    logger = Factory.get_logger("cronjob")
    db = Factory.get('Database')()
    const = Factory.get("Constants")(db)
    account = Factory.get('Account')(db)
    auth_prefix, auth_method = "{crypt}", int(const.auth_type_md5_crypt)

    ldif = LDIFWriter('SERVICES', None)
    dn = ldif.getconf('dn')
    ldif.write_container()

    for username in ldif.getconf('users'):
        account.clear()
        try:
            account.find_by_name(username)
        except Errors.NotFoundError:
            logger.error("User '%s' not found" % username)
            sys.exit(1)

        # Quarantined/locked users get the "*locked" placeholder instead of
        # a real password hash.
        passwd = None
        qh = QuarantineHandler.check_entity_quarantines(db, account.entity_id)
        if not (qh.should_skip() or qh.is_locked()):
            try:
                passwd = account.get_account_authentication(auth_method)
            except Errors.NotFoundError:
                logger.warn("Password not found for user %s", username)

        ldif.write_entry(
            "cn=%s,%s" % (username, dn),
            {'description': "Note: The password is maintained in Cerebrum.",
             'objectClass': ('applicationProcess', 'simpleSecurityObject'),
             'userPassword': auth_prefix + (passwd or "*locked")})

    ldif.close()
def main():
    """Parse arguments and open the report output destination."""
    try:
        import argparse
    except ImportError:
        import Cerebrum.extlib.argparse as argparse

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-s', '--spread', dest='spread',
                        help='Name of spread to filter accounts by.',
                        required=True)
    parser.add_argument('-o', '--output', dest='output_file',
                        help='Output file for report. Default is stdout.')
    parser.add_argument('-i', '--include-expired', action='store_true',
                        help='Include expired accounts.')
    parser.add_argument('-l', '--logger-name', dest='logname',
                        default='cronjob',
                        help='Specify logger (default: cronjob).')
    args = parser.parse_args()
    logger = Factory.get_logger(args.logname)

    if args.output_file is not None:
        try:
            output = open(args.output_file, 'w')
        except IOError as e:
            logger.error(e)
            sys.exit(1)
    else:
        # BUG FIX: honor the documented default ("Default is stdout.");
        # the original never assigned ``output`` when -o was omitted.
        output = sys.stdout
def main():
    """Import SAP person data and clean stale person information."""
    global logger
    logger = Factory.get_logger('cronjob')

    parser = argparse.ArgumentParser(description=__doc__)
    required_args = parser.add_argument_group('required arguments')
    required_args.add_argument('-p', '--person-file',
                               dest='person_file',
                               required=True,
                               help='File containing person data-export '
                                    'from SAP.')
    parser.add_argument('--without-fok',
                        dest='use_fok',
                        action='store_false',
                        help='Do not use forretningsområdekode for checking '
                             'if a person should be imported. '
                             '(default: use.)')
    parser.set_defaults(use_fok=True)
    parser.add_argument('-c', '--commit',
                        dest='commit',
                        action='store_true',
                        help='Write changes to DB.')
    args = parser.parse_args()

    # Creating this locally costs about 20 seconds out of a 3 minute run.
    global const
    const = Factory.get("Constants")()
    global database
    database = Factory.get("Database")()
    database.cl_init(change_program='import_SAP')

    processed_persons = process_people(args.person_file, args.use_fok)
    clean_person_data(processed_persons)

    if args.commit:
        database.commit()
        logger.info("Committed all changes")
    else:
        database.rollback()
        logger.info("Rolled back all changes")
def main():
    """Write the OFK user cache file."""
    global db, co, ac, p, ou, et, logger

    logger = Factory.get_logger("cronjob")
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    ac = Factory.get('Account')(db)
    p = Factory.get('Person')(db)
    ou = Factory.get('OU')(db)
    et = Email.EmailTarget(db)

    txt_path = "/cerebrum/var/cache/txt"
    options, rest = getopt.getopt(sys.argv[1:], "t:", ["txt-path="])
    for option, value in options:
        if option in ("-t", "--txt-path"):
            # Override the default cache directory.
            txt_path = value

    # Load dicts with misc info.
    get_account_info()
    get_person_contact()

    # Dump OFK info
    out = SimilarSizeWriter("%s/ofk.txt" % txt_path, "w")
    out.max_pct_change = 10
    users = process_txt_file(out)
    out.close()
def main():
    """Build and synchronise FS-derived front-end groups."""
    global logger
    logger = Factory.get_logger("cronjob")

    opts, _ = getopt.getopt(sys.argv[1:], "d",
                            ("role-file=", "stprog-file=", "emne-file=",
                             "edu-file=", "undenh-file=", "undakt-file=",
                             "dryrun",))
    dryrun = False
    # One slot per input file option; all default to "".
    files = {"--role-file": "", "--stprog-file": "", "--emne-file": "",
             "--edu-file": "", "--undenh-file": "", "--undakt-file": ""}
    for opt, val in opts:
        if opt in ("-d", "--dryrun"):
            dryrun = True
        elif opt in files:
            files[opt] = val

    role_file = files["--role-file"]
    stprog_file = files["--stprog-file"]
    emne_file = files["--emne-file"]
    edu_file = files["--edu-file"]
    undenh_file = files["--undenh-file"]
    undakt_file = files["--undakt-file"]

    check_files_exist(role_file, stprog_file, emne_file, edu_file,
                      undenh_file, undakt_file)

    db = Factory.get("Database")()
    db.cl_init(change_program="pop-front-groups")
    fs_handler = FSAttributeHandler(db, stprog_file, emne_file,
                                    undenh_file, undakt_file)

    # get all the roles and assign them to group names
    roles = collect_roles(role_file, fs_handler)
    # get all the students and assign them to group names
    students = collect_student_info(edu_file, fs_handler)
    # force all FS members to Cerebrum person_ids
    fs_groups = remap_fnr_to_account_id(db, roles, students)
    # Make sure all CF groups exist
    create_fs_groups(db, fs_handler, fs_groups)
    # synchronise file information with cerebrum
    synchronize_groups(db, fs_groups)

    if dryrun:
        db.rollback()
        logger.debug("Rollback all changes")
    else:
        db.commit()
        logger.debug("Committed all changes")
def main(argv):
    """Start method for this script."""
    global logger
    logger = Factory.get_logger("console")
    logger.setLevel(logging.INFO)
    logger.info("Generating portal export")

    try:
        opts, _ = getopt.getopt(argv, "o:vh",
                                ["output-file=", "verbose", "help"])
    except getopt.GetoptError:
        usage()
        sys.exit(1)

    # Default values
    output_file = "portal.txt"
    verbose = False

    for opt, val in opts:
        if opt in ("-o", "--output-file"):
            output_file = val
        elif opt in ("-v", "--verbose"):
            # FIXME: make the logger log more? :)
            pass
        elif opt in ("-h", "--help"):
            usage()
            sys.exit(2)

    output_text(output_file=output_file)
def main():
    """Import AD data from a file into Cerebrum."""
    global db, constants, account, disk, host
    global dryrun, logger

    logger = Factory.get_logger("console")
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'f:d', ['file=', 'dryrun'])
    except getopt.GetoptError:
        usage()

    dryrun = False
    # BUG FIX: initialize infile so the "if not infile" check below cannot
    # raise NameError when -f/--file is not given.
    infile = None
    for opt, val in opts:
        if opt in ('-d', '--dryrun'):
            dryrun = True
        elif opt in ('-f', '--file'):
            infile = val

    if not infile:
        usage()

    db = Factory.get('Database')()
    db.cl_init(change_program='import_ad')
    constants = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    disk = Factory.get('Disk')(db)
    host = Factory.get('Host')(db)

    process_line(infile)
    attempt_commit()
def main():
    """Create accounts for candidates with the requested affiliations."""
    global logger
    logger = Factory.get_logger("cronjob")

    opts, _ = getopt.getopt(sys.argv[1:], "a:d", ("affiliation=", "dryrun",))
    const = Factory.get("Constants")()

    dryrun = False
    affiliations = set()
    for opt, val in opts:
        if opt in ("-a", "--affiliation"):
            aff = const.human2constant(val, const.PersonAffiliation)
            # Silently skip strings that do not name a known affiliation.
            if aff is not None:
                affiliations.add(aff)
        elif opt in ("-d", "--dryrun"):
            dryrun = True

    db = Factory.get("Database")()
    db.cl_init(change_program="create-account")
    candidates = collect_candidates(db, affiliations)
    create_accounts(db, candidates)

    if dryrun:
        db.rollback()
        logger.info("Rolled back all changes")
    else:
        db.commit()
        logger.info("Committed all changes")
def main():
    """Set up argument parsing and open the report output stream."""
    try:
        import argparse
    except ImportError:
        import Cerebrum.extlib.argparse as argparse

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-s', '--spread', dest='spread',
                        help='Name of spread to filter accounts by.',
                        required=True)
    parser.add_argument('-o', '--output', dest='output_file',
                        help='Output file for report. Default is stdout.')
    parser.add_argument('-i', '--include-expired', action='store_true',
                        help='Include expired accounts.')
    parser.add_argument('-l', '--logger-name', dest='logname',
                        default='cronjob',
                        help='Specify logger (default: cronjob).')
    args = parser.parse_args()
    logger = Factory.get_logger(args.logname)

    if args.output_file is not None:
        try:
            output = open(args.output_file, 'w')
        except IOError as e:
            logger.error(e)
            sys.exit(1)
    else:
        # BUG FIX: the original left ``output`` unassigned when -o was
        # omitted, despite the help text promising stdout as default.
        output = sys.stdout
def main():
    """Import groups from a file into Cerebrum."""
    global db, constants, account_init, group, posixgroup
    global default_creator_id
    global dryrun, logger

    logger = Factory.get_logger("console")
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'f:d', ['file=', 'dryrun'])
    except getopt.GetoptError:
        usage()

    dryrun = False
    # BUG FIX: initialize infile and bail out via usage() when -f/--file is
    # missing; the original raised NameError inside process_line() instead.
    infile = None
    for opt, val in opts:
        if opt in ('-d', '--dryrun'):
            dryrun = True
        elif opt in ('-f', '--file'):
            infile = val
    if not infile:
        usage()

    db = Factory.get('Database')()
    db.cl_init(change_program='import_groups')
    constants = Factory.get('Constants')(db)
    account_init = Factory.get('Account')(db)
    account_init.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    default_creator_id = account_init.entity_id
    group = Factory.get('Group')(db)
    posixgroup = PosixGroup.PosixGroup(db)

    process_line(infile)
def main():
    """Parse options and import AD data into Cerebrum."""
    global db, constants, account, disk, host
    global dryrun, logger

    logger = Factory.get_logger("console")
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'f:d', ['file=', 'dryrun'])
    except getopt.GetoptError:
        usage()

    dryrun = False
    # BUG FIX: without this initialization the "if not infile" test below
    # raised NameError whenever -f/--file was absent.
    infile = None
    for opt, val in opts:
        if opt in ('-d', '--dryrun'):
            dryrun = True
        elif opt in ('-f', '--file'):
            infile = val

    if not infile:
        usage()

    db = Factory.get('Database')()
    db.cl_init(change_program='import_ad')
    constants = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    disk = Factory.get('Disk')(db)
    host = Factory.get('Host')(db)

    process_line(infile)
    attempt_commit()
def main(): global logger logger = Factory.get_logger("cronjob") try: opts, args = getopt.getopt(sys.argv[1:], 't:s:h:d:', ['help']) except getopt.GetoptError: usage() fname = spread = hostname = diskname = None for opt, val in opts: if opt in ('--help',): usage(0) elif opt in ('-t',): fname = val elif opt in ('-s',): spread = co.Spread(val) try: int(spread) except Errors.NotFoundError: print "Unknown spread code:", val elif opt in ('-d',): diskname = val elif opt in ('-h',): hostname = val if not opts or (hostname and diskname) or fname is None or spread is None: usage() list_quotas(fname, hostname, diskname, spread)
def main():
    """Drive the SAP OU import from the command line."""
    global dryrun
    global logger

    logger = Factory.get_logger("cronjob")
    options, rest = getopt.getopt(sys.argv[1:], "do:", ["dryrun", "ou-file="])

    dryrun = False
    filename = None
    for option, value in options:
        if option in ("-d", "--dryrun"):
            dryrun = True
        elif option in ("-o", "--ou-file"):
            # value is "<source system>:<file name>"
            source_system, filename = value.split(":", 1)

    if not filename:
        logger.error("Missing OU input file")
        sys.exit(1)

    database = Factory.get("Database")()
    database.cl_init(change_program="import_SAP")
    ou_parser = system2parser(source_system)
    process_OUs(database, ou_parser(filename, logger))
def main():
    """Generate the mail data cache file."""
    global db, constants, account
    global logger, outfile, person

    outfile = None
    logger = Factory.get_logger("cronjob")
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'f:', ['file='])
    except getopt.GetoptError:
        usage()

    dryrun = False
    for opt, val in opts:
        if opt in ('-f', '--file'):
            outfile = val
    # Fall back to the standard cache location.
    if outfile is None:
        outfile = '/cerebrum/var/cache/MAIL/mail_data.dat'

    db = Factory.get('Database')()
    constants = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    person = Factory.get('Person')(db)

    email_data = generate_email_data()
    write_email_file(email_data, outfile)
def main(): global logger logger = Factory.get_logger("cronjob") try: opts, args = getopt.getopt(sys.argv[1:], 't:s:h:d:', ['help']) except getopt.GetoptError: usage() fname = spread = hostname = diskname = None for opt, val in opts: if opt in ('--help', ): usage(0) elif opt in ('-t', ): fname = val elif opt in ('-s', ): spread = co.Spread(val) try: int(spread) except Errors.NotFoundError: print "Unknown spread code:", val elif opt in ('-d', ): diskname = val elif opt in ('-h', ): hostname = val if not opts or (hostname and diskname) or fname is None or spread is None: usage() list_quotas(fname, hostname, diskname, spread)
def main():
    """Re-encode double-pickled change_params in the change log."""
    parser = ArgumentParser()
    parser.add_argument('-c', '--commit', action='store_true',
                        help='Do commit')
    commit = parser.parse_args().commit

    db = Factory.get('Database')(client_encoding='UTF-8')
    loads = pickle.loads
    logger = Factory.get_logger('console')

    changes = {}
    for row in db.get_log_events():
        cp = row['change_params']
        if cp is None:
            continue
        try:
            # The payload is pickled twice; unwrap both layers.
            val = loads(loads(cp.encode('ISO-8859-1')))
            changes[row['change_id']] = val
            logger.info('Fixing row %s: %s', row['change_id'], val)
        except TypeError:
            pass

    # Do this last, because it will lock change log for others
    for change_id, params in changes.items():
        db.update_log_event(change_id, params)

    if commit:
        logger.info('Done. Committing changes')
        db.commit()
    else:
        logger.info('Done. Rolling back changes')
        db.rollback()
def main(): global db, logger, const, emailsrv logger = Factory.get_logger("console") db = Factory.get("Database")() const = Factory.get("Constants")(db) db.cl_init(change_program="email_dom") creator = Factory.get("Account")(db) creator.clear() creator.find_by_name('bootstrap_account') infile = None emailsrv = False disk_in = host_in = False try: opts, args = getopt.getopt(sys.argv[1:], 'f:h:d:e', ['file=', 'disk=', 'host=', 'email-server', 'help', 'dryrun']) except getopt.GetoptError, e: print e usage(1)
def main():
    """Maintain the automatic Ifi groups."""
    global db, co, logger, group_creator, dryrun

    db = Factory.get('Database')()
    db.cl_init(change_program='ifi_auto')
    co = Factory.get('Constants')(db)
    logger = Factory.get_logger("cronjob")

    dryrun = False
    try:
        opts, args = getopt.getopt(sys.argv[1:], '?', ['dryrun', 'help'])
    except getopt.GetoptError:
        usage()
    for opt, val in opts:
        if opt == '--dryrun':
            dryrun = True
        if opt in ('-?', '--help'):
            usage(0)

    supergroup = "internal:uio.no:fs:{autogroup}"
    fg_supergroup = "internal:uio.no:fs:{ifi_auto_fg}"
    group_creator = get_account(cereconf.INITIAL_ACCOUNTNAME).entity_id
    process_groups(supergroup, fg_supergroup)

    if not dryrun:
        logger.debug("commit...")
        db.commit()
    logger.info("All done")
def main(args=None):
    """Build the name-difference HTML report and write it atomically."""
    ENCODING = 'utf-8'
    logger = Factory.get_logger('cronjob')
    db = Factory.get(b'Database')()
    co = Factory.get(b'Constants')(db)

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--output', default='/tmp/report.html')
    commands = parser.add_subparsers(help="available commands")

    # name
    name_cmd = commands.add_parser(
        'name',
        help="Generate report on differences in names.")
    name_cmd.set_defaults(func=compare_names, check_system=co.system_sap)
    name_cmd.add_argument(
        'source_system',
        type=partial(argparse_const, db, co.AuthoritativeSystem))

    args = parser.parse_args(args)
    command = args.func
    del args.func
    # Other commands?

    logger.info('Generating report ({!s})'.format(args.output))
    af = AtomicFileWriter(args.output)
    report = command(db, logger, args)
    report.find('head/meta[@charset]').set('charset', ENCODING)
    af.write("<!DOCTYPE html>\n")
    af.write(ElementTree.tostring(report, encoding=ENCODING))
    af.close()
    logger.info('Done')
def main():
    """Set or clean up standard ePhorte roles."""
    global dryrun, constants, person, ephorte_role, logger

    dryrun = False
    db = Factory.get('Database')()
    db.cl_init(change_program="set_eph_std")
    constants = Factory.get('Constants')(db)
    person = Factory.get('Person')(db)
    ephorte_role = EphorteRole(db)
    logger = Factory.get_logger("console")

    try:
        opts, args = getopt.getopt(sys.argv[1:], '',
                                   ['dryrun', 'set', 'remove', 'help'])
    except getopt.GetoptError:
        usage()

    for opt, val in opts:
        if opt == '--dryrun':
            logger.info("assuming dryrun-mode")
            dryrun = True
        elif opt == '--help':
            logger.debug("printing help text")
            usage()
        elif opt == '--set':
            set_standard_role()
        elif opt == '--remove':
            remove_double_std_roles()

    if dryrun:
        db.rollback()
        logger.info("DRYRUN: Roll back changes")
    else:
        db.commit()
        logger.info("Committing changes")
def main(): global logger filename = None spread_str = None person = {} logger = Factory.get_logger("console") logger.info("Starting ephorte_dump") try: opts, args = getopt.getopt(sys.argv[1:], 's:o:') except getopt.GetoptError: usage(1) if args: usage(1) for opt, val in opts: if opt in ('-o', '--output-file'): filename = val if opt in ('-s', '--spread'): spread_str = val if spread_str is not None and filename is not None: spread_id = int(getattr(co, spread_str)) spread = const.Spread(spread_id) if spread.entity_type is not const.entity_account: print "spread_type has to be account spread." usage(0) ephorte_export(spread_id, filename) else: usage(0)
def main():
    """Main driver for the file generation."""
    global xmlwriter, db, const, logger

    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    logger = Factory.get_logger("cronjob")

    try:
        opts, args = getopt.getopt(sys.argv[1:], "o:", ["out-file="])
    except getopt.GetoptError:
        usage(1)

    filename = None
    for opt, val in opts:
        if opt in ('-o', '--out-file'):
            filename = val
    if not filename:
        usage(1)

    stream = AtomicFileWriter(filename)
    xmlwriter = xmlprinter.xmlprinter(stream,
                                      indent_level=2,  # human-friendly output
                                      data_mode=True,
                                      input_encoding="UTF-8")

    # Get information about persons
    persons = fetch_person_data()
    # Get information about courses (kurs)
    courses = fetch_course_data()
    # Generate and write document
    generate_document(persons, courses)
    stream.close()
def main():
    """Import non-personal accounts/groups from a file."""
    global db, constants, account, group
    global default_creator_id, default_group_id
    global dryrun, logger

    logger = Factory.get_logger("console")
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'f:d', ['file=', 'dryrun'])
    except getopt.GetoptError:
        usage()

    dryrun = False
    # BUG FIX: initialize infile; the original raised NameError at the
    # "if infile is None" check when -f/--file was not supplied.
    infile = None
    for opt, val in opts:
        if opt in ('-d', '--dryrun'):
            dryrun = True
        elif opt in ('-f', '--file'):
            infile = val
    if infile is None:
        usage()

    db = Factory.get('Database')()
    db.cl_init(change_program='import_nonpers')
    constants = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    group = Factory.get('Group')(db)
    account.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    default_creator_id = account.entity_id

    process_line(infile)
    attempt_commit()
def main():
    """Main driver for the file generation."""
    global xmlwriter, db, const, logger

    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    logger = Factory.get_logger("cronjob")

    try:
        options, rest = getopt.getopt(sys.argv[1:], "o:", ["out-file="])
    except getopt.GetoptError:
        usage(1)

    filename = None
    for option, value in options:
        if option in ('-o', '--out-file'):
            filename = value
    if not filename:
        usage(1)

    stream = AtomicFileWriter(filename)
    xmlwriter = xmlprinter.xmlprinter(
        stream,
        indent_level=2,  # human-friendly output
        data_mode=True,
        input_encoding="UTF-8")

    # Collect person and course (kurs) data, then emit the document.
    persons = fetch_person_data()
    courses = fetch_course_data()
    generate_document(persons, courses)
    stream.close()
def main():
    """Produce the export_ACL XML report."""
    global logger, const, cerebrum_db, xmlwriter

    logger = Factory.get_logger("cronjob")
    logger.info("generating a new XML for export_ACL")
    cerebrum_db = Factory.get("Database")()
    const = Factory.get("Constants")(cerebrum_db)

    # BUG FIX: getopt long options are listed without the leading dashes.
    # The original "--out-file=" entry made getopt match "----out-file"
    # and reject the intended "--out-file".
    opts, rest = getopt.getopt(sys.argv[1:], "f:", ["out-file="])
    filename = None
    for option, value in opts:
        if option in ("-f", "--out-file"):
            filename = value

    _cache_id_types()
    stream = AtomicFileWriter(filename)
    xmlwriter = xmlprinter.xmlprinter(stream,
                                      indent_level=2,  # Human-readable output
                                      data_mode=True,
                                      input_encoding="latin1")
    generate_report()
    stream.close()
def init_globals():
    """Initialize module globals and parse the command line."""
    global db, const, logger, fnr2account_id
    global dump_dir, dryrun, immediate_evu_expire

    # Handle upper/lowercasing of strings containing Norwegian characters.
    locale.setlocale(locale.LC_CTYPE, ('en_US', 'iso88591'))

    dump_dir = cereconf.FS_DATA_DIR
    dryrun = False
    logger = Factory.get_logger("cronjob")
    immediate_evu_expire = False

    opts, _ = getopt.getopt(sys.argv[1:], "d:r",
                            ["dump-dir=", "dryrun", "immediate-evu-expire"])
    for opt, val in opts:
        if opt in ("-d", "--dump-dir"):
            dump_dir = val
        elif opt in ("-r", "--dryrun"):
            dryrun = True
        elif opt == "--immediate-evu-expire":
            immediate_evu_expire = True

    db = Factory.get("Database")()
    db.cl_init(change_program='pop_extern_grps')
    const = Factory.get("Constants")(db)

    fnr2account_id = {}
    prefetch_primaryusers()
def parse_it():
    """Argument parsing."""
    import argparse
    from Cerebrum.Utils import Factory
    logger = Factory.get_logger('cronjob')

    parser = argparse.ArgumentParser(
        description='Delete person data on grounds of originating source'
                    ' systems')
    parser.add_argument('--commit', action='store_true',
                        help='Run in commit-mode (default: off)')
    parser.add_argument('--commit-threshold', default=1, type=int,
                        metavar='N',
                        help='Commit per N change (default: 1)')
    # BUG FIX: default to an empty list; without it args.systems is None
    # when --systems is omitted and the loop below raises TypeError.
    parser.add_argument('--systems', nargs='+', metavar='SYSTEM', default=[],
                        help='Systems that should be cleaned (e.g. SAP)')
    parser.add_argument('--grace', default=360, type=int, metavar='N',
                        help='Don\'t clean persons who has lost their'
                             ' affiliation in the last N days')
    args = parser.parse_args()

    # Per-system cleaner function and data selectors.
    system_to_cleaner = {'FS': update_person,
                         'SAP': update_person_with_titles,
                         'EKSTENS': update_person}
    system_to_selectors = {'FS': [select_addresses, select_contact_info,
                                  select_names],
                           'SAP': [select_addresses, select_contact_info,
                                   select_titles, select_names],
                           'EKSTENS': [select_addresses, select_contact_info,
                                       select_names]}

    for x in args.systems:
        if x not in system_to_cleaner:
            raise NotImplementedError(
                'Cleaner for system {} is not implemented'.format(x))
        elif x not in system_to_selectors:
            raise NotImplementedError(
                'Selector for system {} is not implemented'.format(x))

    clean_it(parser.prog, args.commit, logger, args.systems,
             system_to_cleaner, system_to_selectors,
             args.commit_threshold, args.grace)
def main(): global logger filename = None spread_str = None person = {} logger = Factory.get_logger("console") logger.info("Starting ephorte_dump") try: opts, args = getopt.getopt(sys.argv[1:], 's:o:') except getopt.GetoptError: usage(1) if args: usage(1) for opt, val in opts: if opt in ('-o', '--output-file'): filename = val if opt in ('-s', '--spread'): spread_str = val if spread_str is not None and filename is not None: spread_id = int(getattr(co, spread_str)) spread = const.Spread(spread_id) if spread.entity_type is not const.entity_account: print "spread_type has to be account spread." usage(0) ephorte_export(spread_id, filename) else: usage(0)
def init_globals():
    """Initialize globals for the Fronter export and parse options."""
    global db, const, logger, group, users_only, ou, person

    db = Factory.get("Database")()
    const = Factory.get("Constants")(db)
    group = Factory.get("Group")(db)
    person = Factory.get("Person")(db)
    ou = Factory.get("OU")(db)
    logger = Factory.get_logger("cronjob")

    fsdb = Database.connect(user='******',
                            service=cereconf.FS_DATABASE_NAME,
                            DB_driver='cx_Oracle')
    fs = FS(fsdb)

    cf_dir = '/cerebrum/var/cache/Fronter'
    try:
        # BUG FIX: 'uten-passord' was handled in the loop below but missing
        # from the accepted long options, so --uten-passord always raised
        # GetoptError.
        opts, args = getopt.getopt(sys.argv[1:], '',
                                   ['debug-file=', 'debug-level=',
                                    'cf-dir=', 'uten-passord'])
    except getopt.GetoptError:
        usage(1)

    # NOTE(review): debug_file is derived from the *default* cf_dir; a later
    # --cf-dir does not relocate the default debug file.  Kept as-is.
    debug_file = os.path.join(cf_dir, "x-import.log")
    debug_level = 4
    set_pwd = True
    users_only = False

    for opt, val in opts:
        if opt == '--debug-file':
            debug_file = val
        elif opt == '--debug-level':
            debug_level = val
        elif opt == '--uten-passord':
            set_pwd = False
        elif opt == '--cf-dir':
            cf_dir = val
        else:
            # BUG FIX: the original used the three-expression raise form
            # ("raise ValueError, msg, (opt,)") whose third expression must
            # be a traceback object -- it raised TypeError, not ValueError.
            raise ValueError("Invalid argument: %r" % (opt,))

    global fronter
    fronter = Fronter(db, const, logger=logger)

    filename = os.path.join(cf_dir, 'test.xml')
    if len(args) == 1:
        filename = args[0]
    elif len(args) != 0:
        usage(2)

    global fxml
    fxml = FronterXML(filename,
                      cf_dir=cf_dir,
                      debug_file=debug_file,
                      debug_level=debug_level,
                      fronter=fronter,
                      include_password=set_pwd)

    # Find `uname` -> account-data for all users.
    global new_users
    new_users = get_new_users()
def init_globals():
    """Set up module globals, locale and command line options."""
    global db, const, logger, fnr2account_id
    global dump_dir, dryrun, immediate_evu_expire

    # Handle upper/lowercasing of strings with Norwegian characters.
    locale.setlocale(locale.LC_CTYPE, ('en_US', 'iso88591'))

    dump_dir = cereconf.FS_DATA_DIR
    dryrun = False
    logger = Factory.get_logger("cronjob")
    immediate_evu_expire = False

    options, rest = getopt.getopt(sys.argv[1:], "d:r",
                                  ["dump-dir=",
                                   "dryrun",
                                   "immediate-evu-expire"])
    for option, value in options:
        if option in ("-d", "--dump-dir"):
            dump_dir = value
        elif option in ("-r", "--dryrun"):
            dryrun = True
        elif option in ("--immediate-evu-expire",):
            immediate_evu_expire = True

    db = Factory.get("Database")()
    db.cl_init(change_program='pop_extern_grps')
    const = Factory.get("Constants")(db)

    fnr2account_id = {}
    prefetch_primaryusers()
def parse_it():
    """Argument parsing."""
    import argparse
    from Cerebrum.Utils import Factory
    logger = Factory.get_logger('cronjob')

    parser = argparse.ArgumentParser(
        description='Delete person data on grounds of originating source'
                    ' systems')
    parser.add_argument('--commit', action='store_true',
                        help='Run in commit-mode (default: off)')
    parser.add_argument('--commit-threshold', default=1, type=int,
                        metavar='N',
                        help='Commit per N change (default: 1)')
    # BUG FIX: supply default=[]; otherwise args.systems is None when
    # --systems is omitted and iterating it raises TypeError.
    parser.add_argument('--systems', nargs='+', metavar='SYSTEM', default=[],
                        help='Systems that should be cleaned (e.g. SAP)')
    parser.add_argument('--grace', default=360, type=int, metavar='N',
                        help='Don\'t clean persons who has lost their'
                             ' affiliation in the last N days')
    args = parser.parse_args()

    # Which cleaner / selectors apply to each known source system.
    system_to_cleaner = {'FS': update_person,
                         'SAP': update_person_with_titles,
                         'EKSTENS': update_person}
    system_to_selectors = {'FS': [select_addresses, select_contact_info,
                                  select_names],
                           'SAP': [select_addresses, select_contact_info,
                                   select_titles, select_names],
                           'EKSTENS': [select_addresses, select_contact_info,
                                       select_names]}

    for x in args.systems:
        if x not in system_to_cleaner:
            raise NotImplementedError(
                'Cleaner for system {} is not implemented'.format(x))
        elif x not in system_to_selectors:
            raise NotImplementedError(
                'Selector for system {} is not implemented'.format(x))

    clean_it(parser.prog, args.commit, logger, args.systems,
             system_to_cleaner, system_to_selectors,
             args.commit_threshold, args.grace)
def main():
    """Maintain the automatic Ifi groups."""
    global db, co, logger, group_creator, dryrun

    # handle upper and lower casing of strings with Norwegian letters.
    locale.setlocale(locale.LC_CTYPE, ('en_US', 'iso88591'))

    db = Factory.get('Database')()
    db.cl_init(change_program='ifi_auto')
    co = Factory.get('Constants')(db)
    logger = Factory.get_logger("cronjob")

    dryrun = False
    try:
        options, rest = getopt.getopt(sys.argv[1:], '?', ['dryrun', 'help'])
    except getopt.GetoptError:
        usage()
    for option, value in options:
        if option == '--dryrun':
            dryrun = True
        if option in ('-?', '--help'):
            usage(0)

    supergroup = "internal:uio.no:fs:{autogroup}"
    fg_supergroup = "internal:uio.no:fs:{ifi_auto_fg}"
    group_creator = get_account(cereconf.INITIAL_ACCOUNTNAME).entity_id
    process_groups(supergroup, fg_supergroup)

    if not dryrun:
        logger.debug("commit...")
        db.commit()
    logger.info("All done")
def main():
    # Entry point: import FS student data into Cerebrum.
    #
    # Initializes a number of module globals, creates the FS group if it is
    # missing, loads the study programme -> sko mapping, then processes the
    # person file and optionally removes stale affiliations.
    global verbose, ou, db, co, logger, gen_groups, group, \
        old_aff, include_delete, no_name
    verbose = 0
    gen_groups = False
    include_delete = False
    logger = Factory.get_logger("cronjob")
    opts, args = getopt.getopt(
        sys.argv[1:],
        'vp:s:gdf',
        ['verbose', 'person-file=', 'studieprogram-file=',
         'generate-groups', 'include-delete', ])
    # Defaults come from module-level configuration.
    personfile = default_personfile
    studieprogramfile = default_studieprogramfile
    for opt, val in opts:
        if opt in ('-v', '--verbose'):
            verbose += 1
        elif opt in ('-p', '--person-file'):
            personfile = val
        elif opt in ('-s', '--studieprogram-file'):
            studieprogramfile = val
        elif opt in ('-g', '--generate-groups'):
            gen_groups = True
        elif opt in ('-d', '--include-delete'):
            include_delete = True
    if "system_fs" not in cereconf.SYSTEM_LOOKUP_ORDER:
        print "Check your config, SYSTEM_LOOKUP_ORDER is wrong!"
        sys.exit(1)
    logger.info("Started")
    db = Factory.get('Database')()
    db.cl_init(change_program='import_FS')
    ou = Factory.get('OU')(db)
    co = Factory.get('Constants')(db)
    group = Factory.get('Group')(db)
    try:
        group.find_by_name(group_name)
    except Errors.NotFoundError:
        # The FS group does not exist yet -- create it, owned by the
        # bootstrap account.
        group.clear()
        ac = Factory.get('Account')(db)
        ac.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
        group.populate(ac.entity_id, co.group_visibility_internal,
                       group_name, group_desc)
        group.write_db()
    if getattr(cereconf, "ENABLE_MKTIME_WORKAROUND", 0) == 1:
        logger.warn("Warning: ENABLE_MKTIME_WORKAROUND is set")
    # Map each study programme code to its responsible sko.
    for s in StudentInfo.StudieprogDefParser(studieprogramfile):
        studieprog2sko[s['studieprogramkode']] = \
            _get_sko(s, 'faknr_studieansv', 'instituttnr_studieansv',
                     'gruppenr_studieansv')
    if include_delete:
        # Snapshot current FS affiliations so stale ones can be removed.
        old_aff = _load_cere_aff()
    StudentInfo.StudentInfoParser(personfile, process_person_callback,
                                  logger)
    if include_delete:
        rem_old_aff()
    db.commit()
    # no_name is maintained elsewhere (presumably by
    # process_person_callback) -- TODO confirm.
    logger.info("Found %d persons without name.", no_name)
    logger.info("Completed")
def main():
    """Set up the cronjob logger and parse command line options."""
    # NOTE(review): this fragment looks truncated -- ``ofile`` is set but
    # never used, and no option processing follows the getopt call.
    global logger
    logger = Factory.get_logger("cronjob")
    ofile = None
    try:
        opts, args = getopt.getopt(sys.argv[1:], "ho:", ("help", "outfile="))
    # Python 2 except-binding syntax.
    except getopt.GetoptError, e:
        # usage() reports the parse error; presumably exits -- confirm.
        usage(str(e))
def main():
    """Parse options; log to the console when --force is given."""
    global max_change
    # Old ``cond and a or b`` idiom: "console" if --force was given,
    # otherwise "cronjob".  Safe here because both strings are truthy.
    # NOTE(review): ``logger`` is assigned without a ``global`` statement,
    # so it is local to main() -- verify module-level users get it elsewhere.
    logger = Factory.get_logger("--force" in sys.argv and "console" or "cronjob")
    try:
        opts, args = getopt.getopt(sys.argv[1:], "", ("force", "max-change="))
    except getopt.GetoptError, e:
        # usage() reports the parse error; presumably exits -- confirm.
        usage(str(e))
def main(): global logger logger = Factory.get_logger("cronjob") ofile = None try: opts, args = getopt.getopt(sys.argv[1:], "ho:", ("help", "outfile=")) except getopt.GetoptError, e: usage(str(e))
def main(): global max_change logger = Factory.get_logger( "--force" in sys.argv and "console" or "cronjob") try: opts, args = getopt.getopt(sys.argv[1:], "", ("force", "max-change=")) except getopt.GetoptError, e: usage(str(e))
def main():
    """Import guard people and/or accounts from the file given with -f."""
    global db, constants, account, person, ou, group
    global default_creator_id
    global dryrun, logger

    logger = Factory.get_logger("cronjob")
    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            'f:hpad',
            ['file=', 'dryrun', 'help', 'people', 'accounts'])
    except getopt.GetoptError:
        usage(4)

    dryrun = False
    import_persons = False
    create_accs = False
    # BUG FIX: this was initialised to "" while the guard below tests
    # ``infile is None`` -- the missing-file check could never trigger.
    infile = None
    for opt, val in opts:
        if opt in ('-d', '--dryrun'):
            dryrun = True
        elif opt in ('-f', '--file'):
            infile = val
        elif opt in ('-p', '--people'):
            import_persons = True
        elif opt in ('-a', '--accounts'):
            create_accs = True
        elif opt in ('-h', '--help'):
            usage(0)
        else:
            usage(1)
    if infile is None:
        usage(2)
    if not (import_persons or create_accs):
        usage(3)

    db = Factory.get('Database')()
    db.cl_init(change_program='import_guard')
    constants = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    group = Factory.get('Group')(db)
    person = Factory.get('Person')(db)
    ou = Factory.get("OU")(db)

    # New entities are created on behalf of the bootstrap account.
    account.clear()
    account.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    default_creator_id = account.entity_id

    if import_persons:
        create_objects(infile, object_type='people')
    if create_accs:
        create_objects(infile, object_type='accounts')
    attempt_commit()
def main(argv):
    """Set up bofhd permissions: system groups, users and opsets.

    NOTE(review): several string literals below look like scrubbed
    secrets ("******"), and the getopt long-option tuple is not valid
    Python as it stands -- restore the real values before running.
    """
    global logger
    logger = Factory.get_logger("console")

    opts, junk = getopt.getopt(argv[1:], "w:cps:",
                               ("webapp-user="******"with-commit",
                                "permissions-only",
                                "sudoers-group=",))
    webapp_user = "******"
    sudoers_group = cereconf.BOFHD_SUDOERS_GROUP
    superuser_group = cereconf.BOFHD_SUPERUSER_GROUP
    with_commit = False
    permissions_only = False
    for option, value in opts:
        # account name for the php front-end
        if option in ("-w", "--webapp-user",):
            webapp_user = normalize_name(value)
        # do not create any entities -- just opsets and auth_roles.
        elif option in ("-p", "--permissions-only",):
            permissions_only = True
        # whether we should commit the changes
        elif option in ("-c", "--with-commit",):
            with_commit = True
        # group name for group of accounts capable of su-ing to
        # different users
        elif option in ("-s", "--sudoers-group",):
            sudoers_group = normalize_name(value)

    db = Factory.get("Database")()
    db.cl_init(change_program="assign_permissions")
    # NOTE(review): format string has one %s but two arguments -- the
    # first placeholder was probably lost in scrubbing; verify.
    logger.debug("webapp-user='******'; sudoers-group='%s'",
                 webapp_user, sudoers_group)

    if not permissions_only:
        # Create system groups
        for group_name in (superuser_group, sudoers_group,):
            create_group(group_name, db)
        # Create system users
        for username in (webapp_user,):
            create_user(username, db)

    create_permissions(superuser_group, sudoers_group, webapp_user, db)
    if with_commit:
        db.commit()
        logger.debug("Committed all changes")
    else:
        db.rollback()
        logger.debug("Rolled back all changes")
def main(): """ Start method for this script. """ global logger, db db = Factory.get("Database")() logger = Factory.get_logger("cronjob") logger.info("Generating UA dump") try: options, rest = getopt.getopt(sys.argv[1:], "i:o:hdes", [ "input-file=", "output-directory=", "help", "distribute", "employees", "students", ]) except getopt.GetoptError: logger.exception("foo") usage(1) output_directory = None sysname = None person_file = None distribute = False do_employees = False do_students = False for option, value in options: if option in ("-o", "--output-directory"): output_directory = value elif option in ("-i", "--input-file"): sysname, person_file = value.split(":") elif option in ("-h", "--help"): usage(2) elif option in ("-d", "--distribute"): distribute = True elif option in ("-e", "--employees"): do_employees = True elif option in ("-s", "--students"): do_students = True output_file = AtomicFileWriter( os.path.join(output_directory, "uadata.new"), "w") generate_output(output_file, do_employees, do_students, sysname, person_file) output_file.close() diff_file = "uadata.%s" % time.strftime("%Y-%m-%d") do_sillydiff(output_directory, "uadata.old", "uadata.new", diff_file) os.rename(os.path.join(output_directory, "uadata.new"), os.path.join(output_directory, "uadata.old")) if distribute: passwd = db._read_password(cereconf.UA_FTP_HOST, cereconf.UA_FTP_UNAME) ftpput(cereconf.UA_FTP_HOST, cereconf.UA_FTP_UNAME, passwd, output_directory, diff_file, "ua-lt")
def main():
    """Import account home directories from the file given with -f."""
    global db, co, account, default_creator_id
    global disk, spread, host, dryrun, logger, make_disk

    logger = Factory.get_logger("console")
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'f:h:s:d',
                                   ['file=', 'host=', 'spread=', 'dryrun'])
    except getopt.GetoptError:
        usage()

    dryrun = False
    make_disk = False
    infile = given_host = given_spread = None
    for option, value in opts:
        if option in ('-d', '--dryrun'):
            dryrun = True
        elif option in ('-f', '--file'):
            infile = value
        elif option in ('-h', '--host'):
            given_host = value
        elif option in ('-s', '--spread'):
            given_spread = value
    if not infile or not given_spread:
        usage()

    db = Factory.get('Database')()
    db.cl_init(change_program='import_homes')
    co = Factory.get('Constants')(db)
    account = Factory.get('Account')(db)
    disk = Factory.get('Disk')(db)
    host = Factory.get('Host')(db)

    # A host was named on the command line: disks may have to be created.
    if given_host:
        make_disk = True
        try:
            host.find_by_name(given_host)
        except Errors.NotFoundError:
            logger.error("No host %s found" % given_host)
            sys.exit(1)

    # Map the spread name to its constant.
    try:
        spread = getattr(co, given_spread)
    except AttributeError:
        logger.error("No spread %s defined" % given_spread)
        sys.exit(2)

    account.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    default_creator_id = account.entity_id
    process_line(infile)
    attempt_commit()
def build_account_events(db, client, account_ids, ad_acc_spread,
                         group_postfix, path_req_disks, acc_attrs,
                         show_diff, password_sync=False):
    """Build the list of AD sync events for the given accounts.

    In a password sync every account gets a 'modify' event for its
    password attribute.  Otherwise each account's Cerebrum values are
    compared with its AD LDAP values: out-of-sync accounts get 'modify'
    events, accounts missing from AD get 'add' events, and accounts
    whose quarantine action is 'skip' are ignored.
    """
    account_data = [df.get_crb_account_data(db, account_id, ad_acc_spread)
                    for account_id in account_ids]
    ad_values = [mappers.crb_acc_values_to_ad_values(
                     data, path_req_disks, client.config.nis_domain,
                     group_postfix)
                 for data in account_data]
    logger = Factory.get_logger("console")

    events = []
    if password_sync:
        for acc in ad_values:
            events.append(build_event_dict(acc['account_id'], 'account',
                                           'modify', attrs=['password']))
        return events

    skipped = 0
    desynced = []
    not_in_ad = []
    for acc in ad_values:
        # Quarantined accounts marked 'skip' are left alone.
        if acc.get('quarantine_action') == 'skip':
            skipped += 1
            continue
        ldap_values = mappers.format_ldap_acc_data(
            df.get_ad_ldap_acc_values(client, acc['username']),
            acc_attrs)
        if acc['username'] not in ldap_values:
            not_in_ad.append(acc['account_id'])
            continue
        if not equal(acc, ldap_values[acc['username']], acc_attrs,
                     show_diff=show_diff, entity_id=acc['account_id'],
                     entity_type='account'):
            desynced.append(acc['account_id'])

    events.extend(build_event_dict(acc_id, 'account', 'modify')
                  for acc_id in desynced)
    events.extend(build_event_dict(acc_id, 'account', 'add')
                  for acc_id in not_in_ad)
    generate_stats(logger, 'accounts', len(desynced), len(not_in_ad),
                   skipped=skipped)
    return events
def main():
    """Generate the export file named with -f/--file."""
    global logger
    logger = Factory.get_logger("cronjob")

    options, rest = getopt.getopt(sys.argv[1:], "f:", ("file=", ))
    filename = None
    for option, value in options:
        if option in ("-f", "--file"):
            filename = value

    # BUG FIX: validation used ``assert``, which is silently stripped when
    # Python runs with -O.  Exit explicitly with the message instead.
    if not filename:
        sys.exit("Must provide output filename")

    generate_file(filename)
def main(): logger = Factory.get_logger("cronjob") # The script is designed to use the mail-module. use_mail_module = True config = ofile = None try: opts, args = getopt.getopt(sys.argv[1:], "ho:m", ("help", "org=", "omit-mail-module")) except getopt.GetoptError, e: usage(str(e))
def __init__(self, max_age=None, db=None, logger=None):
    """Initialize the cache object.

    :param max_age: maximum permitted age of the cached data, or None
    :param db: database connection; a fresh one is created if not given
    :param logger: logger to use; defaults to the 'cronjob' logger
    """
    # Subclasses must define a ``name`` attribute before this runs.
    assert hasattr(self, 'name')
    if not db:
        db = Factory.get('Database')()
    self.db = db
    if not logger:
        logger = Factory.get_logger('cronjob')
    self.logger = logger
    self.max_age = max_age
    self.when = None
    self.filetype = 'tmp'
    self.clear_data()
    self.logger.debug("Initialized cache {!r}, max age is {!r}".format(
        self.name, self.max_age))
def main():
    """Set up console logging and parse the source specification."""
    # NOTE(review): this fragment looks truncated -- no option processing
    # follows the getopt call.
    global logger
    logger = Factory.get_logger("console")
    try:
        opts, args = getopt.getopt(sys.argv[1:], "s:", ["source-spec=",])
    # Python 2 except-binding syntax; print the error and bail out.
    except getopt.GetoptError, val:
        print val
        usage(1)
def main(): """ Start method for this script. """ global logger logger = Factory.get_logger("cronjob") logger.info("Performing group synchronization") conf = readconf() p = argparse.ArgumentParser() subs = p.add_subparsers(dest='mode') sync = subs.add_parser('sync', help=u'Perform sync') rep = subs.add_parser('report', help=u'Generate report') sync.add_argument('--commit', action='store_true', help=u'Commit') rep.add_argument('-e', '--expired-file', action='store', required=True, help='Locate expired accounts and generate a report') for db in conf.databases: name = db['name'] sync.add_argument('--' + name, action='append_const', const=db, dest='db', help=u'Update {} group'.format(name)) rep.add_argument('--' + name, action='append_const', const=db, dest='db', help=u'Report database {}'.format(name)) opts = p.parse_args() if opts.mode == 'report': report_users(opts.expired_file, databases=(opts.db if opts.db is not None else [ x for x in conf.databases for y in conf.report if x.name == y ])) else: services = opts.db or [] global dryrun dryrun = not opts.commit # preload the account id -> uname mappings used later. db = Factory.get("Database")() const = Factory.get("Constants")() group = Factory.get("Group")(db) global account2name account2name = dict((x["entity_id"], x["entity_name"]) for x in group.list_names(const.account_namespace)) perform_synchronization(services)