def main():
    """Parse arguments and write the mail LDIF export (module-global state)."""
    global verbose, f, db, co, ldap, auth, start

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', "--verbose", action="count", default=0)
    parser.add_argument('-m', "--mail-file")
    parser.add_argument('-s', "--spread",
                        default=ldapconf('MAIL', 'spread', None))
    parser.add_argument('-i', "--ignore-size", dest="max_change",
                        action="store_const", const=100)
    parser.add_argument('-a', "--no-auth-data", dest="auth",
                        action="store_false", default=True)
    args = parser.parse_args()

    # Publish parsed options and shared handles as module globals for
    # the rest of the script.
    verbose = args.verbose
    auth = args.auth
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    start = now()

    load_started = now()
    if verbose:
        logger.debug("Loading the EmailLDAP module...")
    ldap = Factory.get('EmailLDAP')(db)
    if verbose:
        logger.debug(" done in %d sec." % (now() - load_started))

    spread = args.spread
    if spread is not None:
        spread = map_spreads(spread, int)

    f = ldif_outfile('MAIL', args.mail_file, max_change=args.max_change)
    get_data(spread)
    end_ldif_outfile('MAIL', f)
def generate_all(fname):
    """Generate user + group LDIF to fname.

    @type fname: str
    @param fname: The file where the ldif data will be written
    """
    logger.debug("Generating ldif into %s", fname)
    outfile = ldif_outfile("ORG", fname)
    outfile.write(container_entry_string("ORG"))
    helper = LDIFHelper(logger)

    logger.debug("Generating user ldif...")
    outfile.write(container_entry_string("USER"))
    for user in helper.yield_users():
        # Remove the dn entry from the attribute dict before writing.
        user_dn = user.pop("dn")[0]
        outfile.write(entry_string(user_dn, user, False))
    end_ldif_outfile("USER", outfile, outfile)

    logger.debug("Generating group ldif...")
    outfile.write(container_entry_string("GROUP"))
    for group in helper.yield_groups():
        group_dn = group.pop("dn")[0]
        outfile.write(entry_string(group_dn, group, False))
    end_ldif_outfile("GROUP", outfile)
    logger.debug("Done with group ldif (all done)")
def dump(self):
    """Write one hiofRadiusAccount LDIF entry per known account."""
    fd = ldif_outfile('USER')
    fd.write(container_entry_string('USER'))
    missing_auth = (None, None)
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        record = self.auth[account_id]
        uname = record[0]
        auth = record[1]
        nt_hash = self.md4_auth.get(account_id, missing_auth)[1]
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db, self.quarantines[account_id])
            if handler.should_skip():
                # Quarantined hard enough to be omitted entirely.
                continue
            if handler.is_locked():
                # Locked accounts are exported without any password data.
                auth = nt_hash = None
        dn = ','.join(('uid=' + uname, self.user_dn))
        entry = {
            'objectClass': ['top', 'account', 'hiofRadiusAccount'],
            'uid': (uname,),
            'radiusTunnelType': ('13',),
            'radiusTunnelMediumType': ('6',),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0],),
            'radiusClass': (vlan_vpn[1],),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        if nt_hash:
            entry['ntPassword'] = (nt_hash,)
        fd.write(entry_string(dn, entry, False))
    end_ldif_outfile('USER', fd)
def main(inargs=None):
    """Generate the course LDAP tree and the participation pickle."""
    defaults = cereconf.LDAP_KURS
    parser = argparse.ArgumentParser(
        description="Generate course ldap tree",
    )
    # Every option follows the same pattern: default from cereconf,
    # required only when cereconf supplies no default.
    for optname, cfg_key, help_text, meta in (
            ('--aktivitetfile', 'aktivitetfile',
             'Use edu activities from %(metavar)s (%(default)s)',
             'xml-file'),
            ('--enhetfile', 'enhetfile',
             'Use edu units from %(metavar)s (%(default)s)',
             'xml-file'),
            ('--emnefile', 'emnefile',
             'Use subjects from %(metavar)s (%(default)s)',
             'xml-file'),
            ('--picklefile', 'picklefile',
             'Store course participation to %(metavar)s (%(default)s)',
             'pickle-file'),
            ('--ldiffile', 'file',
             'Write courses LDIF to %(metavar)s (%(default)s)',
             'ldif-file'),
    ):
        parser.add_argument(
            optname,
            default=defaults.get(cfg_key),
            required=not defaults.get(cfg_key),
            help=help_text,
            metavar=meta,
        )
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)
    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    cgi = CerebrumGroupInfo(db)
    sip = StudinfoParsers(args.emnefile, args.aktivitetfile, args.enhetfile)

    destfile = ldif_outfile('KURS', args.ldiffile)
    destfile.write(container_entry_string('KURS'))
    urn_dict = gen_undervisningsaktivitet(cgi, sip, destfile)
    urn_dict.update(gen_undervisningsenhet(cgi, sip, destfile))
    end_ldif_outfile('KURS', destfile)

    owner_id2urn = gen_owner_id2urn(db, urn_dict)
    dump_pickle_file(args.picklefile, owner_id2urn)
    logger.info('Done %s', parser.prog)
def write_subnet_ldif():
    """Write one ipNetwork LDIF entry per subnet in the database."""
    dn_base = ldapconf('SUBNETS', 'dn')
    start_attr, end_attr, extra_classes = ldapconf('SUBNETS', 'rangeSchema')
    object_classes = ('top', 'ipNetwork') + tuple(extra_classes)
    db = Factory.get('Database')()
    fd = ldif_outfile('SUBNETS')
    fd.write(container_entry_string('SUBNETS'))
    for row in Subnet(db).search():
        cn = "%s/%s" % (row['subnet_ip'], row['subnet_mask'])
        desc = row['description']
        entry = {
            'objectClass': object_classes,
            # Empty description is omitted rather than exported blank.
            'description': (iso2utf(desc),) if desc else (),
            'ipNetworkNumber': (row['subnet_ip'],),
            'ipNetmaskNumber': (netmask_to_ip(row['subnet_mask']),),
            start_attr: (str(int(row['ip_min'])),),
            end_attr: (str(int(row['ip_max'])),),
        }
        fd.write(entry_string("cn=%s,%s" % (cn, dn_base), entry))
    end_ldif_outfile('SUBNETS', fd)
def main(inargs=None):
    """Generate a group tree LDIF and/or a pickle of group memberships."""
    parser = argparse.ArgumentParser(
        description="Generate a group tree for LDAP",
    )
    parser.add_argument(
        '--ldiffile',
        # BUG FIX: was '%(metavar)' (missing the 's' conversion), which
        # makes argparse raise ValueError when rendering the help text.
        help='Write groups to the ldif-file %(metavar)s',
        metavar='file',
    )
    parser.add_argument(
        '--picklefile',
        help='Write group memberships to the pickle-file %(metavar)s',
        metavar='file',
    )
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    if not any((args.ldiffile, args.picklefile)):
        parser.error('Must use --ldiffile or --picklefile')
    Cerebrum.logutils.autoconf('cronjob', args)
    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)
    ldiffile = args.ldiffile
    picklefile = args.picklefile
    db = Factory.get('Database')()
    dn = ldapconf('GROUP', 'dn')
    logger.info('Generating LDIF...')
    destfile = ldif_outfile('GROUP', ldiffile)
    destfile.write(container_entry_string('GROUP'))
    mbr2grp = dump_ldif(db, dn, destfile)
    end_ldif_outfile('GROUP', destfile)
    logger.info('Wrote LDIF to %r', ldiffile)
    # BUG FIX: guard the pickle step -- running with only --ldiffile
    # previously crashed with TypeError on 'None + .tmp'.
    if picklefile:
        logger.info('Generating pickle dump...')
        tmpfname = picklefile + '.tmp'
        # BUG FIX: close the temp file before renaming (the original
        # leaked the handle, risking an unflushed file being renamed).
        # Write-then-rename keeps the replacement atomic.
        with open(tmpfname, 'wb') as pickle_fd:
            pickle.dump(mbr2grp, pickle_fd, pickle.HIGHEST_PROTOCOL)
        os.rename(tmpfname, picklefile)
        logger.info('Wrote pickle file to %r', picklefile)
    logger.info('Done %s', parser.prog)
def dump(self):
    """Write one uiaRadiusAccount LDIF entry per known account."""
    outfile = ldif_outfile('RADIUS')
    logger.debug('writing to %s', repr(outfile))
    outfile.write(container_entry_string('RADIUS'))
    logger.info('Generating export...')
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        try:
            uname = self.account_names[account_id]
        except KeyError:
            logger.error('No account name for account_id=%r', account_id)
            continue
        # Password lookups are best-effort; missing data means the
        # attribute is simply not exported.
        try:
            auth = self.user_password.get(account_id)
        except LookupError:
            auth = None
        try:
            ntauth = self.nt_password.get(account_id)
        except LookupError:
            ntauth = None
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db, self.quarantines[account_id])
            if handler.should_skip():
                continue
            if handler.is_locked():
                # Locked accounts are exported without password data.
                auth = ntauth = None
        dn = ','.join(('uid=' + uname, self.user_dn))
        entry = {
            'objectClass': ['top', 'account', 'uiaRadiusAccount'],
            'uid': (uname, ),
            'radiusTunnelType': ('VLAN', ),
            'radiusTunnelMediumType': ('IEEE-802', ),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0], ),
            'radiusClass': (vlan_vpn[1], ),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            # NOTE(review): unlike 'ntPassword' below, this value is not
            # wrapped in a tuple -- presumably user_password.get() already
            # returns a sequence; confirm against the exporter class.
            entry['userPassword'] = auth
        if ntauth:
            entry['ntPassword'] = (ntauth, )
        outfile.write(entry_string(dn, entry, False))
    end_ldif_outfile('RADIUS', outfile)
def write_subnet_ldif():
    """Export every subnet in the database as an ipNetwork LDIF entry."""
    base_dn = ldapconf('SUBNETS', 'dn')
    range_start, range_end, extra = ldapconf('SUBNETS', 'rangeSchema')
    classes = ('top', 'ipNetwork') + tuple(extra)
    db = Factory.get('Database')()
    out = ldif_outfile('SUBNETS')
    out.write(container_entry_string('SUBNETS'))
    for row in Subnet(db).search():
        cn = "%s/%s" % (row['subnet_ip'], row['subnet_mask'])
        desc = row['description']
        dn = "cn=%s,%s" % (cn, base_dn)
        # A blank description yields an empty attribute tuple, which the
        # LDIF writer simply omits.
        description = (iso2utf(desc), ) if desc else ()
        out.write(entry_string(dn, {
            'objectClass': classes,
            'description': description,
            'ipNetworkNumber': (row['subnet_ip'], ),
            'ipNetmaskNumber': (netmask_to_ip(row['subnet_mask']), ),
            range_start: (str(int(row['ip_min'])), ),
            range_end: (str(int(row['ip_max'])), ),
        }))
    end_ldif_outfile('SUBNETS', out)
def generate_dump(db, filename, use_mail_module):
    """Write the ORG, OU and PERSON LDIF trees.

    :param db: database connection used by the OrgLDIF generator
    :param filename: explicit output file, or None for the configured one
    :param use_mail_module: include mail data when generating persons
    """
    ldif = Factory.get('OrgLDIF')(db, logger.getChild('OrgLDIF'))
    timer = make_timer(logger, 'Starting dump')
    outfile = ldif_outfile('ORG', filename)
    logger.debug('writing org data to %r', outfile)
    timer('Generating org data...')
    ldif.generate_org_object(outfile)
    ou_outfile = ldif_outfile('OU', default=outfile,
                              explicit_default=filename)
    # BUG FIX: log the OU handle, not the ORG handle (they differ when a
    # separate OU file is configured).
    logger.debug('Writing ou data to %r', ou_outfile)
    timer('Generating ou data...')
    ldif.generate_ou(ou_outfile)
    pers_outfile = ldif_outfile('PERSON', default=outfile,
                                explicit_default=filename)
    # BUG FIX: same as above, log the PERSON handle.
    logger.debug('Writing person data to %r', pers_outfile)
    timer('Generating person data...')
    ldif.generate_person(pers_outfile, ou_outfile, use_mail_module)
    end_ldif_outfile('PERSON', pers_outfile, outfile)
    end_ldif_outfile('OU', ou_outfile, outfile)
    end_ldif_outfile('ORG', outfile)
    timer("Dump done")
def main():
    """Getopt-based entry point for the course LDIF export."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], '', [
            'help', 'aktivitetfile=', 'enhetfile=', 'emnefile=',
            'ldiffile=', 'picklefile='])
    except getopt.GetoptError:
        usage(1)
    # Defaults come from cereconf; command-line options override below.
    aktivitetfile, enhetfile, emnefile, picklefile, ldiffile = map(
        cereconf.LDAP_KURS.get,
        ('aktivitetfile', 'enhetfile', 'emnefile', 'picklefile', 'file'))
    for opt, val in opts:
        if opt == '--help':
            usage()
        elif opt == '--aktivitetfile':
            aktivitetfile = val
        elif opt == '--enhetfile':
            enhetfile = val
        elif opt == '--emnefile':
            emnefile = val
        elif opt == '--picklefile':
            picklefile = val
        elif opt == '--ldiffile':
            ldiffile = val
    # All five files are mandatory; positional arguments are not allowed.
    if args or not (aktivitetfile and enhetfile and emnefile and
                    picklefile and ldiffile):
        usage(1)
    logger.info('Start')
    cgi = CerebrumGroupInfo()
    sip = StudinfoParsers(emnefile, aktivitetfile, enhetfile)
    destfile = ldif_outfile('KURS', ldiffile)
    destfile.write(container_entry_string('KURS'))
    urn_dict = gen_undervisningsaktivitet(cgi, sip, destfile)
    urn_dict.update(gen_undervisningsenhet(cgi, sip, destfile))
    end_ldif_outfile('KURS', destfile)
    owner_id2urn = gen_owner_id2urn(urn_dict)
    dump_pickle_file(picklefile, owner_id2urn)
    logger.info('Done')
def main():
    """Write the group LDIF and the membership pickle (getopt variant)."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'h', [
            'help', 'ldiffile=', 'picklefile='])
    except getopt.GetoptError:
        usage(1)
    # BUG FIX: these were never initialized, so omitting either option
    # raised NameError instead of printing the usage message.
    picklefile = None
    ldiffile = None
    for opt, val in opts:
        # BUG FIX: '-h' is declared in the getopt spec but was silently
        # ignored by the original loop; treat it like '--help'.
        if opt in ('-h', '--help'):
            usage()
        elif opt in ('--picklefile',):
            picklefile = val
        elif opt in ('--ldiffile',):
            ldiffile = val
    if not (picklefile and ldiffile) or args:
        usage(1)
    destfile = ldif_outfile('GROUP', ldiffile)
    destfile.write(container_entry_string('GROUP'))
    # BUG FIX: capture the return value -- 'mbr2grp' was otherwise unbound
    # below (cf. the argparse variant, which assigns dump_ldif's result).
    # NOTE(review): verify dump_ldif() returns the membership map and does
    # not rely on setting a module global instead.
    mbr2grp = dump_ldif(destfile)
    end_ldif_outfile('GROUP', destfile)
    tmpfname = picklefile + '.tmp'
    # BUG FIX: close the temp file before the atomic rename (the original
    # leaked the handle).
    with open(tmpfname, 'wb') as pickle_fd:
        pickle.dump(mbr2grp, pickle_fd, pickle.HIGHEST_PROTOCOL)
    os.rename(tmpfname, picklefile)
def main():
    """Write the group LDIF and the membership pickle (getopt variant)."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'h',
                                   ['help', 'ldiffile=', 'picklefile='])
    except getopt.GetoptError:
        usage(1)
    # BUG FIX: initialize both options -- omitting one previously raised
    # NameError in the validation check instead of calling usage(1).
    picklefile = ldiffile = None
    for opt, val in opts:
        # BUG FIX: accept '-h' (declared in the getopt spec, but the
        # original loop only matched '--help').
        if opt in ('-h', '--help'):
            usage()
        elif opt in ('--picklefile', ):
            picklefile = val
        elif opt in ('--ldiffile', ):
            ldiffile = val
    if not (picklefile and ldiffile) or args:
        usage(1)
    destfile = ldif_outfile('GROUP', ldiffile)
    destfile.write(container_entry_string('GROUP'))
    # BUG FIX: bind the result -- 'mbr2grp' was otherwise unbound below.
    # NOTE(review): confirm dump_ldif() returns the membership map rather
    # than setting a module-level global.
    mbr2grp = dump_ldif(destfile)
    end_ldif_outfile('GROUP', destfile)
    tmpfname = picklefile + '.tmp'
    # BUG FIX: close the temp file before renaming it into place.
    with open(tmpfname, 'wb') as tmp:
        pickle.dump(mbr2grp, tmp, pickle.HIGHEST_PROTOCOL)
    os.rename(tmpfname, picklefile)
def generate_all(fname):
    """Write user + group LDIF to fname."""
    out = ldif_outfile("ORG", fname)
    logger.debug('writing to %r', out)
    out.write(container_entry_string("ORG"))
    helper = LDIFHelper(logger.getChild('LDIFHelper'))

    logger.info("Generating user ldif...")
    out.write(container_entry_string("USER"))
    for user in helper.yield_users():
        # The dn lives inside the attribute dict; take it out before
        # serializing the remaining attributes.
        user_dn = user.pop("dn")[0]
        out.write(entry_string(user_dn, user, False))
    end_ldif_outfile("USER", out, out)

    logger.debug("Generating group ldif...")
    out.write(container_entry_string("GROUP"))
    for group in helper.yield_groups():
        group_dn = group.pop("dn")[0]
        out.write(entry_string(group_dn, group, False))
    end_ldif_outfile("GROUP", out)
def main():
    """Generate the ORG/OU/PERSON LDIF dump."""
    logger = Factory.get_logger("cronjob")
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '-o', '--output',
        type=text_type,
        dest='output',
        help='output file')
    parser.add_argument(
        # BUG FIX: the short option was misspelled '-m,' (trailing comma),
        # so the documented '-m' flag did not work.
        '-m', '--omit-mail-module',
        action='store_true',
        dest='omit_mail_module',
        help='omit the email module')
    args = parser.parse_args()
    ldif = Factory.get('OrgLDIF')(Factory.get('Database')(), logger)
    timer = make_timer(logger, 'Starting dump.')
    default_output = ldif_outfile('ORG', args.output)
    ldif.generate_org_object(default_output)
    # OU and PERSON fall back to the ORG file unless configured separately.
    ou_output = ldif_outfile('OU', default=default_output,
                             explicit_default=args.output)
    ldif.generate_ou(ou_output)
    person_output = ldif_outfile('PERSON', default=default_output,
                                 explicit_default=args.output)
    ldif.generate_person(outfile=person_output,
                         alias_outfile=ou_output,
                         use_mail_module=not args.omit_mail_module)
    end_ldif_outfile('PERSON', outfile=person_output,
                     default_file=default_output)
    end_ldif_outfile('OU', outfile=ou_output, default_file=default_output)
    end_ldif_outfile('ORG', outfile=default_output)
    timer("Dump done.")
def main():
    """Generate the ORG/OU/PERSON LDIF dump."""
    logger = Factory.get_logger("cronjob")
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--output',
                        type=text_type,
                        dest='output',
                        help='output file')
    # BUG FIX: short option was '-m,' (trailing comma in the option
    # string), which made the intended '-m' flag unusable.
    parser.add_argument('-m', '--omit-mail-module',
                        action='store_true',
                        dest='omit_mail_module',
                        help='omit the email module')
    args = parser.parse_args()
    ldif = Factory.get('OrgLDIF')(Factory.get('Database')(), logger)
    timer = make_timer(logger, 'Starting dump.')
    default_output = ldif_outfile('ORG', args.output)
    ldif.generate_org_object(default_output)
    # The OU and PERSON trees share the ORG file unless a separate file
    # is configured for them.
    ou_output = ldif_outfile('OU', default=default_output,
                             explicit_default=args.output)
    ldif.generate_ou(ou_output)
    person_output = ldif_outfile('PERSON', default=default_output,
                                 explicit_default=args.output)
    ldif.generate_person(outfile=person_output,
                         alias_outfile=ou_output,
                         use_mail_module=not args.omit_mail_module)
    end_ldif_outfile('PERSON', outfile=person_output,
                     default_file=default_output)
    end_ldif_outfile('OU', outfile=ou_output, default_file=default_output)
    end_ldif_outfile('ORG', outfile=default_output)
    timer("Dump done.")
try: importlib.import_module(module_name) except: usage("\nNo configuration with a file name %s found, aborting" % module_name) return 1 ldif = Factory.get('OrgLDIF')(Factory.get('Database')(), logger) timer = make_timer(logger, 'Starting dump.') outfile = ldif_outfile('ORG', ofile) ldif.generate_org_object(outfile) ou_outfile = ldif_outfile('OU', default=outfile, explicit_default=ofile) ldif.generate_ou(ou_outfile) pers_outfile= ldif_outfile('PERSON',default=outfile,explicit_default=ofile) ldif.generate_person(pers_outfile, ou_outfile, use_mail_module) end_ldif_outfile('PERSON', pers_outfile, outfile) end_ldif_outfile('OU', ou_outfile, outfile) end_ldif_outfile('ORG', outfile) timer("Dump done.") def usage(err=0): if err: print >>sys.stderr, err print >>sys.stderr, __doc__ sys.exit(bool(err)) if __name__ == '__main__': main()
if opt in ("-o", "--org"): ofile = val elif opt in ("-m", "--omit-mail-module"): use_mail_module = False else: usage() ldif = Factory.get("OrgLDIF")(Factory.get("Database")(), logger) timer = make_timer(logger, "Starting dump.") outfile = ldif_outfile("ORG", ofile) ldif.generate_org_object(outfile) ou_outfile = ldif_outfile("OU", default=outfile, explicit_default=ofile) ldif.generate_ou(ou_outfile) pers_outfile = ldif_outfile("PERSON", default=outfile, explicit_default=ofile) ldif.generate_person(pers_outfile, ou_outfile, use_mail_module) end_ldif_outfile("PERSON", pers_outfile, outfile) end_ldif_outfile("OU", ou_outfile, outfile) end_ldif_outfile("ORG", outfile) timer("Dump done.") def usage(err=0): if err: print >>sys.stderr, err print >>sys.stderr, __doc__ sys.exit(bool(err)) if __name__ == "__main__": main()
if opt in ("-o", "--org"): ofile = val elif opt in ("-m", "--omit-mail-module"): use_mail_module = False else: usage() ldif = Factory.get('OrgLDIF')(Factory.get('Database')(), logger) timer = make_timer(logger, 'Starting dump.') outfile = ldif_outfile('ORG', ofile) ldif.generate_org_object(outfile) ou_outfile = ldif_outfile('OU', default=outfile, explicit_default=ofile) ldif.generate_ou(ou_outfile) pers_outfile= ldif_outfile('PERSON',default=outfile,explicit_default=ofile) ldif.generate_person(pers_outfile, ou_outfile, use_mail_module) end_ldif_outfile('PERSON', pers_outfile, outfile) end_ldif_outfile('OU', ou_outfile, outfile) end_ldif_outfile('ORG', outfile) timer("Dump done.") def usage(err=0): if err: print >>sys.stderr, err print >>sys.stderr, __doc__ sys.exit(bool(err)) if __name__ == '__main__': main()
def main(inargs=None):
    """Generate a mail-db.ldif export."""
    parser = argparse.ArgumentParser(
        description='Generate a mail-db.ldif',
    )
    parser.add_argument(
        '-v', "--verbose",
        action="count",
        default=0,
        help=('Show some statistics while running. '
              'Repeat the option for more verbosity.'),
    )
    parser.add_argument(
        '-m', "--mail-file",
        help='Specify file to write to.',
    )
    parser.add_argument(
        '-s', "--spread",
        default=ldapconf('MAIL', 'spread', None),
        help='Targets printed found in spread.',
    )
    parser.add_argument(
        '-i', "--ignore-size",
        dest="max_change",
        action="store_const",
        const=100,
        help='Use file class instead of SimilarSizeWriter.',
    )
    parser.add_argument(
        '-a', "--no-auth-data",
        dest="auth",
        action="store_false",
        default=True,
        help="Don't populate userPassword.",
    )
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %s', repr(args))

    db = Factory.get('Database')()
    start = now()

    with log_time('loading the EmailLDAP module'):
        ldap = Factory.get('EmailLDAP')(db)

    spread = args.spread
    if spread is not None:
        spread = map_spreads(spread, int)

    # Configure auth
    if args.auth:
        auth_attr = ldapconf('MAIL', 'auth_attr', None)
        user_password = AuthExporter.make_exporter(
            db,
            auth_attr['userPassword'])
    else:
        # Auth data explicitly disabled on the command line.
        user_password = None

    outfile = ldif_outfile('MAIL', args.mail_file,
                           max_change=args.max_change)
    logger.debug('writing data to %s', repr(outfile))

    with log_time('fetching data', level=logging.INFO):
        get_data(db, ldap, getattr(user_password, 'cache', None), spread)

    with log_time('generating ldif', level=logging.INFO):
        write_ldif(db, ldap, user_password, outfile, verbose=args.verbose)

    end_ldif_outfile('MAIL', outfile)
    logger.info("Total time: %ds" % (now() - start))
    logger.info('Done %s', parser.prog)
def write_mail_dns():
    """Export mail domains and hosts (from DNS data) as LDIF."""
    out = ldif_outfile('MAIL_DNS')
    hosts, cnames, lower2host, hosts_only_mx = get_hosts_and_cnames()

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    logger = Factory.get_logger('cronjob')

    # Collect exportable email domains (noexport domains removed).
    email = Email.EmailDomain(db)
    email_domain = {}
    for dom_entry in email.list_email_domains():
        email_domain[int(dom_entry['domain_id'])] = dom_entry['domain']
    for no_exp_dom in email.list_email_domains_with_category(
            co.email_domain_category_noexport):
        del email_domain[int(no_exp_dom['domain_id'])]

    domains = email_domain.values()
    domains.sort()
    domain_dict = {}
    for domain in domains:
        domain_dict[domain.lower()] = True
        # Verify that domains have a MX-record.
        for arg in cereconf.LDAP_MAIL_DNS['dig_args']:
            zone = arg[0]
            if domain.endswith(zone) and not (domain in hosts_only_mx
                                              or domain in hosts):
                logger.error("email domain without MX defined: %s" % domain)
        # Valid email domains only requires MX
        if domain in hosts_only_mx:
            del hosts_only_mx[domain]
    for host in hosts_only_mx:
        logger.warn(
            "MX defined but no A/AAAA record or valid email domain: %s"
            % host)

    def handle_domain_host(host):
        # Emit the host line plus cn aliases, consuming the host and its
        # cnames so they are not emitted again below.
        out.write("host: %s\n" % lower2host[host])
        for cname in hosts[host]:
            if not domain_dict.has_key(cname):
                out.write("cn: %s\n" % lower2host[cname])
            del cnames[cname]
        del hosts[host]

    dn_suffix = ldapconf('MAIL_DNS', 'dn')
    out.write(container_entry_string('MAIL_DNS'))

    for domain in domains:
        out.write("""dn: cn=%s,%s
objectClass: uioHost
cn: %s
""" % (domain, dn_suffix, domain))
        domain = domain.lower()
        if cnames.has_key(domain):
            out.write("cn: %s\n" % lower2host[cnames[domain]])
            handle_domain_host(cnames[domain])
        elif hosts.has_key(domain):
            handle_domain_host(domain)
        out.write('\n')

    sorted_hosts = hosts.keys()
    sorted_hosts.sort()
    for host in sorted_hosts:
        out.write("""dn: host=%s,%s
objectClass: uioHost
host: %s
cn: %s
""" % (lower2host[host], dn_suffix, lower2host[host], lower2host[host]))
        for cname in hosts[host]:
            out.write("cn: %s\n" % lower2host[cname])
        out.write('\n')

    end_ldif_outfile('MAIL_DNS', out)
def main(inargs=None):
    """Write POSIX users, file groups and net groups to LDIF."""
    parser = argparse.ArgumentParser(description=__doc__)
    # The three categories share identical --*-file / --*-spread options.
    for kind, noun in (('user', 'users'),
                       ('filegroup', 'file groups'),
                       ('netgroup', 'net groups')):
        parser.add_argument(
            '--{}-file'.format(kind),
            type=text_type,
            dest='{}_file'.format(kind),
            metavar='PATH',
            help='output file for {}'.format(noun))
        parser.add_argument(
            '--{}-spread'.format(kind),
            type=text_type,
            action='append',
            dest='{}_spread'.format(kind),
            metavar='NAME',
            help='selection spread(s) for {}'.format(noun))
    parser.add_argument(
        '--all',
        action='store_true',
        dest='all',
        help='write everything as configured in cereconf')
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)

    got_file = args.user_file or args.filegroup_file or args.netgroup_file
    if args.all and got_file:
        parser.error('Cannot specify --all with --*-file')
    elif not args.all and not got_file:
        parser.error('Need one of --all or --*-file')

    Cerebrum.logutils.autoconf('cronjob', args)
    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', args)

    fd = None
    if args.all:
        # With --all everything goes into one shared POSIX file.
        fd = ldif_outfile('POSIX')
        fd.write("\n")
        if cereconf.LDAP_POSIX.get('dn'):
            fd.write(container_entry_string('POSIX'))

    db = Factory.get('Database')()
    posixldif = Factory.get('PosixLDIF')(
        db=db,
        logger=logger,
        u_sprd=args.user_spread,
        g_sprd=args.filegroup_spread,
        n_sprd=args.netgroup_spread,
        fd=fd)

    for var, func, filepath in (
            ('LDAP_USER', posixldif.user_ldif, args.user_file),
            ('LDAP_FILEGROUP', posixldif.filegroup_ldif,
             args.filegroup_file),
            ('LDAP_NETGROUP', posixldif.netgroup_ldif,
             args.netgroup_file)):
        if (args.all or filepath) and getattr(cereconf, var).get('dn'):
            func(filepath)
        elif filepath:
            parser.error("Missing 'dn' in cereconf.{}".format(var))

    if fd:
        end_ldif_outfile('POSIX', fd)
    logger.info('End of script %s', parser.prog)
def write_mail_dns():
    """Export mail domains and hosts (from DNS data) as LDIF."""
    f = ldif_outfile('MAIL_DNS')
    hosts, cnames, lower2host, hosts_only_mx = get_hosts_and_cnames()

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    logger = Factory.get_logger('cronjob')

    # Exportable email domains: everything except the noexport category.
    email = Email.EmailDomain(db)
    email_domain = {}
    for dom_entry in email.list_email_domains():
        email_domain[int(dom_entry['domain_id'])] = dom_entry['domain']
    for no_exp_dom in email.list_email_domains_with_category(
            co.email_domain_category_noexport):
        del email_domain[int(no_exp_dom['domain_id'])]

    domains = email_domain.values()
    domains.sort()
    domain_dict = {}
    for domain in domains:
        domain_dict[domain.lower()] = True
        # Verify that domains have a MX-record.
        for arg in cereconf.LDAP_MAIL_DNS['dig_args']:
            zone = arg[0]
            if domain.endswith(zone) and not (domain in hosts_only_mx
                                              or domain in hosts):
                logger.error("email domain without MX defined: %s" % domain)
        # Valid email domains only requires MX
        if domain in hosts_only_mx:
            del hosts_only_mx[domain]
    for host in hosts_only_mx:
        logger.warn(
            "MX defined but no A/AAAA record or valid email domain: %s"
            % host)

    def handle_domain_host(host):
        # Write the host line and its cn aliases, then consume the host
        # and its cnames so the later host pass skips them.
        f.write("host: %s\n" % lower2host[host])
        for cname in hosts[host]:
            if not domain_dict.has_key(cname):
                f.write("cn: %s\n" % lower2host[cname])
            del cnames[cname]
        del hosts[host]

    dn_suffix = ldapconf('MAIL_DNS', 'dn')
    f.write(container_entry_string('MAIL_DNS'))

    for domain in domains:
        f.write("""dn: cn=%s,%s
objectClass: uioHost
cn: %s
""" % (domain, dn_suffix, domain))
        domain = domain.lower()
        if cnames.has_key(domain):
            f.write("cn: %s\n" % lower2host[cnames[domain]])
            handle_domain_host(cnames[domain])
        elif hosts.has_key(domain):
            handle_domain_host(domain)
        f.write('\n')

    sorted_hosts = hosts.keys()
    sorted_hosts.sort()
    for host in sorted_hosts:
        f.write("""dn: host=%s,%s
objectClass: uioHost
host: %s
cn: %s
""" % (lower2host[host], dn_suffix, lower2host[host], lower2host[host]))
        for cname in hosts[host]:
            f.write("cn: %s\n" % lower2host[cname])
        f.write('\n')

    end_ldif_outfile('MAIL_DNS', f)
def main():
    """Write POSIX users, file groups and net groups to LDIF."""
    parser = argparse.ArgumentParser(description=__doc__)
    # The three categories share identical --*-file / --*-spread options.
    for kind, noun in (('user', 'users'),
                       ('filegroup', 'file groups'),
                       ('netgroup', 'net groups')):
        parser.add_argument(
            '--{}-file'.format(kind),
            type=text_type,
            dest='{}_file'.format(kind),
            metavar='PATH',
            help='output file for {}'.format(noun))
        parser.add_argument(
            '--{}-spread'.format(kind),
            type=text_type,
            action='append',
            dest='{}_spread'.format(kind),
            metavar='NAME',
            help='selection spread(s) for {}'.format(noun))
    parser.add_argument(
        '--all',
        action='store_true',
        dest='all',
        help='write everything as configured in cereconf')
    args = parser.parse_args()

    got_file = args.user_file or args.filegroup_file or args.netgroup_file
    if args.all and got_file:
        parser.error('Cannot specify --all with --*-file')
    elif not args.all and not got_file:
        parser.error('Need one of --all or --*-file')

    fd = None
    if args.all:
        # With --all everything goes into one shared POSIX file.
        fd = ldif_outfile('POSIX')
        fd.write("\n")
        if cereconf.LDAP_POSIX.get('dn'):
            fd.write(container_entry_string('POSIX'))

    db = Factory.get('Database')()
    posixldif = Factory.get('PosixLDIF')(
        db=db,
        logger=logger,
        u_sprd=args.user_spread,
        g_sprd=args.filegroup_spread,
        n_sprd=args.netgroup_spread,
        fd=fd)

    for var, func, filepath in (
            ('LDAP_USER', posixldif.user_ldif, args.user_file),
            ('LDAP_FILEGROUP', posixldif.filegroup_ldif,
             args.filegroup_file),
            ('LDAP_NETGROUP', posixldif.netgroup_ldif,
             args.netgroup_file)):
        if (args.all or filepath) and getattr(cereconf, var).get('dn'):
            func(filepath)
        elif filepath:
            parser.error("Missing 'dn' in cereconf.{}".format(var))

    if fd:
        end_ldif_outfile('POSIX', fd)