def generate_all(fname):
    """Write user and group LDIF data to *fname*.

    @type fname: str
    @param fname: The file where the ldif data will be written
    """
    logger.debug("Generating ldif into %s", fname)
    out = ldif_outfile("ORG", fname)
    out.write(container_entry_string("ORG"))
    helper = LDIFHelper(logger)

    def write_entries(entries):
        # Every entry carries its own dn as a single-element list;
        # remove it before serializing the remaining attributes.
        for attrs in entries:
            entry_dn = attrs.pop("dn")[0]
            out.write(entry_string(entry_dn, attrs, False))

    logger.debug("Generating user ldif...")
    out.write(container_entry_string("USER"))
    write_entries(helper.yield_users())
    # NOTE(review): `out` is passed twice -- presumably the extra argument
    # keeps the stream open for the group section below; confirm against
    # end_ldif_outfile.
    end_ldif_outfile("USER", out, out)

    logger.debug("Generating group ldif...")
    out.write(container_entry_string("GROUP"))
    write_entries(helper.yield_groups())
    end_ldif_outfile("GROUP", out)
    logger.debug("Done with group ldif (all done)")
def generate_voip_clients(sink, addr_id2dn, *args):
    """Write one LDIF entry per voip client to *sink*.

    :param sink: writable LDIF output stream
    :param addr_id2dn: maps voip_address entity id -> DN of that address
    :param args: passed through to VoipClient.list_voip_attributes
    """
    # NOTE(review): relies on a module-level `db`; confirm it is bound
    # before this function is called.
    vc = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string('VOIP_CLIENT'))
    for entry in vc.list_voip_attributes(*args):
        voip_address_id = entry.pop("voip_address_id")
        if voip_address_id not in addr_id2dn:
            # Orphan client: its address is not in the cache (possibly
            # created after the cache was built) -- skip it.
            logger.debug(
                "voip client %s refers to voip_address %s, but the "
                "latter is not in the cache. Has %s been recently "
                "created?", repr(entry), voip_address_id, voip_address_id)
            continue
        entry['objectClass'] = ['top', 'sipClient']
        entry['sipVoipAddressDN'] = addr_id2dn[voip_address_id]
        # Softphones are keyed on uid; hardphones on MAC address.
        if entry["sipClientType"] == text_type(
                const.voip_client_type_softphone):
            attr = "uid"
            assert attr in entry
        elif entry["sipClientType"] == text_type(
                const.voip_client_type_hardphone):
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        entry["sipClientType"], repr(entry))
            continue
        dn = "{}={},{}".format(attr, entry[attr],
                               ldapconf('VOIP_CLIENT', 'dn', None))
        sink.write(entry_string(dn, entry))
def generate_voip_clients(sink, addr_id2dn, *args):
    """Write one LDIF entry per voip client to *sink*.

    :param sink: writable LDIF output stream
    :param addr_id2dn: maps voip_address entity id -> DN of that address
    :param args: passed through to VoipClient.list_voip_attributes
    """
    # NOTE(review): relies on a module-level `db`; confirm it is bound
    # before this function is called.
    vc = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string('VOIP_CLIENT'))
    for entry in vc.list_voip_attributes(*args):
        voip_address_id = entry.pop("voip_address_id")
        if voip_address_id not in addr_id2dn:
            # Orphan client: its address is not in the cache (possibly
            # created after the cache was built) -- skip it.
            logger.debug("voip client %s refers to voip_address %s, but the "
                         "latter is not in the cache. Has %s been recently "
                         "created?",
                         repr(entry), voip_address_id, voip_address_id)
            continue
        entry['objectClass'] = ['top', 'sipClient']
        entry['sipVoipAddressDN'] = addr_id2dn[voip_address_id]
        # Softphones are keyed on uid; hardphones on MAC address.
        if entry["sipClientType"] == text_type(const.voip_client_type_softphone):
            attr = "uid"
            assert attr in entry
        elif entry["sipClientType"] == text_type(const.voip_client_type_hardphone):
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        entry["sipClientType"], repr(entry))
            continue
        dn = "{}={},{}".format(attr, entry[attr],
                               ldapconf('VOIP_CLIENT', 'dn', None))
        sink.write(entry_string(dn, entry))
def dump(self):
    """Write the hiof RADIUS user tree to the 'USER' LDIF outfile."""
    fd = ldif_outfile('USER')
    fd.write(container_entry_string('USER'))
    missing_auth = (None, None)
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        uname, auth = self.auth[account_id][:2]
        nt_auth = self.md4_auth.get(account_id, missing_auth)[1]
        if account_id in self.quarantines:
            qh = QuarantineHandler(self.db, self.quarantines[account_id])
            if qh.should_skip():
                continue
            if qh.is_locked():
                # Locked accounts are exported without credentials.
                auth = nt_auth = None
        dn = ','.join(('uid=' + uname, self.user_dn))
        entry = {
            'objectClass': ['top', 'account', 'hiofRadiusAccount'],
            'uid': (uname,),
            'radiusTunnelType': ('13',),
            'radiusTunnelMediumType': ('6',),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0],),
            'radiusClass': (vlan_vpn[1],),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        if nt_auth:
            entry['ntPassword'] = (nt_auth,)
        fd.write(entry_string(dn, entry, False))
    end_ldif_outfile('USER', fd)
def generate_voip_clients(sink, addr_id2dn, encoding, *args):
    """Write one LDIF entry per voip client to *sink*.

    :param sink: writable LDIF output stream
    :param addr_id2dn: maps voip_address entity id -> DN of that address
    :param encoding: target character encoding for the emitted entries
    :param args: passed through to VoipClient.list_voip_attributes
    """
    db = Factory.get("Database")()
    vc = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string("VOIP_CLIENT"))
    for entry in vc.list_voip_attributes(*args):
        voip_address_id = entry.pop("voip_address_id")
        if voip_address_id not in addr_id2dn:
            # Orphan client: its address is not in the cache (possibly
            # created after the cache was built) -- skip it.
            logger.debug(
                "voip client %s refers to voip_address %s, but the "
                "latter is not in the cache. Has %s been recently "
                "created?",
                repr(entry),
                voip_address_id,
                voip_address_id,
            )
            continue
        entry["objectClass"] = ["top", "sipClient"]
        entry["sipVoipAddressDN"] = addr_id2dn[voip_address_id]
        # Softphones are keyed on uid; hardphones on MAC address.
        if entry["sipClientType"] == str(const.voip_client_type_softphone):
            attr = "uid"
            assert attr in entry
        elif entry["sipClientType"] == str(const.voip_client_type_hardphone):
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        entry["sipClientType"], repr(entry))
            continue
        dn = "%s=%s,%s" % (attr, entry[attr],
                           ldapconf("VOIP_CLIENT", "dn", None))
        sink.write(entry_string(object2encoding(dn, encoding),
                                object2encoding(entry, encoding)))
def main(inargs=None):
    """Generate the course (KURS) LDAP tree.

    :param inargs: argument list for argparse (defaults to sys.argv)
    """
    defaults = cereconf.LDAP_KURS
    parser = argparse.ArgumentParser(description="Generate course ldap tree", )
    # Each file option falls back to cereconf.LDAP_KURS, and is only
    # required on the command line when no default is configured.
    parser.add_argument(
        '--aktivitetfile',
        default=defaults.get('aktivitetfile'),
        required=not defaults.get('aktivitetfile'),
        help='Use edu activities from %(metavar)s (%(default)s)',
        metavar='xml-file',
    )
    parser.add_argument(
        '--enhetfile',
        default=defaults.get('enhetfile'),
        required=not defaults.get('enhetfile'),
        help='Use edu units from %(metavar)s (%(default)s)',
        metavar='xml-file',
    )
    parser.add_argument(
        '--emnefile',
        default=defaults.get('emnefile'),
        required=not defaults.get('emnefile'),
        help='Use subjects from %(metavar)s (%(default)s)',
        metavar='xml-file',
    )
    parser.add_argument(
        '--picklefile',
        default=defaults.get('picklefile'),
        required=not defaults.get('picklefile'),
        help='Store course participation to %(metavar)s (%(default)s)',
        metavar='pickle-file',
    )
    parser.add_argument(
        '--ldiffile',
        default=defaults.get('file'),
        required=not defaults.get('file'),
        help='Write courses LDIF to %(metavar)s (%(default)s)',
        metavar='ldif-file',
    )
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    db = Factory.get('Database')()
    cgi = CerebrumGroupInfo(db)
    sip = StudinfoParsers(args.emnefile, args.aktivitetfile, args.enhetfile)
    destfile = ldif_outfile('KURS', args.ldiffile)
    destfile.write(container_entry_string('KURS'))
    urn_dict = gen_undervisningsaktivitet(cgi, sip, destfile)
    urn_dict.update(gen_undervisningsenhet(cgi, sip, destfile))
    end_ldif_outfile('KURS', destfile)
    owner_id2urn = gen_owner_id2urn(db, urn_dict)
    dump_pickle_file(args.picklefile, owner_id2urn)
    logger.info('Done %s', parser.prog)
def generate_automount(f):
    """Write automount maps for all populated user disks to *f*.

    For each distinct /<area>/<dir> prefix among non-empty disks, emits a
    master-map entry, an ``automountMap`` container, and a ``cn=/`` NFS
    mount entry pointing at the host owning the first disk seen there.

    :param f: writable LDIF output stream
    """
    db = Factory.get('Database')()
    disk_mod = Factory.get('Disk')(db)
    host_mod = Factory.get('Host')(db)

    disks = disk_mod.list(filter_expired=True)

    # Single pass over disks (the original made two identical passes):
    # collect host ids and the first host per (area, dir) path prefix.
    # A set gives O(1) membership tests; `hosts` keeps insertion order.
    hosts = []
    seen_hosts = set()
    paths = {}  # (area, dir) -> host_id of first populated disk there
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        if disk['host_id'] not in seen_hosts:
            seen_hosts.add(disk['host_id'])
            hosts.append(disk['host_id'])
        path = disk['path'].split('/')
        key = (path[1], path[2])
        if key not in paths:
            paths[key] = disk['host_id']

    # TBD: any point in filtering? does it just consume more resources
    # than listing all hosts?
    h_id2name = {}
    for host in host_mod.search(host_id=hosts):
        h_id2name[host['host_id']] = host['name']

    f.write(container_entry_string('AUTOMOUNT_MASTER'))
    for area, directory in paths:
        map_dn = "ou=auto.%s-%s,%s" % (directory, area,
                                       ldapconf('AUTOMOUNT', 'dn', None))
        # Master map entry referring to the per-path automount map.
        master_dn = "cn=%s,%s" % ("/%s/%s" % (area, directory),
                                  ldapconf('AUTOMOUNT_MASTER', 'dn', None))
        f.write(entry_string(master_dn, {
            'objectClass': ['top', 'automount'],
            'automountInformation': "ldap:" + map_dn,
        }))
        # The map container itself.
        f.write(entry_string(map_dn, {
            'objectClass': ['top', 'automountMap'],
        }))
        # The actual NFS mount entry; ifi disks live in their own DNS zone.
        dns = 'ifi.uio.no' if area == 'ifi' else 'uio.no'
        mount_info = ("-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,nosuid,noacl "
                      "%s.%s:/%s/%s/&"
                      % (h_id2name[paths[(area, directory)]],
                         dns, area, directory))
        f.write(entry_string("cn=/,%s" % map_dn, {
            'objectClass': ['top', 'automount'],
            'automountInformation': mount_info,
        }))
def generate_automount(f):
    """Write automount maps for all populated user disks to *f*.

    :param f: writable LDIF output stream
    """
    db = Factory.get("Database")()
    co = Factory.get("Constants")(db)
    d = Factory.get("Disk")(db)
    h = Factory.get("Host")(db)
    hosts = []
    disks = d.list(filter_expired=True)
    for disk in disks:
        if disk["count"] <= 0:
            # Skip disks with no users
            continue
        if disk["host_id"] not in hosts:
            hosts.append(disk["host_id"])
    h_id2name = {}
    # TBD: any point in filtering? does it just consume more resources than
    # listing all hosts?
    for host in h.search(host_id=hosts):
        h_id2name[host["host_id"]] = host["name"]
    # Map each distinct /<area>/<dir> path prefix to the host of the first
    # populated disk seen under it.
    paths = {}
    for disk in disks:
        if disk["count"] <= 0:
            # Skip disks with no users
            continue
        path = disk["path"].split("/")
        if not ((path[1], path[2])) in paths.keys():
            paths[(path[1], path[2])] = disk["host_id"]
    f.write(container_entry_string("AUTOMOUNT_MASTER"))
    for path in paths:
        # Master map entry referring to the per-path automount map.
        entry = {}
        entry["objectClass"] = ["top", "automount"]
        dn = "cn=%s,%s" % ("/%s/%s" % (path[0], path[1]),
                           ldapconf("AUTOMOUNT_MASTER", "dn", None))
        entry["automountInformation"] = "ldap:ou=auto.%s-%s,%s" % (
            path[1], path[0], ldapconf("AUTOMOUNT", "dn", None))
        f.write(entry_string(dn, entry))
        # The automount map container itself.
        entry = {}
        entry["objectClass"] = ["top", "automountMap"]
        dn = "ou=auto.%s-%s,%s" % (path[1], path[0],
                                   ldapconf("AUTOMOUNT", "dn", None))
        f.write(entry_string(dn, entry))
        # The actual NFS mount entry; ifi disks live in their own DNS zone.
        entry = {}
        entry["objectClass"] = ["top", "automount"]
        dn = "cn=/,ou=auto.%s-%s,%s" % (path[1], path[0],
                                        ldapconf("AUTOMOUNT", "dn", None))
        dns = "uio.no"
        if path[0] == "ifi":
            dns = "ifi.uio.no"
        entry["automountInformation"] = "-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,nosuid,noacl %s.%s:/%s/%s/&" % (
            h_id2name[paths[path]],
            dns,
            path[0],
            path[1],
        )
        f.write(entry_string(dn, entry))
def generate_automount(f):
    """Write automount maps for all populated user disks to *f*.

    :param f: writable LDIF output stream
    """
    db = Factory.get('Database')()
    d = Factory.get('Disk')(db)
    h = Factory.get('Host')(db)
    hosts = []
    disks = d.list(filter_expired=True)
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        if disk['host_id'] not in hosts:
            hosts.append(disk['host_id'])
    h_id2name = {}
    for host in h.search(host_id=hosts):
        h_id2name[host['host_id']] = host['name']
    # Map each distinct /<area>/<dir> path prefix to the host of the first
    # populated disk seen under it.
    paths = {}
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        path = disk['path'].split('/')
        if not ((path[1], path[2])) in paths.keys():
            paths[(path[1], path[2])] = disk['host_id']
    f.write(container_entry_string('AUTOMOUNT_MASTER'))
    for path in paths:
        # Master map entry referring to the per-path automount map.
        entry = {}
        entry['objectClass'] = ['top', 'automount']
        dn = "cn={},{}".format(
            "/{}/{}".format(path[0], path[1]),
            ldapconf('AUTOMOUNT_MASTER', 'dn', None))
        entry['automountInformation'] = "ldap:ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))
        # The automount map container itself.
        entry = {}
        entry['objectClass'] = ['top', 'automountMap']
        dn = "ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))
        # The actual NFS mount entry; ifi disks live in their own DNS zone.
        entry = {}
        entry['objectClass'] = ['top', 'automount']
        dn = "cn=/,ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        dns = 'uio.no'
        if path[0] == 'ifi':
            dns = 'ifi.uio.no'
        automount_opts = ("-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,"
                          "nosuid,noacl {}.{}:/{}/{}/&")
        entry['automountInformation'] = automount_opts.format(
            h_id2name[paths[path]], dns, path[0], path[1])
        f.write(entry_string(dn, entry))
def generate_voip_addresses(sink, *args):
    """Write one LDIF entry per voip address to *sink*.

    :param sink: writable LDIF output stream
    :param args: passed through to VoipAddress.list_voip_attributes
    :return: dict mapping voip address entity_id -> DN written for it
    """
    voip_address = VoipAddress(db)
    sink.write(container_entry_string('VOIP_ADDRESS'))
    addr_id2dn = {}
    for attrs in voip_address.list_voip_attributes(*args):
        attrs['objectClass'] = ['top', 'voipAddress']
        dn = "voipOwnerId={},{}".format(attrs['voipOwnerId'],
                                        ldapconf('VOIP_ADDRESS', 'dn', None))
        addr_id2dn[attrs.pop("entity_id")] = dn
        # cn must be present in the entry, even when empty.
        if not attrs.get("cn"):
            attrs["cn"] = ()
        sink.write(entry_string(dn, attrs))
    return addr_id2dn
def generate_all(fname):
    """Write user + group LDIF to fname."""
    out = ldif_outfile("ORG", fname)
    logger.debug('writing to %r', out)
    out.write(container_entry_string("ORG"))
    helper = LDIFHelper(logger.getChild('LDIFHelper'))

    logger.info("Generating user ldif...")
    out.write(container_entry_string("USER"))
    for user in helper.yield_users():
        # Each entry carries its own dn as a single-element list; remove
        # it before serializing the remaining attributes.
        dn = user["dn"][0]
        del user["dn"]
        out.write(entry_string(dn, user, False))
    # NOTE(review): `out` is passed twice -- presumably the extra argument
    # keeps the stream open for the group section below; confirm against
    # end_ldif_outfile.
    end_ldif_outfile("USER", out, out)

    logger.debug("Generating group ldif...")
    out.write(container_entry_string("GROUP"))
    for group in helper.yield_groups():
        dn = group["dn"][0]
        del group["dn"]
        out.write(entry_string(dn, group, False))
    end_ldif_outfile("GROUP", out)
def main():
    """Parse command line arguments and export the automount tree."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--output',
                        type=text_type,
                        dest='output',
                        help='output file')
    args = parser.parse_args()

    out = ldif_outfile('AUTOMOUNT', args.output)
    logger.info('Starting automount export to %s', out.name)
    out.write(container_entry_string('AUTOMOUNT'))
    generate_automount(out)
    out.close()
    logger.info('Done')
def generate_voip_addresses(sink, encoding, *args):
    """Write one LDIF entry per voip address to *sink*.

    :param sink: writable LDIF output stream
    :param encoding: target character encoding for the emitted entries
    :param args: passed through to VoipAddress.list_voip_attributes
    :return: dict mapping voip address entity_id -> (unencoded) DN
    """
    db = Factory.get("Database")()
    va = VoipAddress(db)
    sink.write(container_entry_string("VOIP_ADDRESS"))
    addr_id2dn = dict()
    for entry in va.list_voip_attributes(*args):
        entry["objectClass"] = ["top", "voipAddress"]
        dn = "voipOwnerId=%s,%s" % (entry["voipOwnerId"],
                                    ldapconf("VOIP_ADDRESS", "dn", None))
        entity_id = entry.pop("entity_id")
        addr_id2dn[entity_id] = dn
        entry = object2encoding(entry, encoding)
        # cn must be present in the entry, even when empty.
        if not entry.get("cn"):
            entry["cn"] = ()
        sink.write(entry_string(object2encoding(dn, encoding), entry))
    return addr_id2dn
def generate_voip_addresses(sink, encoding, *args):
    """Write one LDIF entry per voip address to *sink*.

    :param sink: writable LDIF output stream
    :param encoding: target character encoding for the emitted entries
    :param args: passed through to VoipAddress.list_voip_attributes
    :return: dict mapping voip address entity_id -> (unencoded) DN
    """
    database = Factory.get("Database")()
    voip_address = VoipAddress(database)
    sink.write(container_entry_string('VOIP_ADDRESS'))
    addr_id2dn = {}
    for attrs in voip_address.list_voip_attributes(*args):
        attrs['objectClass'] = ['top', 'voipAddress']
        dn = "voipOwnerId=%s,%s" % (attrs['voipOwnerId'],
                                    ldapconf('VOIP_ADDRESS', 'dn', None))
        addr_id2dn[attrs.pop("entity_id")] = dn
        encoded = object2encoding(attrs, encoding)
        # cn must be present in the entry, even when empty.
        if not encoded.get("cn"):
            encoded["cn"] = ()
        sink.write(entry_string(object2encoding(dn, encoding), encoded))
    return addr_id2dn
def write_subnet_ldif():
    """Write one ipNetwork entry per registered subnet to the SUBNETS file."""
    DN = ldapconf('SUBNETS', 'dn')
    # Attribute names holding the subnet ip range, plus any extra object
    # classes, come from the configured range schema.
    startAttr, endAttr, objectClasses = ldapconf('SUBNETS', 'rangeSchema')
    objectClasses = ('top', 'ipNetwork') + tuple(objectClasses)
    db = Factory.get('Database')()
    f = ldif_outfile('SUBNETS')
    f.write(container_entry_string('SUBNETS'))
    for row in Subnet(db).search():
        cn = "%s/%s" % (row['subnet_ip'], row['subnet_mask'])
        desc = row['description']
        f.write(entry_string("cn=%s,%s" % (cn, DN), {
            'objectClass': objectClasses,
            # Description is optional; empty tuple when missing.
            'description': (desc and (iso2utf(desc),) or ()),
            'ipNetworkNumber': (row['subnet_ip'],),
            'ipNetmaskNumber': (netmask_to_ip(row['subnet_mask']),),
            startAttr: (str(int(row['ip_min'])),),
            endAttr: (str(int(row['ip_max'])),)}))
    end_ldif_outfile('SUBNETS', f)
def main(inargs=None):
    """Generate the group LDAP tree and a group-membership pickle dump.

    :param inargs: argument list for argparse (defaults to sys.argv)
    """
    parser = argparse.ArgumentParser(
        description="Generate a group tree for LDAP",
    )
    parser.add_argument(
        '--ldiffile',
        # Bugfix: help string was '%(metavar)' (missing trailing 's'),
        # which makes argparse raise when rendering --help.
        help='Write groups to the ldif-file %(metavar)s',
        metavar='file',
    )
    parser.add_argument(
        '--picklefile',
        help='Write group memberships to the pickle-file %(metavar)s',
        metavar='file',
    )
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)

    if not any((args.ldiffile, args.picklefile)):
        parser.error('Must use --ldiffile or --picklefile')

    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    ldiffile = args.ldiffile
    picklefile = args.picklefile

    db = Factory.get('Database')()
    dn = ldapconf('GROUP', 'dn')

    logger.info('Generating LDIF...')
    destfile = ldif_outfile('GROUP', ldiffile)
    destfile.write(container_entry_string('GROUP'))
    mbr2grp = dump_ldif(db, dn, destfile)
    end_ldif_outfile('GROUP', destfile)
    logger.info('Wrote LDIF to %r', ldiffile)

    # Bugfix: the original always ran the pickle step, crashing with a
    # TypeError (None + '.tmp') when only --ldiffile was given even
    # though the option check above allows that combination.
    if picklefile:
        logger.info('Generating pickle dump...')
        tmpfname = picklefile + '.tmp'
        # Write-then-rename so readers never observe a partial pickle;
        # close the handle before renaming (the original leaked it).
        with open(tmpfname, 'wb') as fp:
            pickle.dump(mbr2grp, fp, pickle.HIGHEST_PROTOCOL)
        os.rename(tmpfname, picklefile)
        logger.info('Wrote pickle file to %r', picklefile)

    logger.info('Done %s', parser.prog)
def dump(self):
    """Write the uia RADIUS user tree to the 'RADIUS' LDIF outfile."""
    fd = ldif_outfile('RADIUS')
    logger.debug('writing to %s', repr(fd))
    fd.write(container_entry_string('RADIUS'))
    logger.info('Generating export...')
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        try:
            uname = self.account_names[account_id]
        except KeyError:
            logger.error('No account name for account_id=%r', account_id)
            continue
        try:
            auth = self.user_password.get(account_id)
        except LookupError:
            auth = None
        try:
            ntauth = self.nt_password.get(account_id)
        except LookupError:
            ntauth = None
        if account_id in self.quarantines:
            qh = QuarantineHandler(self.db, self.quarantines[account_id])
            if qh.should_skip():
                continue
            if qh.is_locked():
                # Locked accounts are exported without credentials.
                auth = ntauth = None
        dn = ','.join(('uid=' + uname, self.user_dn))
        entry = {
            'objectClass': ['top', 'account', 'uiaRadiusAccount'],
            'uid': (uname, ),
            'radiusTunnelType': ('VLAN', ),
            'radiusTunnelMediumType': ('IEEE-802', ),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0], ),
            'radiusClass': (vlan_vpn[1], ),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = auth
        if ntauth:
            entry['ntPassword'] = (ntauth, )
        fd.write(entry_string(dn, entry, False))
    end_ldif_outfile('RADIUS', fd)
def main():
    """Parse command line arguments and run the full VoIP LDIF export."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--output',
                        type=text_type,
                        dest='output',
                        help='output file')
    args = parser.parse_args()

    out = ldif_outfile('VOIP', args.output)
    logger.info('Starting VoIP LDIF export to %s', out.name)
    out.write(container_entry_string('VOIP'))

    logger.info('Fetching persons and primary accounts')
    persons, primary2pid, sysadm_aid = get_voip_persons_and_primary_accounts()

    logger.info('Fetching VoIP addresses')
    addr_id2dn = generate_voip_addresses(out, persons, primary2pid,
                                         sysadm_aid)

    logger.info('Fetching VoIP clients')
    generate_voip_clients(out, addr_id2dn, persons, primary2pid, sysadm_aid)

    out.close()
    logger.info('Done')
def main():
    """Parse command line arguments and run the full VoIP LDIF export."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-o', '--output',
                        dest='output',
                        type=text_type,
                        help='output file')
    args = parser.parse_args()

    sink = ldif_outfile('VOIP', args.output)
    logger.info('Starting VoIP LDIF export to %s', sink.name)
    sink.write(container_entry_string('VOIP'))

    logger.info('Fetching persons and primary accounts')
    persons, primary2pid, sysadm_aid = get_voip_persons_and_primary_accounts()

    logger.info('Fetching VoIP addresses')
    addr_id2dn = generate_voip_addresses(sink, persons, primary2pid,
                                         sysadm_aid)

    logger.info('Fetching VoIP clients')
    generate_voip_clients(sink, addr_id2dn, persons, primary2pid, sysadm_aid)

    sink.close()
    logger.info('Done')
def write_subnet_ldif():
    """Write one ipNetwork entry per registered subnet to the SUBNETS file."""
    DN = ldapconf('SUBNETS', 'dn')
    # Attribute names holding the subnet ip range, plus any extra object
    # classes, come from the configured range schema.
    startAttr, endAttr, objectClasses = ldapconf('SUBNETS', 'rangeSchema')
    objectClasses = ('top', 'ipNetwork') + tuple(objectClasses)
    db = Factory.get('Database')()
    f = ldif_outfile('SUBNETS')
    f.write(container_entry_string('SUBNETS'))
    for row in Subnet(db).search():
        cn = "%s/%s" % (row['subnet_ip'], row['subnet_mask'])
        desc = row['description']
        f.write(
            entry_string(
                "cn=%s,%s" % (cn, DN), {
                    'objectClass': objectClasses,
                    # Description is optional; empty tuple when missing.
                    'description': (desc and (iso2utf(desc), ) or ()),
                    'ipNetworkNumber': (row['subnet_ip'], ),
                    'ipNetmaskNumber': (netmask_to_ip(row['subnet_mask']), ),
                    startAttr: (str(int(row['ip_min'])), ),
                    endAttr: (str(int(row['ip_max'])), )
                }))
    end_ldif_outfile('SUBNETS', f)
def main():
    """Parse command line options and generate the course (KURS) tree."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], '', [
            'help', 'aktivitetfile=', 'enhetfile=', 'emnefile=',
            'ldiffile=', 'picklefile='])
    except getopt.GetoptError:
        usage(1)
    # Defaults come from cereconf.LDAP_KURS; command line options below
    # override them.
    aktivitetfile, enhetfile, emnefile, picklefile, ldiffile = map(
        cereconf.LDAP_KURS.get,
        ('aktivitetfile', 'enhetfile', 'emnefile', 'picklefile', 'file'))
    for opt, val in opts:
        if opt in ('--help',):
            usage()
        elif opt in ('--aktivitetfile',):
            aktivitetfile = val
        elif opt in ('--enhetfile',):
            enhetfile = val
        elif opt in ('--emnefile',):
            emnefile = val
        elif opt in ('--picklefile',):
            picklefile = val
        elif opt in ('--ldiffile',):
            ldiffile = val
    # All five files must be known, and no positional args are accepted.
    if not (aktivitetfile and enhetfile and emnefile and picklefile
            and ldiffile) or args:
        usage(1)
    logger.info('Start')
    cgi = CerebrumGroupInfo()
    sip = StudinfoParsers(emnefile, aktivitetfile, enhetfile)
    destfile = ldif_outfile('KURS', ldiffile)
    destfile.write(container_entry_string('KURS'))
    urn_dict = gen_undervisningsaktivitet(cgi, sip, destfile)
    urn_dict.update(gen_undervisningsenhet(cgi, sip, destfile))
    end_ldif_outfile('KURS', destfile)
    owner_id2urn = gen_owner_id2urn(urn_dict)
    dump_pickle_file(picklefile, owner_id2urn)
    logger.info('Done')
def main():
    """Parse command line options and run the group LDIF/pickle export.

    Writes the group tree to --ldiffile, and the member-to-group mapping
    (the module-level ``mbr2grp`` populated by ``dump_ldif``) to
    --picklefile.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'h', [
            'help', 'ldiffile=', 'picklefile='])
    except getopt.GetoptError:
        usage(1)
    # Bugfix: initialize so that missing options reach usage(1) below
    # instead of raising NameError.
    picklefile = ldiffile = None
    for opt, val in opts:
        # Bugfix: '-h' was accepted by getopt but silently ignored.
        if opt in ('-h', '--help'):
            usage()
        elif opt in ('--picklefile',):
            picklefile = val
        elif opt in ('--ldiffile',):
            ldiffile = val
    # Both files are required, and no positional args are accepted.
    if not (picklefile and ldiffile) or args:
        usage(1)
    destfile = ldif_outfile('GROUP', ldiffile)
    destfile.write(container_entry_string('GROUP'))
    dump_ldif(destfile)
    end_ldif_outfile('GROUP', destfile)
    # Write-then-rename so readers never observe a partial pickle; close
    # the handle before renaming (the original leaked it).
    tmpfname = picklefile + '.tmp'
    with open(tmpfname, 'wb') as fp:
        pickle.dump(mbr2grp, fp, pickle.HIGHEST_PROTOCOL)
    os.rename(tmpfname, picklefile)
def main():
    """Parse command line options and run the group LDIF/pickle export.

    Writes the group tree to --ldiffile, and the member-to-group mapping
    (the module-level ``mbr2grp`` populated by ``dump_ldif``) to
    --picklefile.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'h',
                                   ['help', 'ldiffile=', 'picklefile='])
    except getopt.GetoptError:
        usage(1)
    # Bugfix: initialize so that missing options reach usage(1) below
    # instead of raising NameError.
    picklefile = ldiffile = None
    for opt, val in opts:
        # Bugfix: '-h' was accepted by getopt but silently ignored.
        if opt in ('-h', '--help'):
            usage()
        elif opt in ('--picklefile', ):
            picklefile = val
        elif opt in ('--ldiffile', ):
            ldiffile = val
    # Both files are required, and no positional args are accepted.
    if not (picklefile and ldiffile) or args:
        usage(1)
    destfile = ldif_outfile('GROUP', ldiffile)
    destfile.write(container_entry_string('GROUP'))
    dump_ldif(destfile)
    end_ldif_outfile('GROUP', destfile)
    # Write-then-rename so readers never observe a partial pickle; close
    # the handle before renaming (the original leaked it).
    tmpfname = picklefile + '.tmp'
    with open(tmpfname, 'wb') as fp:
        pickle.dump(mbr2grp, fp, pickle.HIGHEST_PROTOCOL)
    os.rename(tmpfname, picklefile)
# Module-level script body (Python 2): parse -o/--outfile and run the
# automount export.
ofile = None
try:
    opts, args = getopt.getopt(sys.argv[1:], "ho:", ("help", "outfile="))
except getopt.GetoptError, e:
    usage(str(e))
if args:
    usage("Invalid arguments: " + " ".join(args))
for opt, val in opts:
    if opt in ("-o", "--outfile"):
        ofile = val
    else:
        # Anything else (including -h/--help) prints usage and exits.
        usage()

output_encoding = "utf-8"

f = ldif_outfile("AUTOMOUNT", ofile)
f.write(container_entry_string("AUTOMOUNT"))
generate_automount(f)
f.close()


def usage(err=0):
    """Print *err* (if any) and the module docstring, then exit."""
    if err:
        print >>sys.stderr, err
    print >>sys.stderr, __doc__
    sys.exit(bool(err))


if __name__ == "__main__":
    main()
def write_mail_dns():
    """Write the MAIL_DNS LDIF tree of email domains and hosts."""
    f = ldif_outfile('MAIL_DNS')

    hosts, cnames, lower2host, hosts_only_mx = get_hosts_and_cnames()

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    logger = Factory.get_logger('cronjob')

    # Collect all exportable email domains (noexport domains removed).
    email = Email.EmailDomain(db)
    email_domain = {}
    for dom_entry in email.list_email_domains():
        email_domain[int(dom_entry['domain_id'])] = dom_entry['domain']

    for no_exp_dom in email.list_email_domains_with_category(
            co.email_domain_category_noexport):
        del email_domain[int(no_exp_dom['domain_id'])]

    domains = email_domain.values()
    domains.sort()
    domain_dict = {}
    for domain in domains:
        domain_dict[domain.lower()] = True
        # Verify that domains have a MX-record.
        for arg in cereconf.LDAP_MAIL_DNS['dig_args']:
            zone = arg[0]
            if domain.endswith(zone) and not (domain in hosts_only_mx
                                              or domain in hosts):
                logger.error("email domain without MX defined: %s" % domain)
        # Valid email domains only requires MX
        if domain in hosts_only_mx:
            del hosts_only_mx[domain]

    for host in hosts_only_mx:
        logger.warn(
            "MX defined but no A/AAAA record or valid email domain: %s"
            % host)

    def handle_domain_host(host):
        # Emit the host and its non-domain cnames, and consume them so
        # they are not emitted again in the host pass below.
        f.write("host: %s\n" % lower2host[host])
        for cname in hosts[host]:
            if not domain_dict.has_key(cname):
                f.write("cn: %s\n" % lower2host[cname])
                del cnames[cname]
        del hosts[host]

    dn_suffix = ldapconf('MAIL_DNS', 'dn')
    f.write(container_entry_string('MAIL_DNS'))

    # One entry per email domain; attach host/cname info when the domain
    # is itself a host (or a cname of one).
    for domain in domains:
        f.write("""dn: cn=%s,%s
objectClass: uioHost
cn: %s
""" % (domain, dn_suffix, domain))
        domain = domain.lower()
        if cnames.has_key(domain):
            f.write("cn: %s\n" % lower2host[cnames[domain]])
            handle_domain_host(cnames[domain])
        elif hosts.has_key(domain):
            handle_domain_host(domain)
        f.write('\n')

    # One entry per remaining (non-domain) host, with its cnames.
    sorted_hosts = hosts.keys()
    sorted_hosts.sort()
    for host in sorted_hosts:
        f.write("""dn: host=%s,%s
objectClass: uioHost
host: %s
cn: %s
""" % (lower2host[host], dn_suffix, lower2host[host], lower2host[host]))
        for cname in hosts[host]:
            f.write("cn: %s\n" % lower2host[cname])
        f.write('\n')

    end_ldif_outfile('MAIL_DNS', f)
# Module-level script body (Python 2): parse -o/--outfile and run the
# VoIP export.
try:
    opts, args = getopt.getopt(sys.argv[1:], "ho:", ("help", "outfile="))
except getopt.GetoptError, e:
    usage(str(e))
if args:
    usage("Invalid arguments: " + " ".join(args))
for opt, val in opts:
    if opt in ("-o", "--outfile"):
        ofile = val
    else:
        # Anything else (including -h/--help) prints usage and exits.
        usage()

output_encoding = "utf-8"

f = ldif_outfile('VOIP', ofile)
f.write(container_entry_string('VOIP'))
voippersons, primary2pid, sysadm_aid = get_voip_persons_and_primary_accounts(
)
addr_id2dn = generate_voip_addresses(f, output_encoding, voippersons,
                                     primary2pid, sysadm_aid)
generate_voip_clients(f, addr_id2dn, output_encoding, voippersons,
                      primary2pid, sysadm_aid)
f.close()
# end main


def usage(err=0):
    # NOTE(review): this definition continues beyond the visible chunk.
    if err:
        print >> sys.stderr, err
# Module-level script body (Python 2): parse -o/--outfile and run the
# automount export.
ofile = None
try:
    opts, args = getopt.getopt(sys.argv[1:], "ho:", ("help", "outfile="))
except getopt.GetoptError, e:
    usage(str(e))
if args:
    usage("Invalid arguments: " + " ".join(args))
for opt, val in opts:
    if opt in ("-o", "--outfile"):
        ofile = val
    else:
        # Anything else (including -h/--help) prints usage and exits.
        usage()

output_encoding = "utf-8"

f = ldif_outfile('AUTOMOUNT', ofile)
f.write(container_entry_string('AUTOMOUNT'))
generate_automount(f)
f.close()


def usage(err=0):
    """Print *err* (if any) and the module docstring, then exit."""
    if err:
        print >> sys.stderr, err
    print >> sys.stderr, __doc__
    sys.exit(bool(err))


if __name__ == '__main__':
    main()
def write_ldif():
    """Write one LDIF entry per email target to the module-level file ``f``.

    Uses the module-level ``db``, ``co``, ``ldap`` (EmailLDAP helper),
    ``mail_dn``, ``verbose``, ``auth`` and spam defaults.
    """
    mail_targ = Email.EmailTarget(db)
    counter = 0
    curr = now()
    ldap.read_pending_moves()
    f.write(container_entry_string('MAIL'))
    for row in mail_targ.list_email_targets_ext():
        t = int(row['target_id'])
        if verbose > 1:
            logger.debug("Processing target id=%d", t)
        if t not in ldap.targ2addr:
            # There are no addresses for this target; hence, no mail
            # can reach it. Move on.
            if verbose > 1:
                logger.debug("No addresses for target id=%s. Moving on.", t)
            continue
        tt = int(row['target_type'])
        et = row['target_entity_type']
        if et is not None:
            et = int(et)
        ei = row['target_entity_id']
        if ei is not None:
            ei = int(ei)
        alias = row['alias_value']
        run_as_id = row['using_uid']
        if run_as_id is not None:
            run_as_id = int(run_as_id)
        counter += 1
        if verbose and (counter % 5000) == 0:
            logger.debug("done %d list_email_targets(): %d sec.",
                         counter, now() - curr)
        target = ""
        uid = ""
        rest = ""
        # The structure is decided by what target-type the
        # target is (class EmailConstants in Email.py):
        tt = co.EmailTarget(int(tt))
        if verbose > 1:
            logger.debug("Target id=%s is of type %s", t, tt)
        if tt == co.email_target_account:
            # Target is the local delivery defined for the Account whose
            # account_id == email_target.target_entity_id.
            target = ""
            if et == co.entity_account:
                if ei in ldap.acc2name:
                    target = ldap.acc2name[ei]
                else:
                    logger.warn("Target id=%s (type %s): no user id=%s found",
                                t, tt, ei)
                    continue
            else:
                logger.warn(
                    "Target id=%s (type %s): wrong entity type: %s "
                    "(entity_id=%s)", t, tt, et, ei)
                continue
            # Find quota-settings:
            if t in ldap.targ2quota:
                soft, hard = ldap.targ2quota[t]
                rest += "softQuota: %s\n" % soft
                rest += "hardQuota: %s\n" % hard
            # Find vacations-settings:
            if t in ldap.targ2vacation:
                txt, start, end = ldap.targ2vacation[t]
                rest += "tripnote:: %s\n" % \
                    base64.encodestring(txt or "<No message>\n"
                                        ).replace("\n", "")
                rest += "tripnoteActive: TRUE\n"
            # See if e-mail delivery should be suspended.
            # We do try/raise/except to support what might be implemented
            # at other institutions.
            try:
                if cereconf.LDAP_INST != "uio":
                    raise AttributeError
            except AttributeError:
                if ei in ldap.pending:
                    rest += "mailPause: TRUE\n"
            # Does the event log have an unprocessed primary email change
            # for this email target?
            # pending_primary_email is populated by EmailLDAPUiOMixin
            if (hasattr(ldap, 'pending_primary_email')
                    and t in ldap.pending_primary_email):
                # maybe the event has been processed by now?
                pending_event = False
                for event_id in ldap.pending_primary_email[t]:
                    try:
                        db.get_event(event_id=event_id)
                        pending_event = True
                    except Errors.NotFoundError:
                        continue
                if pending_event:
                    rest += "mailPausePendingEvent: TRUE\n"
            # Any server info?
            rest += dict_to_ldif_string(ldap.get_server_info(row))
        elif tt == co.email_target_deleted:
            # Target type for addresses that are no longer working, but
            # for which it is useful to include of a short custom text in
            # the error message returned to the sender. The text
            # is taken from email_target.alias_value
            if et == co.entity_account:
                if ei in ldap.acc2name:
                    target = ldap.acc2name[ei]
            if alias:
                rest += "forwardDestination: %s\n" % alias
        elif tt == co.email_target_forward:
            # Target is a pure forwarding mechanism; local deliveries
            # will only occur as indirect deliveries to the addresses
            # forwarded to. Both email_target.target_entity_id and
            # email_target.alias_value should be NULL, as they are
            # ignored. The email address(es) to forward to is taken
            # from table email_forward.
            pass
        elif tt in (co.email_target_pipe, co.email_target_RT,
                    co.email_target_file, co.email_target_Sympa):
            # Target is a shell pipe. The command (and args) to pipe mail
            # into is gathered from email_target.alias_value. Iff
            # email_target.target_entity_id is set and belongs to an
            # Account, deliveries to this target will be run as that
            # account.
            # or
            # Target is a file. The absolute path of the file is gathered
            # from email_target.alias_value. Iff
            # email_target.target_entity_id is set and belongs to an
            # Account, deliveries to this target will be run as that
            # account.
            # or
            # Target is a Sympa mailing list. The command (and args)
            # to pipe mail into is gathered from email_target.alias_value.
            # Iff email_target.target_entity_id is set and belongs to an
            # Account, deliveries to this target will be run as that
            # account.
            if alias is None:
                logger.warn("Target id=%s (type %s) needs an alias_value",
                            t, tt)
                continue
            if run_as_id is not None:
                if run_as_id in ldap.acc2name:
                    uid = ldap.acc2name[run_as_id]
                else:
                    logger.warn("Target id=%s (type %s) no user id=%s found",
                                t, tt, ei)
                    continue
        elif tt == co.email_target_multi:
            # Target is the set of `account`-type targets corresponding to
            # the Accounts that are first-level members of the Group that
            # has group_id == email_target.target_entity_id.
            if et == co.entity_group:
                try:
                    addrs, missing = ldap.get_multi_target(
                        ei, ignore_missing=True)
                except ValueError, e:
                    logger.warn("Target id=%s (type %s): %s", t, tt, e)
                    continue
                for addr in addrs:
                    rest += "forwardDestination: %s\n" % addr
                for addr in missing:
                    logger.warn(
                        "Target id=%s (type %s): "
                        "Multitarget group id %s: "
                        "account %s has no primary address",
                        t, tt, ei, addr)
            else:
                # A 'multi' target with no forwarding; seems odd.
                logger.warn("Target id=%s (type %s) no forwarding found",
                            t, tt)
                continue
        else:
            # We don't want to log errors for distributiong groups.
            # This is really a bad hack. This LDIF generator should
            # be re-written in a way that lets us define desired
            # functionality in a non-hackis-way.
            try:
                if tt == co.email_target_dl_group:
                    continue
            except AttributeError:
                pass
            # The target-type isn't known to this script.
            logger.error("Wrong target-type in target id=%s: %s", t, tt)
            continue
        # Emit the entry assembled above.
        f.write("dn: cn=d%s,%s\n" % (t, mail_dn))
        f.write("objectClass: mailAddr\n")
        f.write("cn: d%s\n" % t)
        f.write(dict_to_ldif_string(ldap.get_target_info(row)))
        if uid:
            f.write("uid: %s\n" % uid)
        if rest:
            f.write(rest)
        # Find primary mail-address:
        primary_address = None
        if t in ldap.targ2prim:
            if ldap.targ2prim[t] in ldap.aid2addr:
                primary_address = ldap.aid2addr[ldap.targ2prim[t]]
                f.write("defaultMailAddress: %s\n" % primary_address)
            else:
                logger.warning(
                    "Strange: target id=%d, targ2prim[t]: %d, but no aid2addr",
                    t, ldap.targ2prim[t])
        # Find addresses for target:
        for a in ldap.targ2addr[t]:
            f.write("mail: %s\n" % a)
        # Find forward-settings:
        if t in ldap.targ2forward:
            if tt == co.email_target_account and t in ldap.targ2localdelivery:
                if primary_address:
                    f.write("forwardDestination: %s\n" % primary_address)
                else:
                    logger.warning(
                        "Missing primary address when setting local delivery "
                        "for account_id:%s target_id:%s",
                        ldap.targ2prim.get(t), t)
            for addr in ldap.targ2forward[t]:
                # Skip local forward addresses when the account is deleted,
                # else they will create an unnecessary bounce message.
                if tt == co.email_target_deleted and addr in ldap.targ2addr[t]:
                    continue
                f.write("forwardDestination: %s\n" % addr)
        # Find spam-settings:
        if t in ldap.targ2spam:
            level, action = ldap.targ2spam[t]
            f.write("spamLevel: %s\n" % level)
            f.write("spamAction: %s\n" % action)
        else:
            # Set default-settings.
            f.write("spamLevel: %s\n" % default_spam_level)
            f.write("spamAction: %s\n" % default_spam_action)
        # Filters
        for a in ldap.targ2filter[t]:
            f.write("mailFilter: %s\n" % a)
        # Populate auth-data:
        if auth and tt == co.email_target_account:
            if ei in ldap.e_id2passwd:
                passwd = ldap.e_id2passwd[ei]
                if not passwd:
                    passwd = "*invalid"
                f.write("userPassword: {crypt}%s\n" % passwd)
            else:
                logger.error("No auth-data for user: %s\n" % (target or ei))
        misc = ldap.get_misc(row)
        if misc:
            f.write("%s\n" % misc)
        f.write("\n")
# Module-level script body (Python 2): parse -o/--outfile and run the
# VoIP export.
try:
    opts, args = getopt.getopt(sys.argv[1:], "ho:", ("help", "outfile="))
except getopt.GetoptError, e:
    usage(str(e))
if args:
    usage("Invalid arguments: " + " ".join(args))
for opt, val in opts:
    if opt in ("-o", "--outfile"):
        ofile = val
    else:
        # Anything else (including -h/--help) prints usage and exits.
        usage()

output_encoding = "utf-8"

f = ldif_outfile("VOIP", ofile)
f.write(container_entry_string("VOIP"))
voippersons, primary2pid, sysadm_aid = \
    get_voip_persons_and_primary_accounts()
addr_id2dn = generate_voip_addresses(f, output_encoding, voippersons,
                                     primary2pid, sysadm_aid)
generate_voip_clients(f, addr_id2dn, output_encoding, voippersons,
                      primary2pid, sysadm_aid)
f.close()
# end main


def usage(err=0):
    """Print *err* (if any) and the module docstring, then exit."""
    if err:
        print >>sys.stderr, err
    print >>sys.stderr, __doc__
    sys.exit(bool(err))
def write_ldif():
    """Write one LDIF entry per e-mail target to the module-level file ``f``.

    Iterates over every e-mail target and emits a ``mailAddr`` entry
    carrying addresses, quota, vacation, forwarding, spam and filter
    attributes, plus (when ``auth`` is set) password data.

    Relies on module globals: ``db``, ``f`` (output stream), ``ldap``
    (pre-loaded e-mail caches such as ``targ2addr``/``targ2prim``),
    ``co`` (constants), ``mail_dn``, ``verbose``, ``auth``,
    ``default_spam_level`` and ``default_spam_action``.
    """
    mail_targ = Email.EmailTarget(db)
    counter = 0
    curr = now()
    ldap.read_pending_moves()

    f.write(container_entry_string('MAIL'))

    for row in mail_targ.list_email_targets_ext():
        t = int(row['target_id'])
        if verbose > 1:
            logger.debug("Processing target id=%d", t)
        if t not in ldap.targ2addr:
            # There are no addresses for this target; hence, no mail
            # can reach it.  Move on.
            if verbose > 1:
                logger.debug("No addresses for target id=%s. Moving on.", t)
            continue
        # Normalize the row values; several columns may be NULL.
        tt = int(row['target_type'])
        et = row['target_entity_type']
        if et is not None:
            et = int(et)
        ei = row['target_entity_id']
        if ei is not None:
            ei = int(ei)
        alias = row['alias_value']
        run_as_id = row['using_uid']
        if run_as_id is not None:
            run_as_id = int(run_as_id)
        counter += 1
        if verbose and (counter % 5000) == 0:
            # Periodic progress logging for long runs.
            logger.debug("done %d list_email_targets(): %d sec.",
                         counter, now() - curr)

        # Per-target accumulators: `target` (account name), `uid`
        # (run-as user) and `rest` (extra attribute lines).
        target = ""
        uid = ""
        rest = ""

        # The structure is decided by what target-type the
        # target is (class EmailConstants in Email.py):
        tt = co.EmailTarget(int(tt))

        if verbose > 1:
            logger.debug("Target id=%s is of type %s", t, tt)

        if tt == co.email_target_account:
            # Target is the local delivery defined for the Account whose
            # account_id == email_target.target_entity_id.
            target = ""
            if et == co.entity_account:
                if ei in ldap.acc2name:
                    target = ldap.acc2name[ei]
                else:
                    logger.warn("Target id=%s (type %s): no user id=%s found",
                                t, tt, ei)
                    continue
            else:
                logger.warn("Target id=%s (type %s): wrong entity type: %s "
                            "(entity_id=%s)", t, tt, et, ei)
                continue

            # Find quota-settings:
            if t in ldap.targ2quota:
                soft, hard = ldap.targ2quota[t]
                rest += "softQuota: %s\n" % soft
                rest += "hardQuota: %s\n" % hard

            # Find vacations-settings:
            if t in ldap.targ2vacation:
                txt, start, end = ldap.targ2vacation[t]
                # "tripnote::" (double colon) marks a base64-encoded
                # LDIF value; strip the newlines encodestring inserts.
                rest += "tripnote:: %s\n" % \
                    base64.encodestring(txt or "<No message>\n"
                                        ).replace("\n", "")
                rest += "tripnoteActive: TRUE\n"

            # See if e-mail delivery should be suspended.
            # We do try/raise/except to support what might be implemented
            # at other institutions.
            try:
                if cereconf.LDAP_INST != "uio":
                    raise AttributeError
            except AttributeError:
                if ei in ldap.pending:
                    rest += "mailPause: TRUE\n"

            # Does the event log have an unprocessed primary email change
            # for this email target?
            # pending_primary_email is populated by EmailLDAPUiOMixin
            if hasattr(ldap, 'pending_primary_email') and \
                    t in ldap.pending_primary_email:
                # maybe the event has been processed by now?
                pending_event = False
                for event_id in ldap.pending_primary_email[t]:
                    try:
                        db.get_event(event_id=event_id)
                        pending_event = True
                    except Errors.NotFoundError:
                        # Event already processed and removed.
                        continue
                if pending_event:
                    rest += "mailPausePendingEvent: TRUE\n"

            # Any server info?
            rest += dict_to_ldif_string(ldap.get_server_info(row))

        elif tt == co.email_target_deleted:
            # Target type for addresses that are no longer working, but
            # for which it is useful to include a short custom text in
            # the error message returned to the sender.  The text
            # is taken from email_target.alias_value
            if et == co.entity_account:
                if ei in ldap.acc2name:
                    target = ldap.acc2name[ei]
            if alias:
                rest += "forwardDestination: %s\n" % alias

        elif tt == co.email_target_forward:
            # Target is a pure forwarding mechanism; local deliveries
            # will only occur as indirect deliveries to the addresses
            # forwarded to.  Both email_target.target_entity_id and
            # email_target.alias_value should be NULL, as they are
            # ignored.  The email address(es) to forward to is taken
            # from table email_forward.
            pass

        elif tt in (co.email_target_pipe, co.email_target_RT,
                    co.email_target_file, co.email_target_Mailman,
                    co.email_target_Sympa):
            # Target is a shell pipe. The command (and args) to pipe mail
            # into is gathered from email_target.alias_value.  Iff
            # email_target.target_entity_id is set and belongs to an
            # Account, deliveries to this target will be run as that
            # account.
            #   or
            # Target is a file. The absolute path of the file is gathered
            # from email_target.alias_value.  Iff
            # email_target.target_entity_id is set and belongs to an
            # Account, deliveries to this target will be run as that
            # account.
            #   or
            # Target is a Mailman or Sympa mailing list. The command (and
            # args) to pipe mail into is gathered from
            # email_target.alias_value.  Iff email_target.target_entity_id
            # is set and belongs to an Account, deliveries to this target
            # will be run as that account.
            if alias == None:
                logger.warn("Target id=%s (type %s) needs an alias_value",
                            t, tt)
                continue
            if run_as_id is not None:
                if run_as_id in ldap.acc2name:
                    uid = ldap.acc2name[run_as_id]
                else:
                    logger.warn("Target id=%s (type %s) no user id=%s found",
                                t, tt, ei)
                    continue

        elif tt == co.email_target_multi:
            # Target is the set of `account`-type targets corresponding to
            # the Accounts that are first-level members of the Group that
            # has group_id == email_target.target_entity_id.
            if et == co.entity_group:
                try:
                    addrs, missing = ldap.get_multi_target(
                        ei, ignore_missing=True)
                except ValueError, e:
                    logger.warn("Target id=%s (type %s): %s", t, tt, e)
                    continue
                for addr in addrs:
                    rest += "forwardDestination: %s\n" % addr
                for addr in missing:
                    logger.warn("Target id=%s (type %s): "
                                "Multitarget group id %s: "
                                "account %s has no primary address",
                                t, tt, ei, addr)
            else:
                # A 'multi' target with no forwarding; seems odd.
                logger.warn("Target id=%s (type %s) no forwarding found",
                            t, tt)
                continue
        else:
            # We don't want to log errors for distribution groups.
            # This is really a bad hack.  This LDIF generator should
            # be re-written in a way that lets us define desired
            # functionality in a less hackish way.
            try:
                if tt == co.email_target_dl_group:
                    continue
            except AttributeError:
                # email_target_dl_group is not defined in this
                # installation; fall through to the error below.
                pass
            # The target-type isn't known to this script.
            logger.error("Wrong target-type in target id=%s: %s", t, tt)
            continue

        # Emit the entry header and whatever attributes were collected.
        f.write("dn: cn=d%s,%s\n" % (t, mail_dn))
        f.write("objectClass: mailAddr\n")
        f.write("cn: d%s\n" % t)
        f.write(dict_to_ldif_string(ldap.get_target_info(row)))
        if uid:
            f.write("uid: %s\n" % uid)
        if rest:
            f.write(rest)

        # Find primary mail-address:
        primary_address = None
        if t in ldap.targ2prim:
            if ldap.targ2prim[t] in ldap.aid2addr:
                primary_address = ldap.aid2addr[ldap.targ2prim[t]]
                f.write("defaultMailAddress: %s\n" % primary_address)
            else:
                logger.warning(
                    "Strange: target id=%d, targ2prim[t]: %d, but no aid2addr",
                    t, ldap.targ2prim[t])

        # Find addresses for target:
        for a in ldap.targ2addr[t]:
            f.write("mail: %s\n" % a)

        # Find forward-settings:
        if t in ldap.targ2forward:
            if tt == co.email_target_account and t in ldap.targ2localdelivery:
                # Local delivery alongside forwarding: keep a copy by
                # also forwarding to the primary address.
                if primary_address:
                    f.write("forwardDestination: %s\n" % primary_address)
                else:
                    logger.warning(
                        "Missing primary address when setting local delivery"
                        " for account_id:%s target_id:%s",
                        ldap.targ2prim.get(t), t)
            for addr in ldap.targ2forward[t]:
                # Skip local forward addresses when the account is
                # deleted, else they will create an unnecessary bounce
                # message.
                if tt == co.email_target_deleted and addr in ldap.targ2addr[t]:
                    continue
                f.write("forwardDestination: %s\n" % addr)

        # Find spam-settings:
        if t in ldap.targ2spam:
            level, action = ldap.targ2spam[t]
            f.write("spamLevel: %s\n" % level)
            f.write("spamAction: %s\n" % action)
        else:
            # Set default-settings.
            f.write("spamLevel: %s\n" % default_spam_level)
            f.write("spamAction: %s\n" % default_spam_action)

        # Filters
        for a in ldap.targ2filter[t]:
            f.write("mailFilter: %s\n" % a)

        # Populate auth-data:
        if auth and tt == co.email_target_account:
            if ei in ldap.e_id2passwd:
                passwd = ldap.e_id2passwd[ei]
                if not passwd:
                    # Never export an empty hash.
                    passwd = "*invalid"
                f.write("userPassword: {crypt}%s\n" % passwd)
            else:
                logger.error("No auth-data for user: %s\n" % (target or ei))

        # Any extra attributes from the mixin.
        misc = ldap.get_misc(row)
        if misc:
            f.write("%s\n" % misc)

        # A blank line terminates this LDIF entry.
        f.write("\n")
def main(inargs=None):
    """Entry point: export POSIX users/filegroups/netgroups as LDIF.

    Accepts per-category output files and selection spreads, or ``--all``
    to write everything into one combined file as configured in cereconf.

    @param inargs: argument list for argparse; defaults to sys.argv[1:].
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--user-file', type=text_type, dest='user_file',
                        metavar='PATH', help='output file for users')
    parser.add_argument('--user-spread', type=text_type, action='append',
                        dest='user_spread', metavar='NAME',
                        help='selection spread(s) for users')
    parser.add_argument('--filegroup-file', type=text_type,
                        dest='filegroup_file', metavar='PATH',
                        help='output file for file groups')
    parser.add_argument('--filegroup-spread', type=text_type, action='append',
                        dest='filegroup_spread', metavar='NAME',
                        help='selection spread(s) for file groups')
    parser.add_argument('--netgroup-file', type=text_type,
                        dest='netgroup_file', metavar='PATH',
                        help='output file for net groups')
    parser.add_argument('--netgroup-spread', type=text_type, action='append',
                        dest='netgroup_spread', metavar='NAME',
                        help='selection spread(s) for net groups')
    parser.add_argument('--all', action='store_true', dest='all',
                        help='write everything as configured in cereconf')
    Cerebrum.logutils.options.install_subparser(parser)
    options = parser.parse_args(inargs)

    # --all and explicit --*-file options are mutually exclusive, and at
    # least one of them must be given.
    any_file = (options.user_file
                or options.filegroup_file
                or options.netgroup_file)
    if options.all and any_file:
        parser.error('Cannot specify --all with --*-file')
    elif not (options.all or any_file):
        parser.error('Need one of --all or --*-file')

    Cerebrum.logutils.autoconf('cronjob', options)
    logger.info('Start of script %s', parser.prog)
    logger.debug('args: %r', options)

    shared_fd = None
    if options.all:
        # One combined outfile; write the container entry when a POSIX
        # DN is configured.
        shared_fd = ldif_outfile('POSIX')
        shared_fd.write("\n")
        if cereconf.LDAP_POSIX.get('dn'):
            shared_fd.write(container_entry_string('POSIX'))

    database = Factory.get('Database')()
    exporter = Factory.get('PosixLDIF')(
        db=database,
        logger=logger,
        u_sprd=options.user_spread,
        g_sprd=options.filegroup_spread,
        n_sprd=options.netgroup_spread,
        fd=shared_fd)

    # Dispatch each category to its writer; a category runs when it was
    # requested explicitly or --all was given, and its DN is configured.
    jobs = (
        ('LDAP_USER', exporter.user_ldif, options.user_file),
        ('LDAP_FILEGROUP', exporter.filegroup_ldif, options.filegroup_file),
        ('LDAP_NETGROUP', exporter.netgroup_ldif, options.netgroup_file),
    )
    for conf_var, writer, path in jobs:
        if (options.all or path) and getattr(cereconf, conf_var).get('dn'):
            writer(path)
        elif path:
            parser.error("Missing 'dn' in cereconf.{}".format(conf_var))

    if shared_fd:
        end_ldif_outfile('POSIX', shared_fd)
    logger.info('End of script %s', parser.prog)
def main():
    """Export POSIX users/filegroups/netgroups as LDIF.

    Accepts per-category output files and selection spreads, or ``--all``
    to write everything into one combined file as configured in cereconf.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--user-file', type=text_type, dest='user_file',
                        metavar='PATH', help='output file for users')
    parser.add_argument('--user-spread', type=text_type, action='append',
                        dest='user_spread', metavar='NAME',
                        help='selection spread(s) for users')
    parser.add_argument('--filegroup-file', type=text_type,
                        dest='filegroup_file', metavar='PATH',
                        help='output file for file groups')
    parser.add_argument('--filegroup-spread', type=text_type, action='append',
                        dest='filegroup_spread', metavar='NAME',
                        help='selection spread(s) for file groups')
    parser.add_argument('--netgroup-file', type=text_type,
                        dest='netgroup_file', metavar='PATH',
                        help='output file for net groups')
    parser.add_argument('--netgroup-spread', type=text_type, action='append',
                        dest='netgroup_spread', metavar='NAME',
                        help='selection spread(s) for net groups')
    parser.add_argument('--all', action='store_true', dest='all',
                        help='write everything as configured in cereconf')
    options = parser.parse_args()

    # --all and explicit --*-file options are mutually exclusive, and at
    # least one of them must be given.
    any_file = (options.user_file
                or options.filegroup_file
                or options.netgroup_file)
    if options.all and any_file:
        parser.error('Cannot specify --all with --*-file')
    elif not (options.all or any_file):
        parser.error('Need one of --all or --*-file')

    shared_fd = None
    if options.all:
        # One combined outfile; write the container entry when a POSIX
        # DN is configured.
        shared_fd = ldif_outfile('POSIX')
        shared_fd.write("\n")
        if cereconf.LDAP_POSIX.get('dn'):
            shared_fd.write(container_entry_string('POSIX'))

    database = Factory.get('Database')()
    exporter = Factory.get('PosixLDIF')(
        db=database,
        logger=logger,
        u_sprd=options.user_spread,
        g_sprd=options.filegroup_spread,
        n_sprd=options.netgroup_spread,
        fd=shared_fd)

    # Dispatch each category to its writer; a category runs when it was
    # requested explicitly or --all was given, and its DN is configured.
    jobs = (
        ('LDAP_USER', exporter.user_ldif, options.user_file),
        ('LDAP_FILEGROUP', exporter.filegroup_ldif, options.filegroup_file),
        ('LDAP_NETGROUP', exporter.netgroup_ldif, options.netgroup_file),
    )
    for conf_var, writer, path in jobs:
        if (options.all or path) and getattr(cereconf, conf_var).get('dn'):
            writer(path)
        elif path:
            parser.error("Missing 'dn' in cereconf.{}".format(conf_var))

    if shared_fd:
        end_ldif_outfile('POSIX', shared_fd)
def write_mail_dns():
    """Generate an LDIF with e-mail domains and mail hosts from DNS data.

    Writes one uioHost entry per exported e-mail domain, then one per
    remaining host with A/AAAA records.  Consistency problems between
    DNS (MX/A/AAAA/CNAME) and the registered e-mail domains are logged.
    """
    f = ldif_outfile('MAIL_DNS')

    # hosts: host -> cnames, cnames: cname -> host, lower2host:
    # lowercased name -> original-case name, hosts_only_mx: names with
    # MX but no A/AAAA record (as returned by get_hosts_and_cnames()).
    hosts, cnames, lower2host, hosts_only_mx = get_hosts_and_cnames()

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    logger = Factory.get_logger('cronjob')

    email = Email.EmailDomain(db)
    email_domain = {}
    for dom_entry in email.list_email_domains():
        email_domain[int(dom_entry['domain_id'])] = dom_entry['domain']

    # Domains in the "noexport" category must not appear in the LDIF.
    for no_exp_dom in email.list_email_domains_with_category(
            co.email_domain_category_noexport):
        del email_domain[int(no_exp_dom['domain_id'])]

    domains = email_domain.values()
    domains.sort()
    domain_dict = {}
    for domain in domains:
        domain_dict[domain.lower()] = True
        # Verify that domains have a MX-record.
        for arg in cereconf.LDAP_MAIL_DNS['dig_args']:
            zone = arg[0]
            if domain.endswith(zone) and not (
                    domain in hosts_only_mx or domain in hosts):
                logger.error("email domain without MX defined: %s" % domain)
        # Valid email domains only requires MX
        if domain in hosts_only_mx:
            del hosts_only_mx[domain]

    # Whatever is left in hosts_only_mx has an MX record but is neither
    # a real host nor a registered e-mail domain.
    for host in hosts_only_mx:
        logger.warn(
            "MX defined but no A/AAAA record or valid email domain: %s" %
            host)

    def handle_domain_host(host):
        # Emit host/cn attributes for `host` and consume it (and its
        # cnames) from the shared dicts so the host loop below does not
        # emit it again.
        f.write("host: %s\n" % lower2host[host])
        for cname in hosts[host]:
            if not domain_dict.has_key(cname):
                f.write("cn: %s\n" % lower2host[cname])
                del cnames[cname]
        del hosts[host]

    dn_suffix = ldapconf('MAIL_DNS', 'dn')
    f.write(container_entry_string('MAIL_DNS'))

    # One entry per exported e-mail domain; attach host data when the
    # domain name is itself a host or a cname for one.
    for domain in domains:
        f.write("""dn: cn=%s,%s
objectClass: uioHost
cn: %s
""" % (domain, dn_suffix, domain))
        domain = domain.lower()
        if cnames.has_key(domain):
            f.write("cn: %s\n" % lower2host[cnames[domain]])
            handle_domain_host(cnames[domain])
        elif hosts.has_key(domain):
            handle_domain_host(domain)
        f.write('\n')

    # Entries for the remaining hosts (those not consumed above).
    sorted_hosts = hosts.keys()
    sorted_hosts.sort()
    for host in sorted_hosts:
        f.write("""dn: host=%s,%s
objectClass: uioHost
host: %s
cn: %s
""" % (lower2host[host], dn_suffix, lower2host[host], lower2host[host]))
        for cname in hosts[host]:
            f.write("cn: %s\n" % lower2host[cname])
        f.write('\n')
    end_ldif_outfile('MAIL_DNS', f)