def check_domain_server_authentication(self, pipe, logon_id, description):
    """Read JSON authentication messages from *pipe* and validate the
    SamLogon entry whose logonId equals *logon_id*.

    :param pipe: readable file descriptor carrying newline-separated
                 JSON authentication messages
    :param logon_id: logon id the expected message must carry
    :param description: expected ``authDescription`` value
    """
    # Drain what the server wrote to the log pipe (at most 8192 bytes).
    messages = os.read(pipe, 8192)
    messages = get_string(messages)
    if len(messages) == 0 or messages == "None":
        self.fail("No Domain server authentication message")

    #
    # Look for the SamLogon request matching logon_id
    msg = None
    for message in messages.split("\n"):
        msg = json.loads(get_string(message))
        if logon_id == msg["Authentication"]["logonId"]:
            break
        # Not the one we want; reset so a fall-through leaves msg None.
        msg = None

    if msg is None:
        self.fail("No Domain server authentication message")

    #
    # Validate that message contains the expected data
    #
    self.assertEqual("Authentication", msg["type"])
    self.assertEqual(logon_id, msg["Authentication"]["logonId"])
    self.assertEqual("SamLogon",
                     msg["Authentication"]["serviceDescription"])
    self.assertEqual(description,
                     msg["Authentication"]["authDescription"])
def test_json_matching_entries(self):
    """The ``--json`` variants must contain the same keys as their
    respective plain counterpart.

    Does not check nested dictionaries (e.g. the ``Flags`` value of
    ``net ads lookup``).
    """
    argv = "%s %s" % (COMMAND, self.subcmd)
    try:
        out_plain = get_string(self.check_output(argv))
    except samba.tests.BlackboxProcessError as e:
        self.fail("Error calling [%s]: %s" % (argv, e))

    argv = "%s %s --json" % (COMMAND, self.subcmd)
    try:
        out_jsobj = self.check_output(argv)
    except samba.tests.BlackboxProcessError as e:
        self.fail("Error calling [%s]: %s" % (argv, e))

    parsed = json.loads(get_string(out_jsobj))
    # Top-level keys of the plain output are the lines that do not start
    # with whitespace or ':'; PLAIN_KEY_REGEX extracts the key name.
    for key in [re.match(PLAIN_KEY_REGEX, line).group(1)
                for line in out_plain.split("\n")
                if line != "" and line[0] not in " \t:"]:
        self.assertTrue(parsed.get(key) is not None)
        # Remove matched keys so leftovers can be detected below.
        del parsed[key]
    self.assertTrue(len(parsed) == 0)  # tolerate no leftovers
def test_samba_tool_showrepl_summary_forced_failure(self):
    """Tests 'samba-tool drs showrepl --summary' command when we break the
    network on purpose.

    Replication is disabled on dc1, then users are added on both DCs
    until showrepl notices the backlog and the command fails.
    """
    self.addCleanup(self._enable_all_repl, self.dc1)
    self._disable_all_repl(self.dc1)

    samdb1 = self.getSamDB("-H", "ldap://%s" % self.dc1,
                           "-U", self.cmdline_creds)
    samdb2 = self.getSamDB("-H", "ldap://%s" % self.dc2,
                           "-U", self.cmdline_creds)

    domain_dn = samdb1.domain_dn()

    # Add some things to NOT replicate
    ou1 = "OU=dc1.%x,%s" % (random.randrange(1 << 64), domain_dn)
    ou2 = "OU=dc2.%x,%s" % (random.randrange(1 << 64), domain_dn)
    samdb1.add({"dn": ou1, "objectclass": "organizationalUnit"})
    self.addCleanup(samdb1.delete, ou1, ['tree_delete:1'])
    samdb2.add({"dn": ou2, "objectclass": "organizationalUnit"})
    self.addCleanup(samdb2.delete, ou2, ['tree_delete:1'])

    # Templates with a literal %d placeholder, filled in per round.
    dn1 = 'cn=u1.%%d,%s' % (ou1)
    dn2 = 'cn=u2.%%d,%s' % (ou2)

    try:
        for i in range(100):
            samdb1.add({"dn": dn1 % i, "objectclass": "user"})
            samdb2.add({"dn": dn2 % i, "objectclass": "user"})
            out = self.check_output("samba-tool drs showrepl --summary -v "
                                    "%s %s" % (self.dc1,
                                               self.cmdline_creds))
            out = get_string(out)
            self.assertStringsEqual('[ALL GOOD]', out, strip=True)
            out = self.check_output("samba-tool drs showrepl --summary -v "
                                    "--color=yes %s %s" %
                                    (self.dc2, self.cmdline_creds))
            out = get_string(out)
            self.assertIn('[ALL GOOD]', out)

    except samba.tests.BlackboxProcessError as e:
        e_stdout = get_string(e.stdout)
        e_stderr = get_string(e.stderr)
        print("Good, failed as expected after %d rounds: %r" % (i, e.cmd))
        self.assertIn('There are failing connections', e_stdout,
                      msg=('stdout: %r\nstderr: %r\nretcode: %s'
                           '\nmessage: %r\ncmd: %r') % (e_stdout,
                                                        e_stderr,
                                                        e.returncode,
                                                        e.msg,
                                                        e.cmd))
        # assertRegexpMatches is a deprecated alias removed in
        # Python 3.12; assertRegex is the supported spelling.
        self.assertRegex(
            e_stdout,
            r'result 845[67] '
            r'\(WERR_DS_DRA_(SINK|SOURCE)_DISABLED\)',
            msg=("The process should have failed "
                 "because replication was forced off, "
                 "but it failed for some other reason."))
        self.assertIn('consecutive failure(s).', e_stdout)
    else:
        self.fail("No DRS failure noticed after 100 rounds of trying")
def set_kdc_tdb(self, attribute, val):
    """Persist a KDC setting in the gpo store, keeping a record of the
    previous value; a val of None removes the setting instead."""
    previous = self.gp_db.gpostore.get(attribute)
    self.logger.info('%s was changed from %s to %s' % (attribute,
                                                       previous, val))
    if val is None:
        # Removal: drop both the live value and its change record.
        self.gp_db.gpostore.delete(attribute)
        self.gp_db.delete(str(self), attribute)
    else:
        self.gp_db.gpostore.store(attribute, get_string(val))
        backup = get_string(previous) if previous else None
        self.gp_db.store(str(self), attribute, backup)
def test_add_new_uncovered_site(self):
    """A freshly created site must show up in samba_dnsupdate output once
    it is added to a site link (and not before)."""
    name = 'sites'
    cmd = cmd_sambatool.subcommands[name]
    cmd.outf = StringIO()
    cmd.errf = StringIO()

    site_name = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'

    # Clear out any existing site
    cmd._run("samba-tool %s" % name, 'remove', site_name)

    result = cmd._run("samba-tool %s" % name, 'create', site_name)
    if result is not None:
        self.fail("Error creating new site")

    self.lp = samba.tests.env_loadparm()
    self.creds = Credentials()
    self.creds.guess(self.lp)
    self.session = system_session()

    # Work on a throwaway copy of the DNS update cache so the real one
    # is left untouched.
    uc_fn = self.lp.private_path('dns_update_cache')
    tmp_uc = uc_fn + '_tmp'
    shutil.copyfile(uc_fn, tmp_uc)

    self.samdb = SamDB(session_info=self.session, credentials=self.creds,
                       lp=self.lp)

    # Prepare a modify that adds the new site to DEFAULTIPSITELINK.
    m = ldb.Message()
    m.dn = ldb.Dn(self.samdb, 'CN=DEFAULTIPSITELINK,CN=IP,'
                  'CN=Inter-Site Transports,CN=Sites,{0}'.format(
                      self.samdb.get_config_basedn()))
    m['siteList'] = ldb.MessageElement("CN={0},CN=Sites,{1}".format(
        site_name, self.samdb.get_config_basedn()),
        ldb.FLAG_MOD_ADD, "siteList")

    dns_c = "samba_dnsupdate --verbose --use-file={0}".format(tmp_uc)
    # Before the modify: the uncovered site must not appear.
    out = get_string(self.check_output(dns_c))
    self.assertNotIn(site_name.lower(), out)

    self.samdb.modify(m)

    # After the modify (with a fresh cache copy): updates are needed
    # and the site appears in the output.
    shutil.copyfile(uc_fn, tmp_uc)
    out = get_string(self.check_output(dns_c))
    self.assertNotIn("No DNS updates needed", out)
    self.assertIn(site_name.lower(), out)

    result = cmd._run("samba-tool %s" % name, 'remove', site_name)
    if result is not None:
        self.fail("Error deleting site")
def test_samba_tool_showrepl_json(self):
    """Tests 'samba-tool drs showrepl --json' command.

    Uses assertRegex/assertIsInstance: assertRegexpMatches is a
    deprecated alias removed in Python 3.12, and assertIsInstance
    produces useful failure messages.
    """
    out = self.check_output("samba-tool drs showrepl %s %s --json" %
                            (self.dc1, self.cmdline_creds))
    d = json.loads(get_string(out))
    self.assertEqual(set(d), set(['repsFrom', 'repsTo',
                                  "NTDSConnections", "dsa"]))

    # dsa
    for k in ["objectGUID", "invocationId"]:
        self.assertRegex(d['dsa'][k], '^%s$' % GUID_RE)
    self.assertIsInstance(d['dsa']["options"], int)

    # repsfrom and repsto
    for reps in (d['repsFrom'], d['repsTo']):
        for r in reps:
            for k in ('NC dn', "NTDS DN"):
                self.assertRegex(r[k], '^%s$' % DN_RE)
            for k in ("last attempt time",
                      "last attempt message",
                      "last success"):
                self.assertIsInstance(r[k], str)
            self.assertRegex(r["DSA objectGUID"], '^%s$' % GUID_RE)
            self.assertIsInstance(r["consecutive failures"], int)

    # ntdsconnection
    for n in d["NTDSConnections"]:
        self.assertRegex(n["dns name"],
                         r'^[\w]+\.samba\.example\.com$')
        self.assertRegex(n["name"], "^%s$" % GUID_RE)
        self.assertIsInstance(n['enabled'], bool)
        self.assertIsInstance(n['options'], int)
        self.assertIsInstance(n['replicates NC'], list)
        self.assertRegex(n["remote DN"], "^%s$" % DN_RE)
def string_to_key(cls, string, salt, params):
    """Build a Key of this enctype from a password: the key material is
    the NT hash of the UTF-8 password (salt and params are unused)."""
    creds = Credentials()
    creds.set_anonymous()
    creds.set_password(get_string(string))
    nthash = creds.get_nt_hash()
    return Key(cls.enctype, nthash)
def setUp(self):
    """Create a computer account with a known password so the tests can
    generate (failing) netlogon attempts against it."""
    super(AuthLogTestsNetLogonBadCreds, self).setUp()
    self.lp = samba.tests.env_loadparm()
    self.session = system_session()
    self.ldb = SamDB(session_info=self.session, lp=self.lp)

    self.domain = os.environ["DOMAIN"]
    self.netbios_name = "NetLogonBad"
    self.machinepass = "******"
    # NOTE(review): magic token that makes the request appear local —
    # confirm against samba.tests documentation.
    self.remoteAddress = AS_SYSTEM_MAGIC_PATH_TOKEN
    self.base_dn = self.ldb.domain_dn()
    self.dn = ("cn=%s,cn=users,%s" % (self.netbios_name, self.base_dn))

    # unicodePwd must be the UTF-16-LE encoding of the quoted password.
    utf16pw = get_string('"' + self.machinepass + '"').encode('utf-16-le')
    self.ldb.add({
        "dn": self.dn,
        "objectclass": "computer",
        "sAMAccountName": "%s$" % self.netbios_name,
        "userAccountControl":
            str(UF_WORKSTATION_TRUST_ACCOUNT | UF_PASSWD_NOTREQD),
        "unicodePwd": utf16pw
    })
def create_machine_account(self):
    """Create the workstation trust account used by the tests, plus
    matching machine credentials (schannel, no Kerberos)."""
    self.machine_pass = samba.generate_random_password(32, 32)
    self.machine_name = MACHINE_NAME
    self.machine_dn = "cn=%s,%s" % (self.machine_name,
                                    self.ldb.domain_dn())

    # remove the account if it exists, this will happen if a previous test
    # run failed
    delete_force(self.ldb, self.machine_dn)

    # unicodePwd must be the UTF-16-LE encoding of the quoted password.
    utf16pw = ('"%s"' % get_string(self.machine_pass)).encode('utf-16-le')
    self.ldb.add({
        "dn": self.machine_dn,
        "objectclass": "computer",
        "sAMAccountName": "%s$" % self.machine_name,
        "userAccountControl":
            str(UF_WORKSTATION_TRUST_ACCOUNT | UF_PASSWD_NOTREQD),
        "unicodePwd": utf16pw
    })

    self.machine_creds = Credentials()
    self.machine_creds.guess(self.get_loadparm())
    self.machine_creds.set_secure_channel_type(SEC_CHAN_WKSTA)
    self.machine_creds.set_kerberos_state(DONT_USE_KERBEROS)
    self.machine_creds.set_password(self.machine_pass)
    self.machine_creds.set_username(self.machine_name + "$")
    self.machine_creds.set_workstation(self.machine_name)
def parse_entry_data(e):
    """Decode a registry entry value: REG_MULTI_SZ holds JSON text,
    a 0/1 REG_DWORD maps to bool, anything else passes through."""
    if e.type == misc.REG_MULTI_SZ:
        # Drop embedded NUL separators before parsing the JSON payload.
        return json.loads(get_string(e.data).replace('\x00', ''))
    if e.type == misc.REG_DWORD and e.data in [0, 1]:
        return e.data == 1
    return e.data
def test_help_tree(self):
    """Walk the samba-tool subcommand tree breadth-first (up to 4 levels)
    and verify every --help output is consistent and matches the
    corresponding `samba-tool help ...` output."""
    # we call actual subprocesses, because we are probing the
    # actual help output where there is no sub-command. Don't copy
    # this if you have an actual command: for that use
    # self.runcmd() or self.runsubcmd().
    known_commands = [[]]
    failed_commands = []

    for i in range(4):
        new_commands = []
        for c in known_commands:
            line = ' '.join(['samba-tool'] + c + ['--help'])
            try:
                output = self.check_output(line)
            except BlackboxProcessError as e:
                # Keep the output for comparison but remember the failure.
                output = e.stdout
                failed_commands.append(c)
            output = get_string(output)
            # Discover the next level of subcommands from the help text.
            tail = output.partition('Available subcommands:')[2]
            subcommands = re.findall(r'^\s*([\w-]+)\s+-', tail,
                                     re.MULTILINE)
            for s in subcommands:
                new_commands.append(c + [s])

            # check that `samba-tool help X Y` == `samba-tool X Y --help`
            line = ' '.join(['samba-tool', 'help'] + c)
            try:
                output2 = self.check_output(line)
            except BlackboxProcessError as e:
                output2 = e.stdout
                failed_commands.append(c)
            output2 = get_string(output2)
            self.assertEqual(output, output2)

            err = check_help_consistency(
                output,
                options_start='Options:',
                options_end='Available subcommands:')
            if err is not None:
                self.fail("consistency error with %s:\n%s" % (line, err))

        if not new_commands:
            break
        known_commands = new_commands

    self.assertEqual(failed_commands, [])
def run(cmd):
    """
    Run a cmd, return bytes str for py2 and unicode str for py3.

    NOTE: subprocess api always return bytes, in both py2 and py3.
    """
    raw = subprocess.check_output(cmd)
    return get_string(raw.strip())
def test_json_wellformed(self):
    """The output of ``--json`` commands must parse as JSON."""
    cmdline = "%s %s --json" % (COMMAND, self.subcmd)
    try:
        raw = self.check_output(cmdline)
        # A parse error here surfaces as a test error, which is fine.
        json.loads(get_string(raw))
    except samba.tests.BlackboxProcessError as e:
        self.fail("Error calling [%s]: %s" % (cmdline, e))
def _create_ou(self, samdb, name):
    """Add an organizationalUnit called *name* under the domain root and
    return its objectGUID formatted as a string."""
    samdb.add_ldif("""
dn: %s,%s
objectClass: organizationalUnit
""" % (name, self.domain_dn))
    found = samdb.search(base="%s,%s" % (name, self.domain_dn),
                         scope=SCOPE_BASE,
                         attrs=["objectGUID"])
    raw_guid = found[0]["objectGUID"][0]
    return get_string(self._GUID_string(raw_guid))
def setUp(self):
    """Collect credentials from the environment and resolve the test
    user's SID via wbinfo."""
    super(NTLMAuthHelpersTests, self).setUp()
    self.username = os.environ["DC_USERNAME"]
    self.password = os.environ["DC_PASSWORD"]
    self.domain = os.environ["DOMAIN"]
    out = get_string(self.check_output("wbinfo -n %s" % self.username))
    # First space-separated field of the wbinfo output is the SID.
    self.group_sid = out.split(" ")[0]
    self.assertTrue(self.group_sid.startswith("S-1-5-21-"))
    # A truncated SID, used as a deliberately invalid one.
    self.bad_group_sid = self.group_sid[:-2]
def test_samba_tool_showrepl_pull_summary_all_good(self):
    """Tests 'samba-tool drs showrepl --pull-summary' command."""
    # To be sure that all is good we need to force replication
    # with everyone (because others might have it turned off), and
    # turn replication on for them in case they suddenly decide to
    # try again.
    #
    # We don't restore them to the non-auto-replication state.
    samdb1 = self.getSamDB("-H", "ldap://%s" % self.dc1,
                           "-U", self.cmdline_creds)
    self._enable_all_repl(self.dc1)
    self._force_all_reps(samdb1, self.dc1, 'inbound')
    self._force_all_reps(samdb1, self.dc1, 'outbound')

    try:
        out = self.check_output(
            "samba-tool drs showrepl --pull-summary %s %s" %
            (self.dc1, self.cmdline_creds))
        out = get_string(out)
        self.assertStringsEqual(out, "[ALL GOOD]\n")
        # (a dead `out = get_string(out)` that was immediately
        # overwritten by the next check_output has been removed)

        out = self.check_output("samba-tool drs showrepl --pull-summary "
                                "--color=yes %s %s" %
                                (self.dc1, self.cmdline_creds))
        out = get_string(out)
        self.assertStringsEqual(out, "\033[1;32m[ALL GOOD]\033[0m\n")

        # --verbose output is still quiet when all is good.
        out = self.check_output(
            "samba-tool drs showrepl --pull-summary -v %s %s" %
            (self.dc1, self.cmdline_creds))
        out = get_string(out)
        self.assertStringsEqual(out, "[ALL GOOD]\n")

        out = self.check_output(
            "samba-tool drs showrepl --pull-summary -v "
            "--color=yes %s %s" %
            (self.dc1, self.cmdline_creds))
        out = get_string(out)

    except samba.tests.BlackboxProcessError as e:
        self.fail(str(e))

    # Checks the coloured verbose output of the last command above.
    self.assertStringsEqual(out, "\033[1;32m[ALL GOOD]\033[0m\n")
def dconf_update(log, test_dir):
    """Run 'dconf update' to refresh the system databases; errors are
    logged, and nothing is done while running inside a test directory."""
    if test_dir is not None:
        # Tests must never touch the real system dconf state.
        return
    dconf = shutil.which('dconf')
    if dconf is None:
        log.error('Failed to update dconf. Command not found')
        return
    proc = Popen([dconf, 'update'], stdout=PIPE, stderr=PIPE)
    _, err = proc.communicate()
    if proc.returncode != 0:
        log.error('Failed to update dconf: %s' % get_string(err))
def process_group_policy(self, deleted_gpo_list, changed_gpo_list,
                         cfg_dir='/etc/ssh/sshd_config.d'):
    """Apply and unapply VGP sshd_config group policy.

    :param deleted_gpo_list: (guid, settings) pairs for GPOs being
                             unapplied; any config file recorded for this
                             extension is removed
    :param changed_gpo_list: GPO objects whose manifest.xml is parsed and
                             rendered into sshd config snippets
    :param cfg_dir: directory for generated sshd config snippets
    """
    # Unapply: delete the config files this extension wrote earlier.
    for guid, settings in deleted_gpo_list:
        self.gp_db.set_guid(guid)
        if str(self) in settings:
            for attribute, sshd_config in settings[str(self)].items():
                if os.path.exists(sshd_config):
                    os.unlink(sshd_config)
                self.gp_db.delete(str(self), attribute)
        self.gp_db.commit()

    for gpo in changed_gpo_list:
        if gpo.file_sys_path:
            self.gp_db.set_guid(gpo.name)
            xml = 'MACHINE/VGP/VTLA/SshCfg/SshD/manifest.xml'
            path = os.path.join(gpo.file_sys_path, xml)
            xml_conf = self.parse(path)
            if not xml_conf:
                continue
            policy = xml_conf.find('policysetting')
            data = policy.find('data')
            configfile = data.find('configfile')
            for configsection in configfile.findall('configsection'):
                # Only the unnamed (global) section is processed here.
                if configsection.find('sectionname').text:
                    continue
                settings = {}
                for kv in configsection.findall('keyvaluepair'):
                    # NOTE(review): keys/values are stored as Element
                    # objects and dereferenced via .text below — confirm
                    # this is intentional rather than kv.find('key').text.
                    settings[kv.find('key')] = kv.find('value')
                # The db key encodes GPO name + target directory.
                attribute = get_string(
                    b64encode(get_bytes(gpo.name) + get_bytes(cfg_dir)))
                fname = self.gp_db.retrieve(str(self), attribute)
                if not os.path.isdir(cfg_dir):
                    # NOTE(review): 0o640 on a directory lacks the search
                    # (x) bit; 0o750 looks more likely intended — confirm.
                    os.mkdir(cfg_dir, 0o640)
                if fname and os.path.exists(fname):
                    # Rewrite the previously generated file in place.
                    f = open(fname, 'wb')
                else:
                    f = NamedTemporaryFile(prefix='gp_', delete=False,
                                           dir=cfg_dir)
                # 'intro' is a module-level header blob defined elsewhere.
                f.write(intro)
                for k, v in settings.items():
                    f.write(b'%s %s\n' %
                            (get_bytes(k.text), get_bytes(v.text)))
                os.chmod(f.name, 0o640)
                self.gp_db.store(str(self), attribute, f.name)
                self.gp_db.commit()
                f.close()
def netlogon(self):
    """Open a sealed schannel netlogon connection using a temporary
    machine account, which is created here and deleted again at the end."""
    server = os.environ["SERVER"]
    host = os.environ["SERVER_IP"]
    lp = self.get_loadparm()

    credentials = self.get_credentials()

    session = system_session()
    ldb = SamDB(url="ldap://%s" % host,
                session_info=session,
                credentials=credentials,
                lp=lp)

    machine_pass = samba.generate_random_password(32, 32)
    machine_name = MACHINE_NAME
    machine_dn = "cn=%s,%s" % (machine_name, ldb.domain_dn())

    # Remove any leftover account from a previous failed run.
    delete_force(ldb, machine_dn)

    # unicodePwd must be the UTF-16-LE encoding of the quoted password.
    utf16pw = ('"%s"' % get_string(machine_pass)).encode('utf-16-le')
    ldb.add({
        "dn": machine_dn,
        "objectclass": "computer",
        "sAMAccountName": "%s$" % machine_name,
        "userAccountControl":
            str(UF_WORKSTATION_TRUST_ACCOUNT | UF_PASSWD_NOTREQD),
        "unicodePwd": utf16pw
    })

    machine_creds = Credentials()
    machine_creds.guess(lp)
    machine_creds.set_secure_channel_type(SEC_CHAN_WKSTA)
    machine_creds.set_kerberos_state(DONT_USE_KERBEROS)
    machine_creds.set_password(machine_pass)
    machine_creds.set_username(machine_name + "$")
    machine_creds.set_workstation(machine_name)

    netlogon.netlogon("ncacn_ip_tcp:%s[schannel,seal]" % server,
                      lp, machine_creds)

    delete_force(ldb, machine_dn)
def create_user_account(self):
    """Create the normal user account used by the tests (recreating it if
    a previous run left it behind) plus matching credentials."""
    self.user_pass = self.random_password()
    self.user_name = USER_NAME
    self.user_dn = "cn=%s,%s" % (self.user_name, self.ldb.domain_dn())

    # remove the account if it exists, this will happen if a previous test
    # run failed
    delete_force(self.ldb, self.user_dn)

    # unicodePwd must be the UTF-16-LE encoding of the quoted password.
    utf16pw = ('"%s"' % get_string(self.user_pass)).encode('utf-16-le')
    self.ldb.add({
        "dn": self.user_dn,
        "objectclass": "user",
        "sAMAccountName": "%s" % self.user_name,
        "userAccountControl": str(UF_NORMAL_ACCOUNT),
        "unicodePwd": utf16pw
    })

    self.user_creds = Credentials()
    self.user_creds.guess(self.get_loadparm())
    self.user_creds.set_password(self.user_pass)
    self.user_creds.set_username(self.user_name)
    self.user_creds.set_workstation(self.server)
def create_named_conf(paths, realm, dnsdomain, dns_backend, logger):
    """Write out a file containing zone statements suitable for inclusion in a
    named.conf file (including GSS-TSIG configuration).

    :param paths: all paths
    :param realm: Realm name
    :param dnsdomain: DNS Domain name
    :param dns_backend: DNS backend type
    :param logger: Logger object
    """

    # TODO: This really should have been done as a top level import.
    # It is done here to avoid a dependency loop.  That is, we move
    # ProvisioningError to another file, and have all the provision
    # scripts import it from there.
    from samba.provision import ProvisioningError

    if dns_backend == "BIND9_FLATFILE":
        setup_file(setup_path("named.conf"), paths.namedconf, {
            "DNSDOMAIN": dnsdomain,
            "REALM": realm,
            "ZONE_FILE": paths.dns,
            "REALM_WC": "*." + ".".join(realm.split(".")[1:]),
            "NAMED_CONF": paths.namedconf,
            "NAMED_CONF_UPDATE": paths.namedconf_update
        })

        setup_file(setup_path("named.conf.update"), paths.namedconf_update)

    elif dns_backend == "BIND9_DLZ":
        bind_info = subprocess.Popen(['named -V'], shell=True,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT,
                                     cwd='.').communicate()[0]
        # Case-fold once instead of on every comparison below.
        bind_info = get_string(bind_info).upper()

        # Exactly one of these is cleared to '' so the matching stanza
        # in the named.conf.dlz template is uncommented.
        bind9_8 = '#'
        bind9_9 = '#'
        bind9_10 = '#'
        bind9_11 = '#'
        bind9_12 = '#'
        bind9_14 = '#'
        bind9_16 = '#'
        if bind_info.find('BIND 9.8') != -1:
            bind9_8 = ''
        elif bind_info.find('BIND 9.9') != -1:
            bind9_9 = ''
        elif bind_info.find('BIND 9.10') != -1:
            bind9_10 = ''
        elif bind_info.find('BIND 9.11') != -1:
            bind9_11 = ''
        elif bind_info.find('BIND 9.12') != -1:
            bind9_12 = ''
        elif bind_info.find('BIND 9.14') != -1:
            bind9_14 = ''
        elif bind_info.find('BIND 9.16') != -1:
            bind9_16 = ''
        elif bind_info.find('BIND 9.7') != -1:
            raise ProvisioningError("DLZ option incompatible with BIND 9.7.")
        # BUGFIX: these three branches previously matched 'BIND_9.13',
        # 'BIND_9.15' and 'BIND_9.17' (with an underscore), which never
        # appears in 'named -V' output, so development releases fell
        # through to the generic warning instead of raising.
        elif bind_info.find('BIND 9.13') != -1:
            raise ProvisioningError(
                "Only stable/esv releases of BIND are supported.")
        elif bind_info.find('BIND 9.15') != -1:
            raise ProvisioningError(
                "Only stable/esv releases of BIND are supported.")
        elif bind_info.find('BIND 9.17') != -1:
            raise ProvisioningError(
                "Only stable/esv releases of BIND are supported.")
        else:
            logger.warning("BIND version unknown, please modify %s manually."
                           % paths.namedconf)

        setup_file(setup_path("named.conf.dlz"), paths.namedconf, {
            "NAMED_CONF": paths.namedconf,
            "MODULESDIR": samba.param.modules_dir(),
            "BIND9_8": bind9_8,
            "BIND9_9": bind9_9,
            "BIND9_10": bind9_10,
            "BIND9_11": bind9_11,
            "BIND9_12": bind9_12,
            "BIND9_14": bind9_14,
            "BIND9_16": bind9_16
        })
def _test_samba_tool_replicate_local_no_dns(self):
    """Check we can provision a database without DNS partitions
    (and then add them afterwards).

    Joins a new DC with --dns-backend=NONE, replicates the DNS
    partitions locally, then verifies that samba_upgradedns and dbcheck
    repair the msDS-hasMasterNCs / replica-location links.
    """

    server_rootdse, _ = self._get_rootDSE(self.dc1)
    nc_name = server_rootdse["defaultNamingContext"]
    server_ldap_service_name = str(server_rootdse["ldapServiceName"][0])
    server_realm = server_ldap_service_name.split(":")[0]
    creds = self.get_credentials()

    # We have to give it a different netbiosname every time
    # it runs, otherwise the collision causes strange issues
    # to happen. This should be different on different environments.
    netbiosname = "dns" + self.backend + self.dc1
    if len(netbiosname) > 15:
        # NetBIOS names are limited to 15 characters.
        netbiosname = netbiosname[:15]

    out = self.check_output(
        "samba-tool domain join %s dc --server=%s %s --targetdir=%s --option=netbiosname=%s %s --backend-store=%s"
        % (server_realm, self.dc1, self.cmdline_creds, self.tempdir,
           netbiosname, "--dns-backend=NONE", self.backend))

    new_dc_config_file = os.path.join(self.tempdir, "etc", "smb.conf")
    new_dc_sam = os.path.join(self.tempdir, "private", "sam.ldb")

    forestdns_dn = ldb.binary_encode('DC=ForestDNSZones,' + str(nc_name))
    domaindns_dn = ldb.binary_encode('DC=DomainDNSZones,' + str(nc_name))

    # Pull both DNS partitions into the local (joined) database.
    self.check_output(
        "samba-tool drs replicate --local %s %s %s %s --configfile=%s --full-sync"
        % ("invalid", self.dc1, forestdns_dn, self.cmdline_creds,
           new_dc_config_file))
    self.check_output(
        "samba-tool drs replicate --local %s %s %s %s --configfile=%s --full-sync"
        % ("invalid", self.dc1, domaindns_dn, self.cmdline_creds,
           new_dc_config_file))

    server_rootdse, samdb = self._get_rootDSE("ldb://" + new_dc_sam,
                                              ldap_only=False)
    server_ds_name = ldb.binary_encode(
        server_rootdse["dsServiceName"][0].decode('utf-8'))

    # Show that Has-Master-NCs is fixed by samba_upgradedns
    res = samdb.search(base=server_ds_name,
                       expression="(msds-hasmasterncs=%s)" % forestdns_dn)
    self.assertEqual(len(res), 0)
    res = samdb.search(base=server_ds_name,
                       expression="(msds-hasmasterncs=%s)" % domaindns_dn)
    self.assertEqual(len(res), 0)

    self.check_output("samba_upgradedns --configfile=%s"
                      % (new_dc_config_file))

    res = samdb.search(base=server_ds_name,
                       expression="(msds-hasmasterncs=%s)" % forestdns_dn)
    self.assertEqual(len(res), 1)
    res = samdb.search(base=server_ds_name,
                       expression="(msds-hasmasterncs=%s)" % domaindns_dn)
    self.assertEqual(len(res), 1)

    # Show that replica locations is fixed by dbcheck
    res = samdb.search(controls=["search_options:1:2"],
                       expression="(&(msds-nc-replica-locations=%s)(ncname=%s))"
                       % (server_ds_name, forestdns_dn))
    self.assertEqual(len(res), 0)
    res = samdb.search(controls=["search_options:1:2"],
                       expression="(&(msds-nc-replica-locations=%s)(ncname=%s))"
                       % (server_ds_name, domaindns_dn))
    self.assertEqual(len(res), 0)

    try:
        # This fixes any forward-link-backward-link issues with the tools
        self.check_output("samba-tool dbcheck --configfile=%s --cross-ncs --fix --yes"
                          % (new_dc_config_file))
    except BlackboxProcessError as e:
        # dbcheck exits non-zero when it fixed something; accept that.
        self.assertTrue("Checked " in get_string(e.stdout))

    self.check_output("samba-tool dbcheck --configfile=%s --cross-ncs"
                      % (new_dc_config_file))

    # Compare the two directories
    self.check_output("samba-tool ldapcmp ldap://%s ldb://%s %s --filter=%s"
                      % (self.dc1, new_dc_sam, self.cmdline_creds,
                         "msDs-masteredBy,msDS-NC-Replica-Locations,msDS-hasMasterNCs"))

    # Check all ForestDNS connections and backlinks
    res = samdb.search(base=server_ds_name,
                       expression="(msds-hasmasterncs=%s)" % forestdns_dn)
    self.assertEqual(len(res), 1)
    res = samdb.search(base=forestdns_dn,
                       expression="(msds-masteredby=%s)" % server_ds_name)
    self.assertEqual(len(res), 1)
    res = samdb.search(controls=["search_options:1:2"],
                       expression="(&(msds-nc-replica-locations=%s)(ncname=%s))"
                       % (server_ds_name, forestdns_dn))
    self.assertEqual(len(res), 1)

    # Check all DomainDNS connections and backlinks
    res = samdb.search(base=server_ds_name,
                       expression="(msds-hasmasterncs=%s)" % domaindns_dn)
    self.assertEqual(len(res), 1)
    res = samdb.search(base=domaindns_dn,
                       expression="(msds-masteredby=%s)" % server_ds_name)
    self.assertEqual(len(res), 1)
    res = samdb.search(controls=["search_options:1:2"],
                       expression="(&(msds-nc-replica-locations=%s)(ncname=%s))"
                       % (server_ds_name, domaindns_dn))
    self.assertEqual(len(res), 1)

    # Demote the DC we created in the test
    self.check_output("samba-tool domain demote --remove-other-dead-server=%s -H ldap://%s %s --configfile=%s"
                      % (netbiosname, self.dc1, self.cmdline_creds,
                         new_dc_config_file))
def _GUID_string(self, guid):
    """Format a raw objectGUID value as its string representation."""
    formatted = self.test_ldb_dc.schema_format_value("objectGUID", guid)
    return get_string(formatted)
def parse_entry_data(name, e):
    """Convert a raw Chromium policy registry entry into a Python value.

    :param name: policy name, used to choose the decoding
    :param e: registry entry exposing ``.type`` and ``.data``
    :return: parsed JSON for JSON-valued policies, bool for boolean
             policies stored as REG_DWORD 0/1, otherwise ``e.data``
    """
    # Policies whose data is a JSON document stored as a string.
    dict_entries = [
        'VirtualKeyboardFeatures', 'DeviceArcDataSnapshotHours',
        'RequiredClientCertificateForDevice',
        'RequiredClientCertificateForUser', 'RegisteredProtocolHandlers',
        'WebUsbAllowDevicesForUrls', 'DeviceAutoUpdateTimeRestrictions',
        'DeviceUpdateStagingSchedule', 'DeviceMinimumVersion',
        'DeviceDisplayResolution', 'ExtensionSettings',
        'KerberosAccounts', 'NetworkFileSharesPreconfiguredShares',
        'NetworkThrottlingEnabled', 'TPMFirmwareUpdateSettings',
        'DeviceOffHours', 'ParentAccessCodeConfig', 'PerAppTimeLimits',
        'PerAppTimeLimitsWhitelist', 'PerAppTimeLimitsAllowlist',
        'UsageTimeLimit', 'PluginVmImage',
        'DeviceLoginScreenPowerManagement',
        'PowerManagementIdleSettings', 'ScreenLockDelays',
        'ScreenBrightnessPercent', 'DevicePowerPeakShiftDayConfig',
        'DeviceAdvancedBatteryChargeModeDayConfig',
        'PrintingPaperSizeDefault', 'AutoLaunchProtocolsFromOrigins',
        'BrowsingDataLifetime', 'DataLeakPreventionRulesList',
        'DeviceLoginScreenWebUsbAllowDevicesForUrls',
        'DeviceScheduledUpdateCheck', 'KeyPermissions',
        'ManagedBookmarks', 'ManagedConfigurationPerOrigin',
        'ProxySettings', 'SystemProxySettings', 'WebAppInstallForceList'
    ]
    # Policies whose data is a boolean stored as REG_DWORD 0/1.
    bools = [
        'ShowAccessibilityOptionsInSystemTrayMenu', 'LargeCursorEnabled',
        'SpokenFeedbackEnabled', 'HighContrastEnabled',
        'VirtualKeyboardEnabled', 'StickyKeysEnabled',
        'KeyboardDefaultToFunctionKeys', 'DictationEnabled',
        'SelectToSpeakEnabled', 'KeyboardFocusHighlightEnabled',
        'CursorHighlightEnabled', 'CaretHighlightEnabled',
        'MonoAudioEnabled', 'AccessibilityShortcutsEnabled',
        'AutoclickEnabled', 'DeviceLoginScreenDefaultLargeCursorEnabled',
        'DeviceLoginScreenDefaultSpokenFeedbackEnabled',
        'DeviceLoginScreenDefaultHighContrastEnabled',
        'DeviceLoginScreenDefaultVirtualKeyboardEnabled',
        'DeviceLoginScreenLargeCursorEnabled',
        'DeviceLoginScreenSpokenFeedbackEnabled',
        'DeviceLoginScreenHighContrastEnabled',
        'DeviceLoginScreenVirtualKeyboardEnabled',
        'DeviceLoginScreenDictationEnabled',
        'DeviceLoginScreenSelectToSpeakEnabled',
        'DeviceLoginScreenCursorHighlightEnabled',
        'DeviceLoginScreenCaretHighlightEnabled',
        'DeviceLoginScreenMonoAudioEnabled',
        'DeviceLoginScreenAutoclickEnabled',
        'DeviceLoginScreenStickyKeysEnabled',
        'DeviceLoginScreenKeyboardFocusHighlightEnabled',
        'DeviceLoginScreenShowOptionsInSystemTrayMenu',
        'DeviceLoginScreenAccessibilityShortcutsEnabled',
        'FloatingAccessibilityMenuEnabled', 'ArcEnabled',
        'UnaffiliatedArcAllowed', 'AppRecommendationZeroStateEnabled',
        'DeviceBorealisAllowed', 'UserBorealisAllowed',
        'SystemUse24HourClock', 'DefaultSearchProviderEnabled',
        'ChromeOsReleaseChannelDelegated', 'DeviceAutoUpdateDisabled',
        'DeviceAutoUpdateP2PEnabled', 'DeviceUpdateHttpDownloadsEnabled',
        'RebootAfterUpdate', 'BlockExternalExtensions',
        'VoiceInteractionContextEnabled', 'VoiceInteractionHotwordEnabled',
        'EnableMediaRouter', 'ShowCastIconInToolbar', 'DriveDisabled',
        'DriveDisabledOverCellular', 'DisableAuthNegotiateCnameLookup',
        'EnableAuthNegotiatePort', 'BasicAuthOverHttpEnabled',
        'AuthNegotiateDelegateByKdcPolicy', 'AllowCrossOriginAuthPrompt',
        'NtlmV2Enabled', 'IntegratedWebAuthenticationAllowed',
        'BrowserSwitcherEnabled', 'BrowserSwitcherKeepLastChromeTab',
        'BrowserSwitcherUseIeSitelist', 'VirtualMachinesAllowed',
        'CrostiniAllowed', 'DeviceUnaffiliatedCrostiniAllowed',
        'CrostiniExportImportUIAllowed', 'CrostiniPortForwardingAllowed',
        'NativeMessagingUserLevelHosts', 'NetworkFileSharesAllowed',
        'NetBiosShareDiscoveryEnabled', 'NTLMShareAuthenticationEnabled',
        'DeviceDataRoamingEnabled', 'DeviceWiFiFastTransitionEnabled',
        'DeviceWiFiAllowed', 'DeviceAllowBluetooth',
        'DeviceAllowRedeemChromeOsRegistrationOffers',
        'DeviceQuirksDownloadEnabled', 'SuggestedContentEnabled',
        'DeviceShowLowDiskSpaceNotification', 'PasswordManagerEnabled',
        'PasswordLeakDetectionEnabled', 'PluginVmAllowed',
        'PluginVmDataCollectionAllowed', 'UserPluginVmAllowed',
        'DeviceRebootOnShutdown', 'PowerManagementUsesAudioActivity',
        'PowerManagementUsesVideoActivity', 'AllowWakeLocks',
        'AllowScreenWakeLocks', 'WaitForInitialUserActivity',
        'PowerSmartDimEnabled', 'DevicePowerPeakShiftEnabled',
        'DeviceBootOnAcEnabled',
        'DeviceAdvancedBatteryChargeModeEnabled',
        'DeviceUsbPowerShareEnabled', 'PrintingEnabled',
        'CloudPrintProxyEnabled',
        'PrintingSendUsernameAndFilenameEnabled',
        'CloudPrintSubmitEnabled', 'DisablePrintPreview',
        'PrintHeaderFooter', 'PrintPreviewUseSystemDefaultPrinter',
        'UserNativePrintersAllowed', 'UserPrintersAllowed',
        'DeletePrintJobHistoryAllowed',
        'DeviceLoginScreenPrivacyScreenEnabled', 'PrivacyScreenEnabled',
        'PinUnlockWeakPinsAllowed', 'PinUnlockAutosubmitEnabled',
        'RemoteAccessHostFirewallTraversal',
        'RemoteAccessHostRequireCurtain',
        'RemoteAccessHostAllowClientPairing',
        'RemoteAccessHostAllowRelayedConnection',
        'RemoteAccessHostAllowUiAccessForRemoteAssistance',
        'RemoteAccessHostAllowFileTransfer',
        'RemoteAccessHostAllowRemoteAccessConnections',
        'AttestationEnabledForUser', 'SafeBrowsingEnabled',
        'SafeBrowsingExtendedReportingEnabled', 'DeviceGuestModeEnabled',
        'DeviceAllowNewUsers', 'DeviceShowUserNamesOnSignin',
        'DeviceEphemeralUsersEnabled',
        'DeviceShowNumericKeyboardForPassword',
        'DeviceFamilyLinkAccountsAllowed', 'ShowHomeButton',
        'HomepageIsNewTabPage', 'DeviceMetricsReportingEnabled',
        'DeviceWilcoDtcAllowed', 'AbusiveExperienceInterventionEnforce',
        'AccessibilityImageLabelsEnabled',
        'AdditionalDnsQueryTypesEnabled', 'AdvancedProtectionAllowed',
        'AllowDeletingBrowserHistory', 'AllowDinosaurEasterEgg',
        'AllowFileSelectionDialogs', 'AllowScreenLock',
        'AllowSyncXHRInPageDismissal', 'AlternateErrorPagesEnabled',
        'AlwaysOpenPdfExternally', 'AppCacheForceEnabled',
        'AudioCaptureAllowed', 'AudioOutputAllowed',
        'AudioProcessHighPriorityEnabled', 'AudioSandboxEnabled',
        'AutoFillEnabled', 'AutofillAddressEnabled',
        'AutofillCreditCardEnabled', 'AutoplayAllowed',
        'BackgroundModeEnabled', 'BlockThirdPartyCookies',
        'BookmarkBarEnabled', 'BrowserAddPersonEnabled',
        'BrowserGuestModeEnabled', 'BrowserGuestModeEnforced',
        'BrowserLabsEnabled', 'BrowserNetworkTimeQueriesEnabled',
        'BuiltInDnsClientEnabled', 'CECPQ2Enabled',
        'CaptivePortalAuthenticationIgnoresProxy',
        'ChromeCleanupEnabled', 'ChromeCleanupReportingEnabled',
        'ChromeOsLockOnIdleSuspend', 'ClickToCallEnabled',
        'CloudManagementEnrollmentMandatory',
        'CloudPolicyOverridesPlatformPolicy', 'CloudUserPolicyMerge',
        'CommandLineFlagSecurityWarningsEnabled',
        'ComponentUpdatesEnabled', 'DNSInterceptionChecksEnabled',
        'DataLeakPreventionReportingEnabled',
        'DefaultBrowserSettingEnabled',
        'DefaultSearchProviderContextMenuAccessAllowed',
        'DeveloperToolsDisabled',
        'DeviceAllowMGSToStoreDisplayProperties',
        'DeviceDebugPacketCaptureAllowed',
        'DeviceLocalAccountManagedSessionEnabled',
        'DeviceLoginScreenPrimaryMouseButtonSwitch',
        'DevicePciPeripheralDataAccessEnabled', 'DevicePowerwashAllowed',
        'DeviceSystemWideTracingEnabled', 'Disable3DAPIs',
        'DisableSafeBrowsingProceedAnyway', 'DisableScreenshots',
        'EasyUnlockAllowed', 'EditBookmarksEnabled',
        'EmojiSuggestionEnabled', 'EnableDeprecatedPrivetPrinting',
        'EnableOnlineRevocationChecks', 'EnableSyncConsent',
        'EnterpriseHardwarePlatformAPIEnabled',
        'ExternalProtocolDialogShowAlwaysOpenCheckbox',
        'ExternalStorageDisabled', 'ExternalStorageReadOnly',
        'ForceBrowserSignin', 'ForceEphemeralProfiles',
        'ForceGoogleSafeSearch', 'ForceMaximizeOnFirstRun',
        'ForceSafeSearch', 'ForceYouTubeSafetyMode',
        'FullscreenAlertEnabled', 'FullscreenAllowed',
        'GloballyScopeHTTPAuthCacheEnabled',
        'HardwareAccelerationModeEnabled', 'HideWebStoreIcon',
        'ImportAutofillFormData', 'ImportBookmarks', 'ImportHistory',
        'ImportHomepage', 'ImportSavedPasswords', 'ImportSearchEngine',
        'IncognitoEnabled', 'InsecureFormsWarningsEnabled',
        'InsecurePrivateNetworkRequestsAllowed',
        'InstantTetheringAllowed', 'IntensiveWakeUpThrottlingEnabled',
        'JavascriptEnabled', 'LacrosAllowed',
        'LacrosSecondaryProfilesAllowed',
        'LockScreenMediaPlaybackEnabled',
        'LoginDisplayPasswordButtonEnabled',
        'ManagedGuestSessionPrivacyWarningsEnabled',
        'MediaRecommendationsEnabled', 'MediaRouterCastAllowAllIPs',
        'MetricsReportingEnabled', 'NTPCardsVisible',
        'NTPCustomBackgroundEnabled', 'NativeWindowOcclusionEnabled',
        'NearbyShareAllowed', 'PaymentMethodQueryEnabled',
        'PdfAnnotationsEnabled', 'PhoneHubAllowed',
        'PhoneHubNotificationsAllowed',
        'PhoneHubTaskContinuationAllowed', 'PolicyAtomicGroupsEnabled',
        'PrimaryMouseButtonSwitch', 'PromotionalTabsEnabled',
        'PromptForDownloadLocation', 'QuicAllowed',
        'RendererCodeIntegrityEnabled',
        'RequireOnlineRevocationChecksForLocalAnchors',
        'RoamingProfileSupportEnabled', 'SSLErrorOverrideAllowed',
        'SafeBrowsingForTrustedSourcesEnabled',
        'SavingBrowserHistoryDisabled', 'ScreenCaptureAllowed',
        'ScrollToTextFragmentEnabled', 'SearchSuggestEnabled',
        'SecondaryGoogleAccountSigninAllowed',
        'SharedArrayBufferUnrestrictedAccessAllowed',
        'SharedClipboardEnabled', 'ShowAppsShortcutInBookmarkBar',
        'ShowFullUrlsInAddressBar', 'ShowLogoutButtonInTray',
        'SignedHTTPExchangeEnabled', 'SigninAllowed',
        'SigninInterceptionEnabled', 'SitePerProcess',
        'SmartLockSigninAllowed', 'SmsMessagesAllowed',
        'SpellCheckServiceEnabled', 'SpellcheckEnabled',
        'StartupBrowserWindowLaunchSuppressed',
        'StricterMixedContentTreatmentEnabled',
        'SuggestLogoutAfterClosingLastWindow',
        'SuppressDifferentOriginSubframeDialogs',
        'SuppressUnsupportedOSWarning', 'SyncDisabled',
        'TargetBlankImpliesNoOpener', 'TaskManagerEndProcessEnabled',
        'ThirdPartyBlockingEnabled', 'TouchVirtualKeyboardEnabled',
        'TranslateEnabled', 'TripleDESEnabled',
        'UnifiedDesktopEnabledByDefault',
        'UrlKeyedAnonymizedDataCollectionEnabled',
        'UserAgentClientHintsEnabled', 'UserFeedbackAllowed',
        'VideoCaptureAllowed', 'VmManagementCliAllowed',
        'VpnConfigAllowed', 'WPADQuickCheckEnabled',
        'WebRtcAllowLegacyTLSProtocols',
        'WebRtcEventLogCollectionAllowed', 'WifiSyncAndroidAllowed',
        'WindowOcclusionEnabled'
    ]
    if name in dict_entries:
        return json.loads(get_string(e.data))
    elif e.type == misc.REG_DWORD and name in bools:
        return e.data == 1
    return e.data
def read_ms_markdown(in_file, out_folder=None, out_dict=None):
    """Read Github documentation to produce forest wide updates.

    Each "|Operation N: {guid}|...|" table row in the markdown is parsed
    and converted either to an LDIF file (when *out_folder* is given) or
    into *out_dict* keyed by the operation GUID.

    :param in_file: Forest-Wide-Updates.md
    :param out_folder: output folder
    :param out_dict: output dictionary
    :raises Exception: if a table row's change description matches no
        known operation in ``operation_map``
    """
    with open(in_file) as update_file:
        # There is a hidden ClaimPossibleValues in this md file
        html = markdown.markdown(re.sub(r'CN=<forest root domain.*?>',
                                        '${FOREST_ROOT_DOMAIN}',
                                        update_file.read()),
                                 output_format='xhtml')

    html = html.replace('CN=Schema,%ws', '${SCHEMA_DN}')

    tree = ET.fromstring('<root>' + html + '</root>')

    for node in tree:
        if node.text and node.text.startswith('|Operation'):
            # Strip first and last |
            updates = [x[1:len(x) - 1].split('|') for x in
                       get_string(ET.tostring(node,
                                              method='text')).splitlines()]
            # updates[0] is the header row, updates[1] the |---| separator
            for update in updates[2:]:
                # Raw string: '\d' in a plain literal is an invalid
                # escape sequence on modern Python.
                output = re.match(r'Operation (\d+): {(.*)}', update[0])
                if output:
                    # print output.group(1), output.group(2)
                    guid = output.group(2)
                    filename = "%s-{%s}.ldif" % (output.group(1).zfill(4),
                                                 guid)

                    found = False

                    if update[3].startswith('Created') or \
                       update[1].startswith('Added ACE'):
                        # Trigger the security descriptor code
                        # Reduce info to just the security descriptor
                        update[3] = update[3].split(':')[-1]

                        result = parse_ace(update[1], update[2], update[3])

                        if filename and out_folder is not None:
                            save_ldif(filename, result, out_folder)
                        else:
                            save_array(guid, result, out_dict)

                        continue

                    for operation in operation_map:
                        if update[1].startswith(operation):
                            found = True

                            result = operation_map[operation](update[1],
                                                              update[2],
                                                              update[3])

                            if filename and out_folder is not None:
                                save_ldif(filename, [result], out_folder)
                            else:
                                save_array(guid, [result], out_dict)

                            break

                    if not found:
                        raise Exception(update)
def GUID_string(self, guid):
    """Render a raw objectGUID value as its canonical string form.

    The schema's objectGUID attribute syntax does the formatting; the
    result is then normalised to a str.
    """
    formatted = self.samdb.schema_format_value("objectGUID", guid)
    return get_string(formatted)
def get_local_repl_data(self):
    """Gather replication state from the locally-connected DC.

    Returns a dict with the DSA details ('dsa'), inbound neighbours
    ('repsFrom'), outbound neighbours ('repsTo'), the parsed
    nTDSConnection objects ('NTDSConnections'), and the 'site' and
    'server' names taken from the NTDS settings DN.

    :raises CommandError: if the NTDS settings DN cannot be searched
    """
    drsuapi_connect(self)
    samdb_connect(self)

    # show domain information
    ntds_dn = self.samdb.get_dsServiceName()

    (site, server) = drs_parse_ntds_dn(ntds_dn)
    try:
        ntds = self.samdb.search(base=ntds_dn, scope=ldb.SCOPE_BASE,
                                 attrs=['options', 'objectGUID',
                                        'invocationId'])
    except Exception as e:
        raise CommandError("Failed to search NTDS DN %s" % ntds_dn)

    dsa_details = {
        "options": int(attr_default(ntds[0], "options", 0)),
        # Both GUIDs are rendered through the objectGUID schema syntax
        # to get the canonical string form.
        "objectGUID": get_string(self.samdb.schema_format_value(
            "objectGUID", ntds[0]["objectGUID"][0])),
        "invocationId": get_string(self.samdb.schema_format_value(
            "objectGUID", ntds[0]["invocationId"][0]))
    }

    conn = self.samdb.search(base=ntds_dn,
                             expression="(objectClass=nTDSConnection)")
    repsfrom = self.get_neighbours(
        drsuapi.DRSUAPI_DS_REPLICA_INFO_NEIGHBORS)
    repsto = self.get_neighbours(drsuapi.DRSUAPI_DS_REPLICA_INFO_REPSTO)

    conn_details = []
    for c in conn:
        # fromServer DN: first RDN names the NTDS settings object, the
        # remainder is the server object's DN.
        c_rdn, sep, c_server_dn = str(c['fromServer'][0]).partition(',')
        d = {
            'name': str(c['name']),
            'remote DN': str(c['fromServer'][0]),
            'options': int(attr_default(c, 'options', 0)),
            'enabled': (get_string(attr_default(c, 'enabledConnection',
                                                'TRUE')).upper() == 'TRUE')
        }

        # Append before the DNS lookup below: later keys ('dns name',
        # 'is deleted', 'replicates NC') still land in this same dict.
        conn_details.append(d)
        try:
            c_server_res = self.samdb.search(base=c_server_dn,
                                             scope=ldb.SCOPE_BASE,
                                             attrs=["dnsHostName"])
            d['dns name'] = str(c_server_res[0]["dnsHostName"][0])
        except ldb.LdbError as e:
            (errno, _) = e.args
            # A vanished server object is reported, not fatal.
            if errno == ldb.ERR_NO_SUCH_OBJECT:
                d['is deleted'] = True
        except (KeyError, IndexError):
            # Server object exists but has no dnsHostName; best-effort.
            pass

        d['replicates NC'] = []
        for r in c.get('mS-DS-ReplicatesNCReason', []):
            # Value format is colon-separated; a[3] is the NC DN,
            # a[2] the reason flags.
            a = str(r).split(':')
            d['replicates NC'].append((a[3], int(a[2])))

    return {
        'dsa': dsa_details,
        'repsFrom': repsfrom,
        'repsTo': repsto,
        'NTDSConnections': conn_details,
        'site': site,
        'server': server
    }
def __init__(self, domain_sid, invocationid=None, schemadn=None,
             files=None, override_prefixmap=None,
             additional_prefixmap=None, base_schema=None):
    from samba.provision import setup_path
    """Load schema for the SamDB from the AD schema files and
    samba4_schema.ldif.

    :param domain_sid: SID of the domain, used to build the schema
        partition's security descriptor
    :param invocationid: optional invocation id set on the scratch SamDB
    :param schemadn: DN of the schema
    :param files: optional list of schema LDIF files that replace the
        bundled MS schema data
    :param override_prefixmap: raw prefixMap data used instead of the
        bundled prefixMap.txt
    :param additional_prefixmap: iterable of extra prefixMap entries
        appended to the map
    :param base_schema: base schema version name; defaults to
        Schema.default_base_schema()

    Returns the schema data loaded, to avoid double-parsing when then
    needing to add it to the db
    """
    if base_schema is None:
        base_schema = Schema.default_base_schema()

    self.base_schema = base_schema

    self.schemadn = schemadn
    # We need to have the am_rodc=False just to keep some warnings quiet -
    # this isn't a real SAM, so it's meaningless.
    self.ldb = SamDB(global_schema=False, am_rodc=False)
    if invocationid is not None:
        self.ldb.set_invocation_id(invocationid)

    self.schema_data = read_ms_schema(
        setup_path('ad-schema/%s' %
                   Schema.base_schemas[base_schema][0]),
        setup_path('ad-schema/%s' %
                   Schema.base_schemas[base_schema][1]))

    def read_file(file):
        # Read the whole file as bytes.
        with open(file, 'rb') as data_file:
            return data_file.read()

    if files is not None:
        # Explicit file list replaces (does not extend) the MS schema
        # read above.
        self.schema_data = "".join(get_string(read_file(file))
                                   for file in files)

    self.schema_data = substitute_var(self.schema_data,
                                      {"SCHEMADN": schemadn})
    check_all_substituted(self.schema_data)

    schema_version = str(Schema.get_version(base_schema))
    self.schema_dn_modify = read_and_sub_file(
        setup_path("provision_schema_basedn_modify.ldif"), {
            "SCHEMADN": schemadn,
            "OBJVERSION": schema_version
        })

    descr = b64encode(get_schema_descriptor(domain_sid)).decode('utf8')
    self.schema_dn_add = read_and_sub_file(
        setup_path("provision_schema_basedn.ldif"), {
            "SCHEMADN": schemadn,
            "DESCRIPTOR": descr
        })

    if override_prefixmap is not None:
        self.prefixmap_data = override_prefixmap
    else:
        self.prefixmap_data = read_file(setup_path("prefixMap.txt"))
    if additional_prefixmap is not None:
        # NOTE(review): read_file returns bytes while this join produces
        # str — presumably additional_prefixmap is only ever combined
        # with a str override_prefixmap; confirm against callers.
        self.prefixmap_data += "".join("%s\n" % map
                                       for map in additional_prefixmap)
    self.prefixmap_data = b64encode(self.prefixmap_data).decode('utf8')

    # We don't actually add this ldif, just parse it
    prefixmap_ldif = "dn: %s\nprefixMap:: %s\n\n" % (self.schemadn,
                                                     self.prefixmap_data)
    self.set_from_ldif(prefixmap_ldif, self.schema_data, self.schemadn)
def test_samba_tool_showrepl(self):
    """Tests 'samba-tool drs showrepl' command.
    """
    # Output should be like:
    #      <site-name>/<domain-name>
    #      DSA Options: <hex-options>
    #      DSA object GUID: <DSA-object-GUID>
    #      DSA invocationId: <DSA-invocationId>
    #      <Inbound-connections-list>
    #      <Outbound-connections-list>
    #      <KCC-objects>
    #      ...
    #   TODO: Perhaps we should check at least for
    #         DSA's objectGUDI and invocationId
    out = self.check_output("samba-tool drs showrepl "
                            "%s %s" % (self.dc1, self.cmdline_creds))
    out = get_string(out)
    # We want to assert that we are getting the same results, but
    # dates and GUIDs change randomly.
    #
    # There are sections with headers like ==== THIS ====" (header,
    (header,
     _inbound, inbound,
     _outbound, outbound,
     _conn, conn) = out.split("====")

    self.assertEqual(_inbound, ' INBOUND NEIGHBORS ')
    self.assertEqual(_outbound, ' OUTBOUND NEIGHBORS ')
    self.assertEqual(_conn, ' KCC CONNECTION OBJECTS ')

    self.assertRegexpMatches(header,
                             r'^Default-First-Site-Name\\LOCALDC\s+'
                             r"DSA Options: %s\s+"
                             r"DSA object GUID: %s\s+"
                             r"DSA invocationId: %s" %
                             (HEX8_RE, GUID_RE, GUID_RE))

    # We don't assert the DomainDnsZones and ForestDnsZones are
    # there because we don't know that they have been set up yet.
    for p in ['CN=Configuration,DC=samba,DC=example,DC=com',
              'DC=samba,DC=example,DC=com',
              'CN=Schema,CN=Configuration,DC=samba,DC=example,DC=com']:
        self.assertRegexpMatches(
            inbound,
            r'%s\n'
            r'\tDefault-First-Site-Name\\[A-Z]+ via RPC\n'
            r'\t\tDSA object GUID: %s\n'
            r'\t\tLast attempt @ [^\n]+\n'
            r'\t\t\d+ consecutive failure\(s\).\n'
            r'\t\tLast success @ [^\n]+\n'
            r'\n' % (p, GUID_RE),
            msg="%s inbound missing" % p)

        self.assertRegexpMatches(
            outbound,
            r'%s\n'
            r'\tDefault-First-Site-Name\\[A-Z]+ via RPC\n'
            r'\t\tDSA object GUID: %s\n'
            r'\t\tLast attempt @ [^\n]+\n'
            r'\t\t\d+ consecutive failure\(s\).\n'
            r'\t\tLast success @ [^\n]+\n'
            r'\n' % (p, GUID_RE),
            msg="%s outbound missing" % p)

    # Dots in the DNS name must be escaped: an unescaped '.' would
    # match any character and weaken the assertion.
    self.assertRegexpMatches(conn,
                             r'Connection --\n'
                             r'\tConnection name: %s\n'
                             r'\tEnabled        : TRUE\n'
                             r'\tServer DNS name : \w+\.samba\.example\.com\n'
                             r'\tServer DN name  : %s'
                             r'\n' % (GUID_RE, DN_RE))
def test_setpassword(self):
    """Exercise 'samba-tool user setpassword' and verify the new
    password propagates through 'user syncpasswords' / 'user
    getpassword' (unicodePwd, supplementalCredentials and the
    virtual cleartext attributes).
    """
    # First pass: set passwords via an explicit LDAP URL.
    for user in self.users:
        newpasswd = self.random_password(16)
        (result, out, err) = self.runsubcmd(
            "user", "setpassword", user["name"],
            "--newpassword=%s" % newpasswd,
            "-H", "ldap://%s" % os.environ["DC_SERVER"],
            "-U%s%%%s" % (os.environ["DC_USERNAME"],
                          os.environ["DC_PASSWORD"]))
        self.assertCmdSuccess(result, out, err, "Ensure setpassword runs")
        self.assertEqual(err, "", "setpassword with url")
        self.assertMatch(out, "Changed password OK",
                         "setpassword with url")

    attributes = "sAMAccountName,unicodePwd,supplementalCredentials,virtualClearTextUTF8,virtualClearTextUTF16,virtualSSHA,virtualSambaGPG"

    # Initialise the syncpasswords cache ldb once; later --no-wait runs
    # reuse it.
    (result, out, err) = self.runsubcmd("user", "syncpasswords",
                                        "--cache-ldb-initialize",
                                        "--attributes=%s" % attributes,
                                        "--decrypt-samba-gpg")
    self.assertCmdSuccess(
        result, out, err,
        "Ensure syncpasswords --cache-ldb-initialize runs")
    self.assertEqual(err, "", "getpassword without url")
    cache_attrs = {
        "objectClass": {"value": "userSyncPasswords"},
        "samdbUrl": {},
        "dirsyncFilter": {},
        "dirsyncAttribute": {},
        "dirsyncControl": {"value": "dirsync:1:0:0"},
        "passwordAttribute": {},
        "decryptSambaGPG": {},
        "currentTime": {},
    }
    for a in cache_attrs.keys():
        v = cache_attrs[a].get("value", "")
        self.assertMatch(
            out, "%s: %s" % (a, v),
            "syncpasswords --cache-ldb-initialize: %s: %s out[%s]" %
            (a, v, out))

    # The initial dirsync pass should pick up every test user.
    (result, out, err) = self.runsubcmd("user", "syncpasswords",
                                        "--no-wait")
    self.assertCmdSuccess(result, out, err,
                          "Ensure syncpasswords --no-wait runs")
    self.assertEqual(err, "", "syncpasswords --no-wait")
    self.assertMatch(
        out, "dirsync_loop(): results 0",
        "syncpasswords --no-wait: 'dirsync_loop(): results 0': out[%s]" %
        (out))
    for user in self.users:
        self.assertMatch(
            out, "sAMAccountName: %s" % (user["name"]),
            "syncpasswords --no-wait: 'sAMAccountName': %s out[%s]" %
            (user["name"], out))

    # Second pass: change each password again and verify the derived
    # password attributes via syncpasswords and getpassword.
    for user in self.users:
        newpasswd = self.random_password(16)
        creds = credentials.Credentials()
        creds.set_anonymous()
        creds.set_password(newpasswd)
        # Compute the NT hash once and reuse it (it was previously
        # recomputed for the base64 encoding).
        nthash = creds.get_nt_hash()
        unicodePwd = base64.b64encode(nthash).decode('utf8')
        virtualClearTextUTF8 = base64.b64encode(
            get_bytes(newpasswd)).decode('utf8')
        virtualClearTextUTF16 = base64.b64encode(
            get_string(newpasswd).encode('utf-16-le')).decode('utf8')

        (result, out, err) = self.runsubcmd("user", "setpassword",
                                            user["name"],
                                            "--newpassword=%s" % newpasswd)
        self.assertCmdSuccess(result, out, err, "Ensure setpassword runs")
        self.assertEqual(err, "", "setpassword without url")
        self.assertMatch(out, "Changed password OK",
                         "setpassword without url")

        (result, out, err) = self.runsubcmd("user", "syncpasswords",
                                            "--no-wait")
        self.assertCmdSuccess(result, out, err,
                              "Ensure syncpasswords --no-wait runs")
        self.assertEqual(err, "", "syncpasswords --no-wait")
        self.assertMatch(
            out, "dirsync_loop(): results 0",
            "syncpasswords --no-wait: 'dirsync_loop(): results 0': "
            "out[%s]" % (out))
        self.assertMatch(
            out, "sAMAccountName: %s" % (user["name"]),
            "syncpasswords --no-wait: 'sAMAccountName': %s out[%s]" %
            (user["name"], out))
        # Secret attributes are redacted in the sync output, but the
        # base64 values must still be present.
        self.assertMatch(
            out, "# unicodePwd::: REDACTED SECRET ATTRIBUTE",
            "getpassword '# unicodePwd::: REDACTED SECRET ATTRIBUTE': "
            "out[%s]" % out)
        self.assertMatch(out, "unicodePwd:: %s" % unicodePwd,
                         "getpassword unicodePwd: out[%s]" % out)
        self.assertMatch(
            out, "# supplementalCredentials::: REDACTED SECRET ATTRIBUTE",
            "getpassword '# supplementalCredentials::: REDACTED SECRET "
            "ATTRIBUTE': out[%s]" % out)
        self.assertMatch(out, "supplementalCredentials:: ",
                         "getpassword supplementalCredentials: out[%s]" %
                         out)
        # Cleartext attributes are only derivable when GPG decryption
        # of the stored password succeeded.
        if "virtualSambaGPG:: " in out:
            self.assertMatch(
                out, "virtualClearTextUTF8:: %s" % virtualClearTextUTF8,
                "getpassword virtualClearTextUTF8: out[%s]" % out)
            self.assertMatch(
                out, "virtualClearTextUTF16:: %s" % virtualClearTextUTF16,
                "getpassword virtualClearTextUTF16: out[%s]" % out)
            self.assertMatch(out, "virtualSSHA: ",
                             "getpassword virtualSSHA: out[%s]" % out)

        (result, out, err) = self.runsubcmd("user", "getpassword",
                                            user["name"],
                                            "--attributes=%s" % attributes,
                                            "--decrypt-samba-gpg")
        self.assertCmdSuccess(result, out, err, "Ensure getpassword runs")
        self.assertEqual(err, "", "getpassword without url")
        self.assertMatch(out, "Got password OK", "getpassword without url")
        self.assertMatch(
            out, "sAMAccountName: %s" % (user["name"]),
            "getpassword: '******': %s out[%s]" % (user["name"], out))
        self.assertMatch(out, "unicodePwd:: %s" % unicodePwd,
                         "getpassword unicodePwd: out[%s]" % out)
        self.assertMatch(out, "supplementalCredentials:: ",
                         "getpassword supplementalCredentials: out[%s]" %
                         out)
        self._verify_supplementalCredentials(
            out.replace("\nGot password OK\n", ""))
        if "virtualSambaGPG:: " in out:
            self.assertMatch(
                out, "virtualClearTextUTF8:: %s" % virtualClearTextUTF8,
                "getpassword virtualClearTextUTF8: out[%s]" % out)
            self.assertMatch(
                out, "virtualClearTextUTF16:: %s" % virtualClearTextUTF16,
                "getpassword virtualClearTextUTF16: out[%s]" % out)
            self.assertMatch(out, "virtualSSHA: ",
                             "getpassword virtualSSHA: out[%s]" % out)

    # Final pass: force "must change at next login" alongside the
    # password change.
    for user in self.users:
        newpasswd = self.random_password(16)
        (result, out, err) = self.runsubcmd(
            "user", "setpassword", user["name"],
            "--newpassword=%s" % newpasswd,
            "--must-change-at-next-login",
            "-H", "ldap://%s" % os.environ["DC_SERVER"],
            "-U%s%%%s" % (os.environ["DC_USERNAME"],
                          os.environ["DC_PASSWORD"]))
        self.assertCmdSuccess(result, out, err, "Ensure setpassword runs")
        self.assertEqual(err, "", "setpassword with forced change")
        self.assertMatch(out, "Changed password OK",
                         "setpassword with forced change")