class SambaOCHelper(object): def __init__(self): self.samba_lp = LoadParm() self.samba_lp.set('debug level', '0') self.samba_lp.load_default() url = self.samba_lp.get('dcerpc_mapiproxy:samdb_url') or \ self.samba_lp.private_path("sam.ldb") self.samdb = SamDB(url=url, lp=self.samba_lp, session_info=system_session()) self.conn = self._open_mysql_connection() def _open_mysql_connection(self): connection_string = self.samba_lp.get('mapiproxy:openchangedb') if not connection_string: raise Exception( "Not found mapiproxy:openchangedb on samba configuration") # mysql://openchange:password@localhost/openchange m = re.search( r'(?P<scheme>.+)://(?P<user>.+):(?P<pass>.+)@(?P<host>.+)/(?P<db>.+)', connection_string) if not m: raise Exception("Unable to parse mapiproxy:openchangedb: %s" % connection_string) group_dict = m.groupdict() if group_dict['scheme'] != 'mysql': raise Exception( "mapiproxy:openchangedb should start with mysql:// (we got %s)", group_dict['scheme']) conn = MySQLdb.connect(host=group_dict['host'], user=group_dict['user'], passwd=group_dict['pass'], db=group_dict['db']) conn.autocommit(True) return conn def invalid_user(self, username): ret = self.samdb.search(base=self.samdb.domain_dn(), scope=ldb.SCOPE_SUBTREE, expression="(sAMAccountName=%s)" % ldb.binary_encode(username)) return len(ret) != 1 def find_email_of(self, username): ret = self.samdb.search(base=self.samdb.domain_dn(), scope=ldb.SCOPE_SUBTREE, attrs=["mail"], expression="(sAMAccountName=%s)" % ldb.binary_encode(username)) return ret[0]["mail"][0] def active_openchange_users(self): c = self.conn.cursor() c.execute("SELECT name FROM mailboxes") return sorted([row[0] for row in c.fetchall()])
class SambaOCHelper(object): def __init__(self): self.samba_lp = LoadParm() self.samba_lp.set('debug level', '0') self.samba_lp.load_default() url = self.samba_lp.get('dcerpc_mapiproxy:samdb_url') or \ self.samba_lp.private_path("sam.ldb") self.samdb = SamDB(url=url, lp=self.samba_lp, session_info=system_session()) self.conn = self._open_mysql_connection() def _open_mysql_connection(self): connection_string = self.samba_lp.get('mapiproxy:openchangedb') if not connection_string: raise Exception("Not found mapiproxy:openchangedb on samba configuration") # mysql://openchange:password@localhost/openchange m = re.search(r'(?P<scheme>.+)://(?P<user>.+):(?P<pass>.+)@(?P<host>.+)/(?P<db>.+)', connection_string) if not m: raise Exception("Unable to parse mapiproxy:openchangedb: %s" % connection_string) group_dict = m.groupdict() if group_dict['scheme'] != 'mysql': raise Exception("mapiproxy:openchangedb should start with mysql:// (we got %s)", group_dict['scheme']) conn = MySQLdb.connect(host=group_dict['host'], user=group_dict['user'], passwd=group_dict['pass'], db=group_dict['db']) conn.autocommit(True) return conn def invalid_user(self, username): ret = self.samdb.search(base=self.samdb.domain_dn(), scope=ldb.SCOPE_SUBTREE, expression="(sAMAccountName=%s)" % ldb.binary_encode(username)) return len(ret) != 1 def find_email_of(self, username): ret = self.samdb.search(base=self.samdb.domain_dn(), scope=ldb.SCOPE_SUBTREE, attrs=["mail"], expression="(sAMAccountName=%s)" % ldb.binary_encode(username)) return ret[0]["mail"][0] def active_openchange_users(self): c = self.conn.cursor() c.execute("SELECT name FROM mailboxes") return sorted([row[0] for row in c.fetchall()]) def get_indexing_cache(self): memcached_server = self.samba_lp.get('mapistore:indexing_cache') if not memcached_server: return "127.0.0.1:11211" # This should has a format like: --SERVER=11.22.33.44:11211 return memcached_server.split('=')[1]
class Options: def __init__(self): self.function_level = None self.use_xattrs = "auto" self.lp = LoadParm(); self.lp.load_default(); self.realm = self.lp.get('realm') #default self.domain = self.lp.get('workgroup') #default self.adminpass = '' self.smbconf = self.lp.configfile self.server_role = self.lp.get("server role") #default self.samdb_fill = FILL_FULL #default self.blank = False self.partitions_only = False
class SambaOCHelper(object): def __init__(self): self.samba_lp = LoadParm() self.samba_lp.set("debug level", "0") self.samba_lp.load_default() url = self.samba_lp.get("dcerpc_mapiproxy:samdb_url") or self.samba_lp.private_path("sam.ldb") self.samdb = SamDB(url=url, lp=self.samba_lp, session_info=system_session()) self.conn = self._open_mysql_connection() def _open_mysql_connection(self): connection_string = self.samba_lp.get("mapiproxy:openchangedb") if not connection_string: raise Exception("Not found mapiproxy:openchangedb on samba configuration") # mysql://openchange:password@localhost/openchange m = re.search(r"(?P<scheme>.+)://(?P<user>.+):(?P<pass>.+)@(?P<host>.+)/(?P<db>.+)", connection_string) if not m: raise Exception("Unable to parse mapiproxy:openchangedb: %s" % connection_string) group_dict = m.groupdict() if group_dict["scheme"] != "mysql": raise Exception("mapiproxy:openchangedb should start with mysql:// (we got %s)", group_dict["scheme"]) conn = MySQLdb.connect( host=group_dict["host"], user=group_dict["user"], passwd=group_dict["pass"], db=group_dict["db"] ) conn.autocommit(True) return conn def invalid_user(self, username): ret = self.samdb.search( base=self.samdb.domain_dn(), scope=ldb.SCOPE_SUBTREE, expression="(sAMAccountName=%s)" % ldb.binary_encode(username), ) return len(ret) != 1 def find_email_of(self, username): ret = self.samdb.search( base=self.samdb.domain_dn(), scope=ldb.SCOPE_SUBTREE, attrs=["mail"], expression="(sAMAccountName=%s)" % ldb.binary_encode(username), ) return ret[0]["mail"][0] def active_openchange_users(self): c = self.conn.cursor() c.execute("SELECT name FROM mailboxes") return sorted([row[0] for row in c.fetchall()])
class DCKeytabTests(tests.TestCase): def setUp(self): super(DCKeytabTests, self).setUp() self.lp = LoadParm() self.lp.load_default() self.creds = self.insta_creds(template=self.get_credentials()) self.ktfile = os.path.join(self.lp.get('private dir'), 'test.keytab') self.principal = self.creds.get_principal() def tearDown(self): super(DCKeytabTests, self).tearDown() os.remove(self.ktfile) def test_export_keytab(self): net = Net(None, self.lp) net.export_keytab(keytab=self.ktfile, principal=self.principal) assert os.path.exists(self.ktfile), 'keytab was not created' with open_bytes(self.ktfile) as bytes_kt: result = '' for c in bytes_kt.read(): if c in string.printable: result += c principal_parts = self.principal.split('@') assert principal_parts[0] in result and \ principal_parts[1] in result, \ 'Principal not found in generated keytab'
def user_policy_apply(user, password): user = user.split('\\')[-1] logger = logging.getLogger('gpupdate') logger.addHandler(logging.StreamHandler(sys.stdout)) logger.setLevel(logging.CRITICAL) log_level = lp.log_level() if log_level == 1: logger.setLevel(logging.ERROR) elif log_level == 2: logger.setLevel(logging.WARNING) elif log_level == 3: logger.setLevel(logging.INFO) elif log_level >= 4: logger.setLevel(logging.DEBUG) lp = LoadParm() lp.load_default() creds = Credentials() creds.guess(lp) creds.set_username(user) creds.set_password(password) _, user_exts = get_gp_client_side_extensions(logger) gp_extensions = [] for ext in user_exts: gp_extensions.append(ext(logger, lp, creds, store)) cache_dir = lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) try: apply_gp(lp, creds, logger, store, gp_extensions) except NTSTATUSError as e: logger.info(e.message)
class AuthSMB4(object): def __init__(self, user, password): self.user = user self.password = password _isLastErrorAvailable = False self.lp = LoadParm() self.lp.load_default() self.ip = '127.0.0.1' self.WorkGroup = str(self.lp.get("workgroup")) self.creds = credentials.Credentials() self.creds.set_username(self.user) self.creds.set_password(self.password) self.creds.set_domain(self.WorkGroup) self.creds.set_workstation("") self.logger = logging.getLogger(__name__) self.logger.addHandler(logging.StreamHandler(sys.stdout)) self.logger.setLevel(logging.INFO) def Autenticate(self): try: session_info_flags = (AUTH_SESSION_INFO_DEFAULT_GROUPS | AUTH_SESSION_INFO_AUTHENTICATED) LdapConn = samba.Ldb("ldap://%s" % self.ip, lp=self.lp, credentials=self.creds) DomainDN = LdapConn.get_default_basedn() search_filter = "sAMAccountName=%s" % self.user res = LdapConn.search(base=DomainDN, scope=SCOPE_SUBTREE, expression=search_filter, attrs=["dn"]) if len(res) == 0: return False user_dn = res[0].dn session = samba.auth.user_session( LdapConn, lp_ctx=self.lp, dn=user_dn, session_info_flags=session_info_flags) token = session.security_token if (token.has_builtin_administrators()): return True if (token.is_system()): return True except Exception, e: if (len(e.args) > 1): self.logger.info("%s %s" % (e.args[1], e.args[0])) self.SetError(e.args[1], e.args[0]) else: self.logger.info("%s " % (e.args[0])) self.SetError(e.args, 0) return False
class GPOTests(tests.TestCase): def setUp(self): super(GPOTests, self).setUp() self.server = os.environ["SERVER"] self.lp = LoadParm() self.lp.load_default() self.creds = self.insta_creds(template=self.get_credentials()) def tearDown(self): super(GPOTests, self).tearDown() def test_gpo_list(self): global poldir, dspath ads = gpo.ADS_STRUCT(self.server, self.lp, self.creds) if ads.connect(): gpos = ads.get_gpo_list(self.creds.get_username()) guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' names = ['Local Policy', guid] file_sys_paths = [None, '%s\\%s' % (poldir, guid)] ds_paths = [None, 'CN=%s,%s' % (guid, dspath)] for i in range(0, len(gpos)): assert gpos[i].name == names[i], \ 'The gpo name did not match expected name %s' % gpos[i].name assert gpos[i].file_sys_path == file_sys_paths[i], \ 'file_sys_path did not match expected %s' % gpos[i].file_sys_path assert gpos[i].ds_path == ds_paths[i], \ 'ds_path did not match expected %s' % gpos[i].ds_path def test_gpo_ads_does_not_segfault(self): try: ads = gpo.ADS_STRUCT(self.server, 42, self.creds) except: pass def test_gpt_version(self): global gpt_data local_path = self.lp.get("path", "sysvol") policies = 'addom.samba.example.com/Policies' guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' gpo_path = os.path.join(local_path, policies, guid) old_vers = gpo.gpo_get_sysvol_gpt_version(gpo_path)[1] with open(os.path.join(gpo_path, 'GPT.INI'), 'w') as gpt: gpt.write(gpt_data % 42) assert gpo.gpo_get_sysvol_gpt_version(gpo_path)[1] == 42, \ 'gpo_get_sysvol_gpt_version() did not return the expected version' with open(os.path.join(gpo_path, 'GPT.INI'), 'w') as gpt: gpt.write(gpt_data % old_vers) assert gpo.gpo_get_sysvol_gpt_version(gpo_path)[1] == old_vers, \ 'gpo_get_sysvol_gpt_version() did not return the expected version'
class AuthSMB4(object): def __init__(self,user,password): self.user = user self.password = password _isLastErrorAvailable=False self.lp = LoadParm() self.lp.load_default() self.ip = '127.0.0.1' self.WorkGroup = str(self.lp.get("workgroup")) self.creds = credentials.Credentials() self.creds.set_username(self.user) self.creds.set_password(self.password) self.creds.set_domain(self.WorkGroup) self.creds.set_workstation("") self.logger = logging.getLogger(__name__) self.logger.addHandler(logging.StreamHandler(sys.stdout)) self.logger.setLevel(logging.INFO) def Autenticate(self): try: session_info_flags = ( AUTH_SESSION_INFO_DEFAULT_GROUPS | AUTH_SESSION_INFO_AUTHENTICATED ) LdapConn = samba.Ldb("ldap://%s" % self.ip,lp=self.lp,credentials=self.creds) DomainDN = LdapConn.get_default_basedn() search_filter="sAMAccountName=%s" % self.user res = LdapConn.search(base=DomainDN, scope=SCOPE_SUBTREE,expression=search_filter, attrs=["dn"]) if len(res) == 0: return False user_dn = res[0].dn session = samba.auth.user_session(LdapConn, lp_ctx=self.lp, dn=user_dn,session_info_flags=session_info_flags) token = session.security_token if (token.has_builtin_administrators()): return True if(token.is_system()): return True except Exception,e: if(len(e.args)>1): self.logger.info("%s %s" % (e.args[1],e.args[0])) self.SetError(e.args[1],e.args[0]) else: self.logger.info("%s " % (e.args[0])) self.SetError(e.args,0) return False
class auth_base(object): def __init__(self, user, password): self.user = user self.password = password self.lp = LoadParm() self.lp.load_default() self.ip = '127.0.0.1' self.WorkGroup = str(self.lp.get("workgroup")) self.creds = credentials.Credentials() self.creds.set_username(self.user) self.creds.set_password(self.password) self.creds.set_domain(self.WorkGroup) self.creds.set_workstation("") def autenticate(self): try: session_info_flags = (df_gp1 | df_gp2) LdapConn = samba.Ldb("ldap://%s" % self.ip, lp=self.lp, credentials=self.creds) DomainDN = LdapConn.get_default_basedn() search_filter = "sAMAccountName=%s" % self.user res = LdapConn.search(base=DomainDN, scope=SCOPE_SUBTREE, expression=search_filter, attrs=["dn"]) if len(res) == 0: return False user_dn = res[0].dn session = samba.auth.user_session( LdapConn, lp_ctx=self.lp, dn=user_dn, session_info_flags=session_info_flags) token = session.security_token if (token.has_builtin_administrators()): return True if (token.is_system()): return True except Exception: return False return False
def test_admx_load(self): lp = LoadParm() lp.load(os.environ['SERVERCONFFILE']) local_path = lp.get('path', 'sysvol') admx_path = os.path.join(local_path, os.environ['REALM'].lower(), 'Policies', 'PolicyDefinitions') (result, out, err) = self.runsubcmd( "gpo", "admxload", "-H", "ldap://%s" % os.environ["SERVER"], "--admx-dir=%s" % os.path.join(source_path, 'libgpo/admx'), "-U%s%%%s" % (os.environ["USERNAME"], os.environ["PASSWORD"])) self.assertCmdSuccess(result, out, err, 'Filling PolicyDefinitions failed') self.assertTrue(os.path.exists(admx_path), 'PolicyDefinitions was not created') self.assertTrue(os.path.exists(os.path.join(admx_path, 'samba.admx')), 'Filling PolicyDefinitions failed') shutil.rmtree(admx_path)
class GPOTests(tests.TestCase): def setUp(self): super(GPOTests, self).setUp() self.server = os.environ["SERVER"] self.dc_account = self.server.upper() + '$' self.lp = LoadParm() self.lp.load_default() self.creds = self.insta_creds(template=self.get_credentials()) def tearDown(self): super(GPOTests, self).tearDown() def test_sec_ext_load_on_member(self): logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) try: gp_access_ext(logger, self.lp, self.creds, store) except Exception: self.fail('Initializing gp_access_ext should not require ad-dc')
def test_vgp_access_add(self): lp = LoadParm() lp.load(os.environ['SERVERCONFFILE']) (result, out, err) = self.runsublevelcmd( "gpo", ("manage", "access", "add"), self.gpo_guid, "allow", self.test_user, lp.get('realm').lower(), "-H", "ldap://%s" % os.environ["SERVER"], "-U%s%%%s" % (os.environ["USERNAME"], os.environ["PASSWORD"])) self.assertCmdSuccess(result, out, err, 'Access add failed') (result, out, err) = self.runsublevelcmd( "gpo", ("manage", "access", "add"), self.gpo_guid, "deny", self.test_group, lp.get('realm').lower(), "-H", "ldap://%s" % os.environ["SERVER"], "-U%s%%%s" % (os.environ["USERNAME"], os.environ["PASSWORD"])) self.assertCmdSuccess(result, out, err, 'Access add failed') allow_entry = '+:%s\\%s:ALL' % (lp.get('realm').lower(), self.test_user) deny_entry = '-:%s\\%s:ALL' % (lp.get('realm').lower(), self.test_group) (result, out, err) = self.runsublevelcmd( "gpo", ("manage", "access", "list"), self.gpo_guid, "-H", "ldap://%s" % os.environ["SERVER"], "-U%s%%%s" % (os.environ["USERNAME"], os.environ["PASSWORD"])) self.assertIn(allow_entry, out, 'The test entry was not found!') self.assertIn(deny_entry, out, 'The test entry was not found!') (result, out, err) = self.runsublevelcmd( "gpo", ("manage", "access", "remove"), self.gpo_guid, "allow", self.test_user, lp.get('realm').lower(), "-H", "ldap://%s" % os.environ["SERVER"], "-U%s%%%s" % (os.environ["USERNAME"], os.environ["PASSWORD"])) self.assertCmdSuccess(result, out, err, 'Access remove failed') (result, out, err) = self.runsublevelcmd( "gpo", ("manage", "access", "remove"), self.gpo_guid, "deny", self.test_group, lp.get('realm').lower(), "-H", "ldap://%s" % os.environ["SERVER"], "-U%s%%%s" % (os.environ["USERNAME"], os.environ["PASSWORD"])) self.assertCmdSuccess(result, out, err, 'Access remove failed') (result, out, err) = self.runsublevelcmd( "gpo", ("manage", "access", "list"), self.gpo_guid, "-H", "ldap://%s" % os.environ["SERVER"], "-U%s%%%s" % (os.environ["USERNAME"], os.environ["PASSWORD"])) self.assertNotIn(allow_entry, out, 'The test entry was still found!') self.assertNotIn(deny_entry, out, 'The test entry was still found!')
def setUpClass(self): # connect to the server's messaging bus (we need to explicitly load a # different smb.conf here, because in all other respects this test # wants to act as a separate remote client) server_conf = os.getenv('SERVERCONFFILE') if server_conf: lp_ctx = LoadParm(filename_for_non_global_lp=server_conf) else: samba.tests.env_loadparm() self.msg_ctx = Messaging((1,), lp_ctx=lp_ctx) self.msg_ctx.irpc_add_name(AUTH_EVENT_NAME) # Now switch back to using the client-side smb.conf. The tests will # use the first interface in the client.conf (we need to strip off # the subnet mask portion) lp_ctx = samba.tests.env_loadparm() client_ip_and_mask = lp_ctx.get('interfaces')[0] client_ip = client_ip_and_mask.split('/')[0] # the messaging ctx is the server's view of the world, so our own # client IP will be the remoteAddress when connections are logged self.remoteAddress = client_ip def messageHandler(context, msgType, src, message): # This does not look like sub unit output and it # makes these tests much easier to debug. print(message) jsonMsg = json.loads(message) context["messages"].append(jsonMsg) self.context = {"messages": []} self.msg_handler_and_context = (messageHandler, self.context) self.msg_ctx.register(self.msg_handler_and_context, msg_type=MSG_AUTH_LOG) self.remoteAddress = None self.server = os.environ["SERVER"] self.connection = None
def setUp(self): super(AuditLogTestBase, self).setUp() # connect to the server's messaging bus (we need to explicitly load a # different smb.conf here, because in all other respects this test # wants to act as a separate remote client) server_conf = os.getenv('SERVERCONFFILE') if server_conf: lp_ctx = LoadParm(filename_for_non_global_lp=server_conf) else: lp_ctx = self.get_loadparm() self.msg_ctx = Messaging((1,), lp_ctx=lp_ctx) self.msg_ctx.irpc_add_name(self.event_type) # Now switch back to using the client-side smb.conf. The tests will # use the first interface in the client.conf (we need to strip off # the subnet mask portion) lp_ctx = self.get_loadparm() client_ip_and_mask = lp_ctx.get('interfaces')[0] client_ip = client_ip_and_mask.split('/')[0] # the messaging ctx is the server's view of the world, so our own # client IP will be the remoteAddress when connections are logged self.remoteAddress = client_ip # # Check the remote address of a message against the one being used # for the tests. # def isRemote(message): audit = getAudit(message) if audit is None: return False remote = audit["remoteAddress"] if remote is None: return False try: addr = remote.split(":") return addr[1] == self.remoteAddress except IndexError: return False def messageHandler(context, msgType, src, message): # This does not look like sub unit output and it # makes these tests much easier to debug. print(message) jsonMsg = json.loads(message) if ((jsonMsg["type"] == "passwordChange" or jsonMsg["type"] == "dsdbChange" or jsonMsg["type"] == "groupChange") and isRemote(jsonMsg)): context["messages"].append(jsonMsg) elif jsonMsg["type"] == "dsdbTransaction": context["txnMessage"] = jsonMsg self.context = {"messages": [], "txnMessage": None} self.msg_handler_and_context = (messageHandler, self.context) self.msg_ctx.register(self.msg_handler_and_context, msg_type=self.message_type) self.msg_ctx.irpc_add_name(AUTH_EVENT_NAME) def authHandler(context, msgType, src, message): jsonMsg = json.loads(message) if jsonMsg["type"] == "Authorization" and isRemote(jsonMsg): # This does not look like sub unit output and it # makes these tests much easier to debug. print(message) context["sessionId"] = jsonMsg["Authorization"]["sessionId"] context["serviceDescription"] =\ jsonMsg["Authorization"]["serviceDescription"] self.auth_context = {"sessionId": "", "serviceDescription": ""} self.auth_handler_and_context = (authHandler, self.auth_context) self.msg_ctx.register(self.auth_handler_and_context, msg_type=MSG_AUTH_LOG) self.discardMessages() self.server = os.environ["SERVER"] self.connection = None
class GPOTests(tests.TestCase): def setUp(self): super(GPOTests, self).setUp() self.server = os.environ["SERVER"] self.lp = LoadParm() self.lp.load_default() self.creds = self.insta_creds(template=self.get_credentials()) def tearDown(self): super(GPOTests, self).tearDown() def test_gpo_list(self): global poldir, dspath ads = gpo.ADS_STRUCT(self.server, self.lp, self.creds) if ads.connect(): gpos = ads.get_gpo_list(self.creds.get_username()) guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' names = ['Local Policy', guid] file_sys_paths = [None, '%s\\%s' % (poldir, guid)] ds_paths = [None, 'CN=%s,%s' % (guid, dspath)] for i in range(0, len(gpos)): self.assertEquals( gpos[i].name, names[i], 'The gpo name did not match expected name %s' % gpos[i].name) self.assertEquals( gpos[i].file_sys_path, file_sys_paths[i], 'file_sys_path did not match expected %s' % gpos[i].file_sys_path) self.assertEquals( gpos[i].ds_path, ds_paths[i], 'ds_path did not match expected %s' % gpos[i].ds_path) def test_gpo_ads_does_not_segfault(self): try: ads = gpo.ADS_STRUCT(self.server, 42, self.creds) except: pass def test_gpt_version(self): global gpt_data local_path = self.lp.cache_path('gpo_cache') policies = 'ADDOM.SAMBA.EXAMPLE.COM/POLICIES' guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' gpo_path = os.path.join(local_path, policies, guid) old_vers = gpo.gpo_get_sysvol_gpt_version(gpo_path)[1] with open(os.path.join(gpo_path, 'GPT.INI'), 'w') as gpt: gpt.write(gpt_data % 42) self.assertEquals( gpo.gpo_get_sysvol_gpt_version(gpo_path)[1], 42, 'gpo_get_sysvol_gpt_version() did not return the expected version') with open(os.path.join(gpo_path, 'GPT.INI'), 'w') as gpt: gpt.write(gpt_data % old_vers) self.assertEquals( gpo.gpo_get_sysvol_gpt_version(gpo_path)[1], old_vers, 'gpo_get_sysvol_gpt_version() did not return the expected version') def test_check_refresh_gpo_list(self): cache = self.lp.cache_path('gpo_cache') ads = gpo.ADS_STRUCT(self.server, self.lp, self.creds) if ads.connect(): gpos = ads.get_gpo_list(self.creds.get_username()) check_refresh_gpo_list(self.server, self.lp, self.creds, gpos) self.assertTrue(os.path.exists(cache), 'GPO cache %s was not created' % cache) guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' gpt_ini = os.path.join(cache, 'ADDOM.SAMBA.EXAMPLE.COM/POLICIES', guid, 'GPT.INI') self.assertTrue(os.path.exists(gpt_ini), 'GPT.INI was not cached for %s' % guid) def test_check_refresh_gpo_list_malicious_paths(self): # the path cannot contain .. 
path = '/usr/local/samba/var/locks/sysvol/../../../../../../root/' self.assertRaises(OSError, check_safe_path, path) self.assertEqual(check_safe_path('/etc/passwd'), 'etc/passwd') self.assertEqual(check_safe_path('\\\\etc/\\passwd'), 'etc/passwd') # there should be no backslashes used to delineate paths before = 'sysvol/addom.samba.example.com\\Policies/' \ '{31B2F340-016D-11D2-945F-00C04FB984F9}\\GPT.INI' after = 'addom.samba.example.com/Policies/' \ '{31B2F340-016D-11D2-945F-00C04FB984F9}/GPT.INI' result = check_safe_path(before) self.assertEquals( result, after, 'check_safe_path() didn\'t' ' correctly convert \\ to /') def test_gpt_ext_register(self): this_path = os.path.dirname(os.path.realpath(__file__)) samba_path = os.path.realpath(os.path.join(this_path, '../../../')) ext_path = os.path.join(samba_path, 'python/samba/gp_sec_ext.py') ext_guid = '{827D319E-6EAC-11D2-A4EA-00C04F79F83A}' ret = register_gp_extension(ext_guid, 'gp_sec_ext', ext_path, smb_conf=self.lp.configfile, machine=True, user=False) self.assertTrue(ret, 'Failed to register a gp ext') gp_exts = list_gp_extensions(self.lp.configfile) self.assertTrue(ext_guid in gp_exts.keys(), 'Failed to list gp exts') self.assertEquals(gp_exts[ext_guid]['DllName'], ext_path, 'Failed to list gp exts') unregister_gp_extension(ext_guid) gp_exts = list_gp_extensions(self.lp.configfile) self.assertTrue(ext_guid not in gp_exts.keys(), 'Failed to unregister gp exts') self.assertTrue(check_guid(ext_guid), 'Failed to parse valid guid') self.assertFalse(check_guid('AAAAAABBBBBBBCCC'), 'Parsed invalid guid') lp, parser = parse_gpext_conf(self.lp.configfile) self.assertTrue(lp and parser, 'parse_gpext_conf() invalid return') parser.add_section('test_section') parser.set('test_section', 'test_var', ext_guid) atomic_write_conf(lp, parser) lp, parser = parse_gpext_conf(self.lp.configfile) self.assertTrue('test_section' in parser.sections(), 'test_section not found in gpext.conf') self.assertEquals(parser.get('test_section', 'test_var'), ext_guid, 'Failed to find test variable in gpext.conf') parser.remove_section('test_section') atomic_write_conf(lp, parser) def test_gp_log_get_applied(self): local_path = self.lp.get('path', 'sysvol') guids = [ '{31B2F340-016D-11D2-945F-00C04FB984F9}', '{6AC1786C-016F-11D2-945F-00C04FB984F9}' ] gpofile = '%s/addom.samba.example.com/Policies/%s/MACHINE/Microsoft/' \ 'Windows NT/SecEdit/GptTmpl.inf' stage = '[System Access]\nMinimumPasswordAge = 998\n' cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) for guid in guids: gpttmpl = gpofile % (local_path, guid) ret = stage_file(gpttmpl, stage) self.assertTrue(ret, 'Could not create the target %s' % gpttmpl) ret = gpupdate_force(self.lp) self.assertEquals(ret, 0, 'gpupdate force failed') gp_db = store.get_gplog('ADDC$') applied_guids = gp_db.get_applied_guids() self.assertEquals(len(applied_guids), 2, 'The guids were not found') self.assertIn(guids[0], applied_guids, '%s not in applied guids' % guids[0]) self.assertIn(guids[1], applied_guids, '%s not in applied guids' % guids[1]) applied_settings = gp_db.get_applied_settings(applied_guids) for policy in applied_settings: self.assertIn('System Access', policy[1], 'System Access policies not set') self.assertIn('minPwdAge', policy[1]['System Access'], 'minPwdAge policy not set') if policy[0] == guids[0]: self.assertEqual(int(policy[1]['System Access']['minPwdAge']), days2rel_nttime(1), 'minPwdAge policy not set') elif policy[0] == guids[1]: 
self.assertEqual(int(policy[1]['System Access']['minPwdAge']), days2rel_nttime(998), 'minPwdAge policy not set') ads = gpo.ADS_STRUCT(self.server, self.lp, self.creds) if ads.connect(): gpos = ads.get_gpo_list('ADDC$') del_gpos = get_deleted_gpos_list(gp_db, gpos[:-1]) self.assertEqual(len(del_gpos), 1, 'Returned delete gpos is incorrect') self.assertEqual(guids[-1], del_gpos[0][0], 'GUID for delete gpo is incorrect') self.assertIn('System Access', del_gpos[0][1], 'System Access policies not set for removal') self.assertIn('minPwdAge', del_gpos[0][1]['System Access'], 'minPwdAge policy not set for removal') for guid in guids: gpttmpl = gpofile % (local_path, guid) unstage_file(gpttmpl) ret = gpupdate_unapply(self.lp) self.assertEquals(ret, 0, 'gpupdate unapply failed') def test_process_group_policy(self): local_path = self.lp.cache_path('gpo_cache') guids = [ '{31B2F340-016D-11D2-945F-00C04FB984F9}', '{6AC1786C-016F-11D2-945F-00C04FB984F9}' ] gpofile = '%s/ADDOM.SAMBA.EXAMPLE.COM/POLICIES/%s/MACHINE/MICROSOFT/' \ 'WINDOWS NT/SECEDIT/GPTTMPL.INF' logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() # Initialize the group policy extension ext = gp_sec_ext(logger, self.lp, machine_creds, store) ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) stage = '[Kerberos Policy]\nMaxTicketAge = %d\n' opts = [100, 200] for i in range(0, 2): gpttmpl = gpofile % (local_path, guids[i]) ret = stage_file(gpttmpl, stage % opts[i]) self.assertTrue(ret, 'Could not create the target %s' % gpttmpl) # Process all gpos ext.process_group_policy([], gpos) ret = store.get_int('kdc:user_ticket_lifetime') self.assertEqual(ret, opts[1], 'Higher priority policy was not set') # Remove policy gp_db = store.get_gplog(machine_creds.get_username()) del_gpos = get_deleted_gpos_list(gp_db, []) ext.process_group_policy(del_gpos, []) ret = store.get_int('kdc:user_ticket_lifetime') self.assertEqual(ret, None, 'MaxTicketAge should not have applied') # Process just the first gpo ext.process_group_policy([], gpos[:-1]) ret = store.get_int('kdc:user_ticket_lifetime') self.assertEqual(ret, opts[0], 'Lower priority policy was not set') # Remove policy ext.process_group_policy(del_gpos, []) for guid in guids: gpttmpl = gpofile % (local_path, guid) unstage_file(gpttmpl)
setup_add_ldif, setup_ldb ) from samba.provision.sambadns import secretsdb_setup_dns if __name__ == '__main__': ## most of this is extracted from source4/scripting/python/samba/provision/* lp = LoadParm() lp.load('/etc/samba/smb.conf') samdb = SamDB('/var/lib/samba/private/sam.ldb', session_info=system_session(lp), lp=lp) secretsdb = samba.Ldb('/var/lib/samba/private/secrets.ldb', session_info=system_session(lp), lp=lp) paths = ProvisionPaths() paths.private_dir = lp.get("private dir") names = ProvisionNames() # NT domain, kerberos realm, root dn, domain dn, domain dns name names.realm = lp.get("realm").upper() names.domain = lp.get("workgroup").upper() names.domaindn = samdb.domain_dn() names.dnsdomain = samba.ldb.Dn(samdb, names.domaindn).canonical_str().replace("/", "") basedn = samba.dn_from_dns_name(names.dnsdomain) # Get the netbiosname first (could be obtained from smb.conf in theory) res = secretsdb.search(expression="(flatname=%s)" % names.domain,base="CN=Primary Domains", scope=samba.ldb.SCOPE_SUBTREE, attrs=["sAMAccountName"]) names.netbiosname = str(res[0]["sAMAccountName"]).replace("$","")
class MigrationFix(object): def __init__(self, username=None, no_dry_run=False, mysql_string=None): self.lp = LoadParm() self.lp.set('debug level', '0') self.lp.load_default() self.username = username self.no_dry_run = no_dry_run self.conn = self._open_mysql_conn(mysql_string) def _open_mysql_conn(self, mysql_conn): if mysql_conn is None: conn_str = self.lp.get('mapiproxy:openchangedb') else: conn_str = mysql_conn if not conn_str: raise Exception( "No mysql connection string specified and no mapiproxy:openchangedb param option found" ) # mysql://openchange:password@localhost/openchange m = re.search( '(?P<scheme>.+)://(?P<user>.+):(?P<pass>.+)@' '(?P<host>.+)/(?P<db>.+)', conn_str) if not m: raise Exception("Unable to parse mysql connection string: %s" % conn_str) group_dict = m.groupdict() if group_dict['scheme'] != 'mysql': raise Exception( "mysql connection string should start with mysql:// (got %s)", group_dict['scheme']) conn = MySQLdb.connect(host=group_dict['host'], user=group_dict['user'], passwd=group_dict['pass'], db=group_dict['db']) conn.autocommit(True) return conn def fix(self, username, folder_id, uri, mailbox_id, ou_id): error = False c = self.conn.cursor() c.execute( "SELECT fmid FROM mapistore_indexing WHERE username=%s AND url=%s", (username, uri)) result = c.fetchone() if result is None: print '[KO]: %s: could not find fmid for %s' % (username, uri) return True fmid = result[0] if str(fmid) != str(folder_id): error = True if self.no_dry_run is True: c = self.conn.cursor() c.execute( "UPDATE folders SET folder_id=%s WHERE MAPIStoreURI=%s AND mailbox_id=%s AND ou_id=%s", (fmid, uri, mailbox_id, ou_id)) print '[FIX]: %s: folder_id for %s has been fixed and is now set to %s' % ( username, uri, folder_id) else: print '[KO]: %s: Mismatch for %s: found %s, expected %s' % ( username, uri, folder_id, fmid) return error def run_all(self): c = self.conn.cursor() c.execute("SELECT id,ou_id,name FROM mailboxes") rows = c.fetchall() for row in rows: mailbox_id = row[0] ou_id = row[1] username = row[2] # Retrieve all MAPIStoreURI from folders table for username c = self.conn.cursor() c.execute( "SELECT folder_id,MAPIStoreURI FROM folders WHERE mailbox_id=%s AND ou_id=%s AND MAPIStoreURI!=\"\"", (mailbox_id, ou_id)) frows = c.fetchall() # Now check in mapistore_indexing if fmid are matching for given URI gl_error = False for frow in frows: folder_id = frow[0] uri = frow[1] error = self.fix(username, folder_id, uri, mailbox_id, ou_id) if error is True: gl_error = True if gl_error is False: print '[OK]: %s is OK' % (username) def run(self): # Retrieve username id, ou_id c = self.conn.cursor() c.execute("SELECT id,ou_id FROM mailboxes WHERE name=%s", self.username) (mailbox_id, ou_id) = c.fetchone() # Retrieve all MAPIStoreURI from folders table for username c = self.conn.cursor() c.execute( "SELECT folder_id,MAPIStoreURI FROM folders WHERE mailbox_id=%s AND ou_id=%s AND MAPIStoreURI!=\"\"", (mailbox_id, ou_id)) rows = c.fetchall() # Now check in mapistore_indexing if fmid are matching for given URI for row in rows: folder_id = row[0] uri = row[1] self.fix(self.username, folder_id, uri, mailbox_id, ou_id)
def test_smb_conf_ext(self): local_path = self.lp.cache_path('gpo_cache') guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' reg_pol = os.path.join(local_path, policies, guid, 'MACHINE/REGISTRY.POL') logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) entries = [] e = preg.entry() e.keyname = 'Software\\Policies\\Samba\\smb_conf\\template homedir' e.type = 1 e.data = '/home/samba/%D/%U' e.valuename = 'template homedir' entries.append(e) e = preg.entry() e.keyname = 'Software\\Policies\\Samba\\smb_conf\\apply group policies' e.type = 4 e.data = 1 e.valuename = 'apply group policies' entries.append(e) e = preg.entry() e.keyname = 'Software\\Policies\\Samba\\smb_conf\\ldap timeout' e.type = 4 e.data = 9999 e.valuename = 'ldap timeout' entries.append(e) stage = preg.file() stage.num_entries = len(entries) stage.entries = entries ret = stage_file(reg_pol, ndr_pack(stage)) self.assertTrue(ret, 'Failed to create the Registry.pol file') with NamedTemporaryFile(suffix='_smb.conf') as f: copyfile(self.lp.configfile, f.name) lp = LoadParm(f.name) # Initialize the group policy extension ext = gp_smb_conf_ext(logger, lp, machine_creds, store) ext.process_group_policy([], gpos) lp = LoadParm(f.name) template_homedir = lp.get('template homedir') self.assertEquals(template_homedir, '/home/samba/%D/%U', 'template homedir was not applied') apply_group_policies = lp.get('apply group policies') self.assertTrue(apply_group_policies, 'apply group policies was not applied') ldap_timeout = lp.get('ldap timeout') self.assertEquals(ldap_timeout, 9999, 'ldap timeout was not applied') # Remove policy gp_db = store.get_gplog(machine_creds.get_username()) del_gpos = get_deleted_gpos_list(gp_db, []) ext.process_group_policy(del_gpos, []) lp = LoadParm(f.name) template_homedir = lp.get('template homedir') self.assertEquals(template_homedir, self.lp.get('template homedir'), 'template homedir was not unapplied') apply_group_policies = lp.get('apply group policies') self.assertEquals(apply_group_policies, self.lp.get('apply group policies'), 'apply group policies was not unapplied') ldap_timeout = lp.get('ldap timeout') self.assertEquals(ldap_timeout, self.lp.get('ldap timeout'), 'ldap timeout was not unapplied') # Unstage the Registry.pol file unstage_file(reg_pol)
class GPOTests(tests.TestCase): def setUp(self): super(GPOTests, self).setUp() self.server = os.environ["SERVER"] self.dc_account = self.server.upper() + '$' self.lp = LoadParm() self.lp.load_default() self.creds = self.insta_creds(template=self.get_credentials()) def tearDown(self): super(GPOTests, self).tearDown() def test_gpo_list(self): global poldir, dspath ads = gpo.ADS_STRUCT(self.server, self.lp, self.creds) if ads.connect(): gpos = ads.get_gpo_list(self.creds.get_username()) guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' names = ['Local Policy', guid] file_sys_paths = [None, '%s\\%s' % (poldir, guid)] ds_paths = [None, 'CN=%s,%s' % (guid, dspath)] for i in range(0, len(gpos)): self.assertEqual( gpos[i].name, names[i], 'The gpo name did not match expected name %s' % gpos[i].name) self.assertEqual( gpos[i].file_sys_path, file_sys_paths[i], 'file_sys_path did not match expected %s' % gpos[i].file_sys_path) self.assertEqual( gpos[i].ds_path, ds_paths[i], 'ds_path did not match expected %s' % gpos[i].ds_path) def test_gpo_ads_does_not_segfault(self): try: ads = gpo.ADS_STRUCT(self.server, 42, self.creds) except: pass def test_gpt_version(self): global gpt_data local_path = self.lp.cache_path('gpo_cache') guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' gpo_path = os.path.join(local_path, policies, guid) old_vers = gpo.gpo_get_sysvol_gpt_version(gpo_path)[1] with open(os.path.join(gpo_path, 'GPT.INI'), 'w') as gpt: gpt.write(gpt_data % 42) self.assertEqual( gpo.gpo_get_sysvol_gpt_version(gpo_path)[1], 42, 'gpo_get_sysvol_gpt_version() did not return the expected version') with open(os.path.join(gpo_path, 'GPT.INI'), 'w') as gpt: gpt.write(gpt_data % old_vers) self.assertEqual( gpo.gpo_get_sysvol_gpt_version(gpo_path)[1], old_vers, 'gpo_get_sysvol_gpt_version() did not return the expected version') def test_check_refresh_gpo_list(self): cache = self.lp.cache_path('gpo_cache') ads = gpo.ADS_STRUCT(self.server, self.lp, self.creds) if ads.connect(): gpos = ads.get_gpo_list(self.creds.get_username()) check_refresh_gpo_list(self.server, self.lp, self.creds, gpos) self.assertTrue(os.path.exists(cache), 'GPO cache %s was not created' % cache) guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' gpt_ini = os.path.join(cache, policies, guid, 'GPT.INI') self.assertTrue(os.path.exists(gpt_ini), 'GPT.INI was not cached for %s' % guid) def test_check_refresh_gpo_list_malicious_paths(self): # the path cannot contain .. 
path = '/usr/local/samba/var/locks/sysvol/../../../../../../root/' self.assertRaises(OSError, check_safe_path, path) self.assertEqual(check_safe_path('/etc/passwd'), 'etc/passwd') self.assertEqual(check_safe_path('\\\\etc/\\passwd'), 'etc/passwd') # there should be no backslashes used to delineate paths before = 'sysvol/' + realm + '\\Policies/' \ '{31B2F340-016D-11D2-945F-00C04FB984F9}\\GPT.INI' after = realm + '/Policies/' \ '{31B2F340-016D-11D2-945F-00C04FB984F9}/GPT.INI' result = check_safe_path(before) self.assertEqual( result, after, 'check_safe_path() didn\'t' ' correctly convert \\ to /') def test_gpt_ext_register(self): this_path = os.path.dirname(os.path.realpath(__file__)) samba_path = os.path.realpath(os.path.join(this_path, '../../../')) ext_path = os.path.join(samba_path, 'python/samba/gp_sec_ext.py') ext_guid = '{827D319E-6EAC-11D2-A4EA-00C04F79F83A}' ret = register_gp_extension(ext_guid, 'gp_access_ext', ext_path, smb_conf=self.lp.configfile, machine=True, user=False) self.assertTrue(ret, 'Failed to register a gp ext') gp_exts = list_gp_extensions(self.lp.configfile) self.assertTrue(ext_guid in gp_exts.keys(), 'Failed to list gp exts') self.assertEqual(gp_exts[ext_guid]['DllName'], ext_path, 'Failed to list gp exts') unregister_gp_extension(ext_guid) gp_exts = list_gp_extensions(self.lp.configfile) self.assertTrue(ext_guid not in gp_exts.keys(), 'Failed to unregister gp exts') self.assertTrue(check_guid(ext_guid), 'Failed to parse valid guid') self.assertFalse(check_guid('AAAAAABBBBBBBCCC'), 'Parsed invalid guid') lp, parser = parse_gpext_conf(self.lp.configfile) self.assertTrue(lp and parser, 'parse_gpext_conf() invalid return') parser.add_section('test_section') parser.set('test_section', 'test_var', ext_guid) atomic_write_conf(lp, parser) lp, parser = parse_gpext_conf(self.lp.configfile) self.assertTrue('test_section' in parser.sections(), 'test_section not found in gpext.conf') self.assertEqual(parser.get('test_section', 'test_var'), ext_guid, 'Failed to find test variable in gpext.conf') parser.remove_section('test_section') atomic_write_conf(lp, parser) def test_gp_log_get_applied(self): local_path = self.lp.get('path', 'sysvol') guids = [ '{31B2F340-016D-11D2-945F-00C04FB984F9}', '{6AC1786C-016F-11D2-945F-00C04FB984F9}' ] gpofile = '%s/' + realm + '/Policies/%s/MACHINE/Microsoft/' \ 'Windows NT/SecEdit/GptTmpl.inf' stage = '[System Access]\nMinimumPasswordAge = 998\n' cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) for guid in guids: gpttmpl = gpofile % (local_path, guid) ret = stage_file(gpttmpl, stage) self.assertTrue(ret, 'Could not create the target %s' % gpttmpl) ret = gpupdate_force(self.lp) self.assertEqual(ret, 0, 'gpupdate force failed') gp_db = store.get_gplog(self.dc_account) applied_guids = gp_db.get_applied_guids() self.assertEqual(len(applied_guids), 2, 'The guids were not found') self.assertIn(guids[0], applied_guids, '%s not in applied guids' % guids[0]) self.assertIn(guids[1], applied_guids, '%s not in applied guids' % guids[1]) applied_settings = gp_db.get_applied_settings(applied_guids) for policy in applied_settings: self.assertIn('System Access', policy[1], 'System Access policies not set') self.assertIn('minPwdAge', policy[1]['System Access'], 'minPwdAge policy not set') if policy[0] == guids[0]: self.assertEqual(int(policy[1]['System Access']['minPwdAge']), days2rel_nttime(1), 'minPwdAge policy not set') elif policy[0] == guids[1]: self.assertEqual(int(policy[1]['System Access']['minPwdAge']), 
days2rel_nttime(998), 'minPwdAge policy not set') ads = gpo.ADS_STRUCT(self.server, self.lp, self.creds) if ads.connect(): gpos = ads.get_gpo_list(self.dc_account) del_gpos = get_deleted_gpos_list(gp_db, gpos[:-1]) self.assertEqual(len(del_gpos), 1, 'Returned delete gpos is incorrect') self.assertEqual(guids[-1], del_gpos[0][0], 'GUID for delete gpo is incorrect') self.assertIn('System Access', del_gpos[0][1], 'System Access policies not set for removal') self.assertIn('minPwdAge', del_gpos[0][1]['System Access'], 'minPwdAge policy not set for removal') for guid in guids: gpttmpl = gpofile % (local_path, guid) unstage_file(gpttmpl) ret = gpupdate_unapply(self.lp) self.assertEqual(ret, 0, 'gpupdate unapply failed') def test_process_group_policy(self): local_path = self.lp.cache_path('gpo_cache') guids = [ '{31B2F340-016D-11D2-945F-00C04FB984F9}', '{6AC1786C-016F-11D2-945F-00C04FB984F9}' ] gpofile = '%s/' + policies + '/%s/MACHINE/MICROSOFT/' \ 'WINDOWS NT/SECEDIT/GPTTMPL.INF' logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() # Initialize the group policy extension ext = gp_krb_ext(logger, self.lp, machine_creds, store) ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) stage = '[Kerberos Policy]\nMaxTicketAge = %d\n' opts = [100, 200] for i in range(0, 2): gpttmpl = gpofile % (local_path, guids[i]) ret = stage_file(gpttmpl, stage % opts[i]) self.assertTrue(ret, 'Could not create the target %s' % gpttmpl) # Process all gpos ext.process_group_policy([], gpos) ret = store.get_int('kdc:user_ticket_lifetime') self.assertEqual(ret, opts[1], 'Higher priority policy was not set') # Remove policy gp_db = store.get_gplog(machine_creds.get_username()) del_gpos = get_deleted_gpos_list(gp_db, []) ext.process_group_policy(del_gpos, []) ret = store.get_int('kdc:user_ticket_lifetime') self.assertEqual(ret, None, 'MaxTicketAge should not have applied') # Process just the first gpo ext.process_group_policy([], gpos[:-1]) ret = store.get_int('kdc:user_ticket_lifetime') self.assertEqual(ret, opts[0], 'Lower priority policy was not set') # Remove policy ext.process_group_policy(del_gpos, []) for guid in guids: gpttmpl = gpofile % (local_path, guid) unstage_file(gpttmpl) def test_gp_scripts(self): local_path = self.lp.cache_path('gpo_cache') guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' reg_pol = os.path.join(local_path, policies, guid, 'MACHINE/REGISTRY.POL') logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() # Initialize the group policy extension ext = gp_scripts_ext(logger, self.lp, machine_creds, store) ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) reg_key = b'Software\\Policies\\Samba\\Unix Settings' sections = { b'%s\\Daily Scripts' % reg_key: '.cron.daily', b'%s\\Monthly Scripts' % reg_key: '.cron.monthly', b'%s\\Weekly Scripts' % reg_key: '.cron.weekly', b'%s\\Hourly Scripts' % reg_key: '.cron.hourly' } for keyname in sections.keys(): # Stage the Registry.pol file with test data stage = preg.file() e = preg.entry() e.keyname = keyname e.valuename = b'Software\\Policies\\Samba\\Unix 
Settings' e.type = 1 e.data = b'echo hello world' stage.num_entries = 1 stage.entries = [e] ret = stage_file(reg_pol, ndr_pack(stage)) self.assertTrue(ret, 'Could not create the target %s' % reg_pol) # Process all gpos, with temp output directory with TemporaryDirectory(sections[keyname]) as dname: ext.process_group_policy([], gpos, dname) scripts = os.listdir(dname) self.assertEquals( len(scripts), 1, 'The %s script was not created' % keyname.decode()) out, _ = Popen([os.path.join(dname, scripts[0])], stdout=PIPE).communicate() self.assertIn(b'hello world', out, '%s script execution failed' % keyname.decode()) # Remove policy gp_db = store.get_gplog(machine_creds.get_username()) del_gpos = get_deleted_gpos_list(gp_db, []) ext.process_group_policy(del_gpos, []) self.assertEquals(len(os.listdir(dname)), 0, 'Unapply failed to cleanup scripts') # Unstage the Registry.pol file unstage_file(reg_pol) def test_gp_sudoers(self): local_path = self.lp.cache_path('gpo_cache') guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' reg_pol = os.path.join(local_path, policies, guid, 'MACHINE/REGISTRY.POL') logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() # Initialize the group policy extension ext = gp_sudoers_ext(logger, self.lp, machine_creds, store) ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) # Stage the Registry.pol file with test data stage = preg.file() e = preg.entry() e.keyname = b'Software\\Policies\\Samba\\Unix Settings\\Sudo Rights' e.valuename = b'Software\\Policies\\Samba\\Unix Settings' e.type = 1 e.data = b'fakeu ALL=(ALL) NOPASSWD: ALL' stage.num_entries = 1 stage.entries = [e] ret = stage_file(reg_pol, ndr_pack(stage)) self.assertTrue(ret, 'Could not create the target %s' % reg_pol) # Process all gpos, with temp output directory with TemporaryDirectory() as dname: ext.process_group_policy([], gpos, dname) sudoers = os.listdir(dname) self.assertEquals(len(sudoers), 1, 'The sudoer file was not created') self.assertIn(e.data, open(os.path.join(dname, sudoers[0]), 'r').read(), 'The sudoers entry was not applied') # Remove policy gp_db = store.get_gplog(machine_creds.get_username()) del_gpos = get_deleted_gpos_list(gp_db, []) ext.process_group_policy(del_gpos, []) self.assertEquals(len(os.listdir(dname)), 0, 'Unapply failed to cleanup scripts') # Unstage the Registry.pol file unstage_file(reg_pol) def test_gp_inf_ext_utf(self): logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() ext = gp_inf_ext(logger, self.lp, machine_creds, store) test_data = '[Kerberos Policy]\nMaxTicketAge = 99\n' with NamedTemporaryFile() as f: with codecs.open(f.name, 'w', 'utf-16') as w: w.write(test_data) try: inf_conf = ext.read(f.name) except UnicodeDecodeError: self.fail('Failed to parse utf-16') self.assertIn('Kerberos Policy', inf_conf.keys(), 'Kerberos Policy was not read from the file') self.assertEquals(inf_conf.get('Kerberos Policy', 'MaxTicketAge'), '99', 'MaxTicketAge was not read from the file') with NamedTemporaryFile() as f: with codecs.open(f.name, 'w', 'utf-8') as w: w.write(test_data) inf_conf = ext.read(f.name) self.assertIn('Kerberos Policy', inf_conf.keys(), 
'Kerberos Policy was not read from the file') self.assertEquals(inf_conf.get('Kerberos Policy', 'MaxTicketAge'), '99', 'MaxTicketAge was not read from the file') def test_rsop(self): logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') local_path = self.lp.cache_path('gpo_cache') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) gp_extensions = [] gp_extensions.append(gp_krb_ext) gp_extensions.append(gp_scripts_ext) gp_extensions.append(gp_sudoers_ext) gp_extensions.append(gp_smb_conf_ext) gp_extensions.append(gp_msgs_ext) # Create registry stage data reg_pol = os.path.join(local_path, policies, '%s/MACHINE/REGISTRY.POL') reg_stage = preg.file() e = preg.entry() e.keyname = b'Software\\Policies\\Samba\\Unix Settings\\Daily Scripts' e.valuename = b'Software\\Policies\\Samba\\Unix Settings' e.type = 1 e.data = b'echo hello world' e2 = preg.entry() e2.keyname = b'Software\\Policies\\Samba\\Unix Settings\\Sudo Rights' e2.valuename = b'Software\\Policies\\Samba\\Unix Settings' e2.type = 1 e2.data = b'fakeu ALL=(ALL) NOPASSWD: ALL' e3 = preg.entry() e3.keyname = 'Software\\Policies\\Samba\\smb_conf\\apply group policies' e3.type = 4 e3.data = 1 e3.valuename = 'apply group policies' e4 = preg.entry() e4.keyname = b'Software\\Policies\\Samba\\Unix Settings\\Messages' e4.valuename = b'issue' e4.type = 1 e4.data = b'Welcome to \\s \\r \\l' reg_stage.num_entries = 4 reg_stage.entries = [e, e2, e3, e4] # Create krb stage date gpofile = os.path.join(local_path, policies, '%s/MACHINE/MICROSOFT/' \ 'WINDOWS NT/SECEDIT/GPTTMPL.INF') krb_stage = '[Kerberos Policy]\nMaxTicketAge = 99\n' for g in [g for g in gpos if g.file_sys_path]: ret = stage_file(gpofile % g.name, krb_stage) self.assertTrue( ret, 'Could not create the target %s' % (gpofile % g.name)) ret = stage_file(reg_pol % g.name, ndr_pack(reg_stage)) self.assertTrue( ret, 'Could not create the target %s' % (reg_pol % g.name)) for ext in gp_extensions: ext = ext(logger, self.lp, machine_creds, store) ret = ext.rsop(g) self.assertEquals( len(ret.keys()), 1, 'A single policy should have been displayed') # Check the Security Extension if type(ext) == gp_krb_ext: self.assertIn('Kerberos Policy', ret.keys(), 'Kerberos Policy not found') self.assertIn('MaxTicketAge', ret['Kerberos Policy'], 'MaxTicketAge setting not found') self.assertEquals(ret['Kerberos Policy']['MaxTicketAge'], '99', 'MaxTicketAge was not set to 99') # Check the Scripts Extension elif type(ext) == gp_scripts_ext: self.assertIn('Daily Scripts', ret.keys(), 'Daily Scripts not found') self.assertIn('echo hello world', ret['Daily Scripts'], 'Daily script was not created') # Check the Sudoers Extension elif type(ext) == gp_sudoers_ext: self.assertIn('Sudo Rights', ret.keys(), 'Sudoers not found') self.assertIn('fakeu ALL=(ALL) NOPASSWD: ALL', ret['Sudo Rights'], 'Sudoers policy not created') # Check the smb.conf Extension elif type(ext) == gp_smb_conf_ext: self.assertIn('smb.conf', ret.keys(), 'apply group policies was not applied') self.assertIn(e3.valuename, ret['smb.conf'], 'apply group policies was not applied') self.assertEquals(ret['smb.conf'][e3.valuename], e3.data, 'apply group policies was not set') # Check the Messages Extension elif type(ext) == gp_msgs_ext: self.assertIn('/etc/issue', ret, 'Login Prompt Message not applied') 
self.assertEquals(ret['/etc/issue'], e4.data, 'Login Prompt Message not set') unstage_file(gpofile % g.name) unstage_file(reg_pol % g.name) # Check that a call to gpupdate --rsop also succeeds ret = rsop(self.lp) self.assertEquals(ret, 0, 'gpupdate --rsop failed!') def test_gp_unapply(self): logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') local_path = self.lp.cache_path('gpo_cache') guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) gp_extensions = [] gp_extensions.append(gp_krb_ext) gp_extensions.append(gp_scripts_ext) gp_extensions.append(gp_sudoers_ext) # Create registry stage data reg_pol = os.path.join(local_path, policies, '%s/MACHINE/REGISTRY.POL') reg_stage = preg.file() e = preg.entry() e.keyname = b'Software\\Policies\\Samba\\Unix Settings\\Daily Scripts' e.valuename = b'Software\\Policies\\Samba\\Unix Settings' e.type = 1 e.data = b'echo hello world' e2 = preg.entry() e2.keyname = b'Software\\Policies\\Samba\\Unix Settings\\Sudo Rights' e2.valuename = b'Software\\Policies\\Samba\\Unix Settings' e2.type = 1 e2.data = b'fakeu ALL=(ALL) NOPASSWD: ALL' reg_stage.num_entries = 2 reg_stage.entries = [e, e2] # Create krb stage date gpofile = os.path.join(local_path, policies, '%s/MACHINE/MICROSOFT/' \ 'WINDOWS NT/SECEDIT/GPTTMPL.INF') krb_stage = '[Kerberos Policy]\nMaxTicketAge = 99\n' ret = stage_file(gpofile % guid, krb_stage) self.assertTrue(ret, 'Could not create the target %s' % (gpofile % guid)) ret = stage_file(reg_pol % guid, ndr_pack(reg_stage)) self.assertTrue(ret, 'Could not create the target %s' % (reg_pol % guid)) # Process all gpos, with temp output directory remove = [] with TemporaryDirectory() as dname: for ext in gp_extensions: ext = ext(logger, self.lp, machine_creds, store) if type(ext) == gp_krb_ext: ext.process_group_policy([], gpos) ret = store.get_int('kdc:user_ticket_lifetime') self.assertEqual(ret, 99, 'Kerberos policy was not set') elif type(ext) in [gp_scripts_ext, gp_sudoers_ext]: ext.process_group_policy([], gpos, dname) gp_db = store.get_gplog(machine_creds.get_username()) applied_settings = gp_db.get_applied_settings([guid]) for _, fname in applied_settings[-1][-1][str(ext)].items(): self.assertIn(dname, fname, 'Test file not created in tmp dir') self.assertTrue(os.path.exists(fname), 'Test file not created') remove.append(fname) # Unapply policy, and ensure policies are removed gpupdate_unapply(self.lp) for fname in remove: self.assertFalse(os.path.exists(fname), 'Unapply did not remove test file') ret = store.get_int('kdc:user_ticket_lifetime') self.assertNotEqual(ret, 99, 'Kerberos policy was not unapplied') unstage_file(gpofile % guid) unstage_file(reg_pol % guid) def test_smb_conf_ext(self): local_path = self.lp.cache_path('gpo_cache') guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' reg_pol = os.path.join(local_path, policies, guid, 'MACHINE/REGISTRY.POL') logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) entries = [] e = preg.entry() 
e.keyname = 'Software\\Policies\\Samba\\smb_conf\\template homedir' e.type = 1 e.data = '/home/samba/%D/%U' e.valuename = 'template homedir' entries.append(e) e = preg.entry() e.keyname = 'Software\\Policies\\Samba\\smb_conf\\apply group policies' e.type = 4 e.data = 1 e.valuename = 'apply group policies' entries.append(e) e = preg.entry() e.keyname = 'Software\\Policies\\Samba\\smb_conf\\ldap timeout' e.type = 4 e.data = 9999 e.valuename = 'ldap timeout' entries.append(e) stage = preg.file() stage.num_entries = len(entries) stage.entries = entries ret = stage_file(reg_pol, ndr_pack(stage)) self.assertTrue(ret, 'Failed to create the Registry.pol file') with NamedTemporaryFile(suffix='_smb.conf') as f: copyfile(self.lp.configfile, f.name) lp = LoadParm(f.name) # Initialize the group policy extension ext = gp_smb_conf_ext(logger, lp, machine_creds, store) ext.process_group_policy([], gpos) lp = LoadParm(f.name) template_homedir = lp.get('template homedir') self.assertEquals(template_homedir, '/home/samba/%D/%U', 'template homedir was not applied') apply_group_policies = lp.get('apply group policies') self.assertTrue(apply_group_policies, 'apply group policies was not applied') ldap_timeout = lp.get('ldap timeout') self.assertEquals(ldap_timeout, 9999, 'ldap timeout was not applied') # Remove policy gp_db = store.get_gplog(machine_creds.get_username()) del_gpos = get_deleted_gpos_list(gp_db, []) ext.process_group_policy(del_gpos, []) lp = LoadParm(f.name) template_homedir = lp.get('template homedir') self.assertEquals(template_homedir, self.lp.get('template homedir'), 'template homedir was not unapplied') apply_group_policies = lp.get('apply group policies') self.assertEquals(apply_group_policies, self.lp.get('apply group policies'), 'apply group policies was not unapplied') ldap_timeout = lp.get('ldap timeout') self.assertEquals(ldap_timeout, self.lp.get('ldap timeout'), 'ldap timeout was not unapplied') # Unstage the Registry.pol file unstage_file(reg_pol) def test_gp_motd(self): local_path = self.lp.cache_path('gpo_cache') guid = '{31B2F340-016D-11D2-945F-00C04FB984F9}' reg_pol = os.path.join(local_path, policies, guid, 'MACHINE/REGISTRY.POL') logger = logging.getLogger('gpo_tests') cache_dir = self.lp.get('cache directory') store = GPOStorage(os.path.join(cache_dir, 'gpo.tdb')) machine_creds = Credentials() machine_creds.guess(self.lp) machine_creds.set_machine_account() # Initialize the group policy extension ext = gp_msgs_ext(logger, self.lp, machine_creds, store) ads = gpo.ADS_STRUCT(self.server, self.lp, machine_creds) if ads.connect(): gpos = ads.get_gpo_list(machine_creds.get_username()) # Stage the Registry.pol file with test data stage = preg.file() e1 = preg.entry() e1.keyname = b'Software\\Policies\\Samba\\Unix Settings\\Messages' e1.valuename = b'motd' e1.type = 1 e1.data = b'Have a lot of fun!' 
        stage.num_entries = 2
        e2 = preg.entry()
        e2.keyname = b'Software\\Policies\\Samba\\Unix Settings\\Messages'
        e2.valuename = b'issue'
        e2.type = 1
        e2.data = b'Welcome to \\s \\r \\l'
        stage.entries = [e1, e2]

        ret = stage_file(reg_pol, ndr_pack(stage))
        self.assertTrue(ret, 'Could not create the target %s' % reg_pol)

        # Process all gpos, with temp output directory
        with TemporaryDirectory() as dname:
            ext.process_group_policy([], gpos, dname)

            motd_file = os.path.join(dname, 'motd')
            self.assertTrue(os.path.exists(motd_file),
                            'Message of the day file not created')
            data = open(motd_file, 'r').read()
            self.assertEquals(data, e1.data, 'Message of the day not applied')

            issue_file = os.path.join(dname, 'issue')
            self.assertTrue(os.path.exists(issue_file),
                            'Login Prompt Message file not created')
            data = open(issue_file, 'r').read()
            self.assertEquals(data, e2.data, 'Login Prompt Message not applied')

            # Unapply policy, and ensure the test files are removed
            gp_db = store.get_gplog(machine_creds.get_username())
            del_gpos = get_deleted_gpos_list(gp_db, [])
            ext.process_group_policy(del_gpos, [], dname)

            data = open(motd_file, 'r').read()
            self.assertFalse(data, 'Message of the day file not removed')
            data = open(issue_file, 'r').read()
            self.assertFalse(data, 'Login Prompt Message file not removed')

        # Unstage the Registry.pol file
        unstage_file(reg_pol)
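The tests above lean on stage_file() and unstage_file() helpers to drop policy files into the local sysvol/cache paths and clean them up afterwards. As a minimal sketch only, assuming the helpers merely need to create parent directories, write the raw payload, and remove the file again (the real helpers may differ in naming and error handling):

import os
import errno


def stage_file(path, data):
    """Sketch: write `data` to `path`, creating parent directories as needed."""
    try:
        os.makedirs(os.path.dirname(path))
    except OSError as e:
        if e.errno != errno.EEXIST:  # an already existing directory is fine
            return False
    mode = 'wb' if isinstance(data, bytes) else 'w'
    with open(path, mode) as f:
        f.write(data)
    return True


def unstage_file(path):
    """Sketch: remove a previously staged file, ignoring a missing file."""
    try:
        os.unlink(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            return False
    return True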
class MigrationFix(object):
    def __init__(self, username=None, no_dry_run=False, mysql_string=None):
        self.lp = LoadParm()
        self.lp.set('debug level', '0')
        self.lp.load_default()
        self.username = username
        self.no_dry_run = no_dry_run
        self.conn = self._open_mysql_conn(mysql_string)

    def _open_mysql_conn(self, mysql_conn):
        if mysql_conn is None:
            conn_str = self.lp.get('mapiproxy:openchangedb')
        else:
            conn_str = mysql_conn
        if not conn_str:
            raise Exception("No mysql connection string specified and no "
                            "mapiproxy:openchangedb param option found")

        # mysql://openchange:password@localhost/openchange
        m = re.search('(?P<scheme>.+)://(?P<user>.+):(?P<pass>.+)@'
                      '(?P<host>.+)/(?P<db>.+)', conn_str)
        if not m:
            raise Exception("Unable to parse mysql connection string: %s"
                            % conn_str)
        group_dict = m.groupdict()
        if group_dict['scheme'] != 'mysql':
            raise Exception("mysql connection string should start with "
                            "mysql:// (got %s)" % group_dict['scheme'])

        conn = MySQLdb.connect(host=group_dict['host'],
                               user=group_dict['user'],
                               passwd=group_dict['pass'],
                               db=group_dict['db'])
        conn.autocommit(True)
        return conn

    def fix(self, username, folder_id, uri, mailbox_id, ou_id):
        error = False
        c = self.conn.cursor()
        c.execute("SELECT fmid FROM mapistore_indexing "
                  "WHERE username=%s AND url=%s", (username, uri))
        result = c.fetchone()
        if result is None:
            print '[KO]: %s: could not find fmid for %s' % (username, uri)
            return True

        fmid = result[0]
        if str(fmid) != str(folder_id):
            error = True
            if self.no_dry_run is True:
                c = self.conn.cursor()
                c.execute("UPDATE folders SET folder_id=%s "
                          "WHERE MAPIStoreURI=%s AND mailbox_id=%s AND ou_id=%s",
                          (fmid, uri, mailbox_id, ou_id))
                print '[FIX]: %s: folder_id for %s has been fixed and is now set to %s' % (username, uri, fmid)
            else:
                print '[KO]: %s: Mismatch for %s: found %s, expected %s' % (username, uri, folder_id, fmid)
        return error

    def run_all(self):
        c = self.conn.cursor()
        c.execute("SELECT id,ou_id,name FROM mailboxes")
        rows = c.fetchall()
        for row in rows:
            mailbox_id = row[0]
            ou_id = row[1]
            username = row[2]

            # Retrieve all MAPIStoreURI from folders table for username
            c = self.conn.cursor()
            c.execute("SELECT folder_id,MAPIStoreURI FROM folders "
                      "WHERE mailbox_id=%s AND ou_id=%s AND MAPIStoreURI!=\"\"",
                      (mailbox_id, ou_id))
            frows = c.fetchall()

            # Now check in mapistore_indexing if fmid are matching for given URI
            gl_error = False
            for frow in frows:
                folder_id = frow[0]
                uri = frow[1]
                error = self.fix(username, folder_id, uri, mailbox_id, ou_id)
                if error is True:
                    gl_error = True

            if gl_error is False:
                print '[OK]: %s is OK' % (username)

    def run(self):
        # Retrieve mailbox id and ou_id for the requested username
        c = self.conn.cursor()
        c.execute("SELECT id,ou_id FROM mailboxes WHERE name=%s",
                  (self.username,))
        (mailbox_id, ou_id) = c.fetchone()

        # Retrieve all MAPIStoreURI from folders table for username
        c = self.conn.cursor()
        c.execute("SELECT folder_id,MAPIStoreURI FROM folders "
                  "WHERE mailbox_id=%s AND ou_id=%s AND MAPIStoreURI!=\"\"",
                  (mailbox_id, ou_id))
        rows = c.fetchall()

        # Now check in mapistore_indexing if fmid are matching for given URI
        for row in rows:
            folder_id = row[0]
            uri = row[1]
            self.fix(self.username, folder_id, uri, mailbox_id, ou_id)
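MigrationFix only defines the checking and fixing logic; it still needs a small driver. A minimal sketch of such an entry point, assuming it lives in the same script as MigrationFix and uses argparse; the option names (--user, --apply, --openchangedb-uri) are illustrative and not taken from the original tool:

# Hypothetical entry point -- option names are illustrative only.
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Check (and optionally fix) folder_id mismatches against '
                    'mapistore_indexing')
    parser.add_argument('--user', help='only check this mailbox owner')
    parser.add_argument('--apply', action='store_true',
                        help='really update the folders table (default: dry run)')
    parser.add_argument('--openchangedb-uri',
                        help='mysql://user:pass@host/db (default: smb.conf setting)')
    args = parser.parse_args()

    fixer = MigrationFix(username=args.user,
                         no_dry_run=args.apply,
                         mysql_string=args.openchangedb_uri)
    if args.user:
        fixer.run()        # single-user check
    else:
        fixer.run_all()    # walk every mailbox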
    def test_vgp_access_list(self):
        lp = LoadParm()
        lp.load(os.environ['SERVERCONFFILE'])
        local_path = lp.get('path', 'sysvol')
        vgp_xml = os.path.join(local_path, lp.get('realm').lower(), 'Policies',
                               self.gpo_guid, 'Machine/VGP/VTLA/VAS',
                               'HostAccessControl/Allow/manifest.xml')

        stage = etree.Element('vgppolicy')
        policysetting = etree.SubElement(stage, 'policysetting')
        pv = etree.SubElement(policysetting, 'version')
        pv.text = '1'
        name = etree.SubElement(policysetting, 'name')
        name.text = 'Host Access Control'
        description = etree.SubElement(policysetting, 'description')
        description.text = 'Represents host access control data (pam_access)'
        apply_mode = etree.SubElement(policysetting, 'apply_mode')
        apply_mode.text = 'merge'
        data = etree.SubElement(policysetting, 'data')

        listelement = etree.SubElement(data, 'listelement')
        etype = etree.SubElement(listelement, 'type')
        etype.text = 'USER'
        entry = etree.SubElement(listelement, 'entry')
        entry.text = 'goodguy@%s' % lp.get('realm').lower()
        adobject = etree.SubElement(listelement, 'adobject')
        name = etree.SubElement(adobject, 'name')
        name.text = 'goodguy'
        domain = etree.SubElement(adobject, 'domain')
        domain.text = lp.get('realm').lower()
        etype = etree.SubElement(adobject, 'type')
        etype.text = 'user'

        groupattr = etree.SubElement(data, 'groupattr')
        groupattr.text = 'samAccountName'

        listelement = etree.SubElement(data, 'listelement')
        etype = etree.SubElement(listelement, 'type')
        etype.text = 'GROUP'
        entry = etree.SubElement(listelement, 'entry')
        entry.text = '%s\\goodguys' % lp.get('realm').lower()
        adobject = etree.SubElement(listelement, 'adobject')
        name = etree.SubElement(adobject, 'name')
        name.text = 'goodguys'
        domain = etree.SubElement(adobject, 'domain')
        domain.text = lp.get('realm').lower()
        etype = etree.SubElement(adobject, 'type')
        etype.text = 'group'

        ret = stage_file(vgp_xml, etree.tostring(stage, 'utf-8'))
        self.assertTrue(ret, 'Could not create the target %s' % vgp_xml)

        uentry = '+:%s\\goodguy:ALL' % domain.text
        gentry = '+:%s\\goodguys:ALL' % domain.text
        (result, out, err) = self.runsublevelcmd(
            "gpo", ("manage", "access", "list"), self.gpo_guid,
            "-H", "ldap://%s" % os.environ["SERVER"],
            "-U%s%%%s" % (os.environ["USERNAME"], os.environ["PASSWORD"]))
        self.assertIn(uentry, out, 'The test entry was not found!')
        self.assertIn(gentry, out, 'The test entry was not found!')

        # Unstage the manifest.xml file
        unstage_file(vgp_xml)
class SambaOCHelper(object):
    def __init__(self):
        self.samba_lp = LoadParm()
        self.samba_lp.set('debug level', '0')
        self.samba_lp.load_default()
        self.next_fmid = None
        url = self.samba_lp.get('dcerpc_mapiproxy:samdb_url') or \
            self.samba_lp.private_path("sam.ldb")
        self.samdb = SamDB(url=url, lp=self.samba_lp,
                           session_info=system_session())
        self.conn = self._open_mysql_connection()

    def _open_mysql_connection(self):
        connection_string = self.samba_lp.get('mapiproxy:openchangedb')
        if not connection_string:
            raise Exception("Not found mapiproxy:openchangedb on samba "
                            "configuration")
        # mysql://openchange:password@localhost/openchange
        m = re.search(r'(?P<scheme>.+)://(?P<user>.+):(?P<pass>.+)@(?P<host>.+)/(?P<db>.+)',
                      connection_string)
        if not m:
            raise Exception("Unable to parse mapiproxy:openchangedb: %s"
                            % connection_string)
        group_dict = m.groupdict()
        if group_dict['scheme'] != 'mysql':
            raise Exception("mapiproxy:openchangedb should start with "
                            "mysql:// (we got %s)" % group_dict['scheme'])

        conn = MySQLdb.connect(host=group_dict['host'],
                               user=group_dict['user'],
                               passwd=group_dict['pass'],
                               db=group_dict['db'])
        conn.autocommit(True)
        return conn

    def invalid_user(self, username):
        ret = self.samdb.search(base=self.samdb.domain_dn(),
                                scope=ldb.SCOPE_SUBTREE,
                                expression="(sAMAccountName=%s)"
                                % ldb.binary_encode(username))
        return len(ret) != 1

    def find_email_of(self, username):
        ret = self.samdb.search(base=self.samdb.domain_dn(),
                                scope=ldb.SCOPE_SUBTREE, attrs=["mail"],
                                expression="(sAMAccountName=%s)"
                                % ldb.binary_encode(username))
        return ret[0]["mail"][0]

    def active_openchange_users(self):
        c = self.conn.cursor()
        c.execute("SELECT name FROM mailboxes")
        return sorted([row[0] for row in c.fetchall()])

    def allocate_fmids(self, count, username):
        if self.next_fmid is None:
            c = self.conn.cursor()
            c.execute("SELECT next_fmid FROM mapistore_indexes "
                      "WHERE username = '%s'" % username)
            self.next_fmid = c.fetchone()[0]
        if self.next_fmid is not None:
            self.next_fmid = int(self.next_fmid) + count
        return (int(self.next_fmid) - count, self.next_fmid)

    def create_indexes(self, username):
        c = self.conn.cursor()
        c.execute("INSERT INTO mapistore_indexes (username,next_fmid) "
                  "VALUES('%s','1024')" % (username))
        return

    def commit_start(self):
        self.conn.autocommit(False)

    def insert_indexing(self, username, fmid, url):
        c = self.conn.cursor()
        c.execute("INSERT INTO mapistore_indexing (username,fmid,url,soft_deleted) "
                  "VALUES('%s','%s','%s', '0')" % (username, str(fmid), url))

    def update_indexes(self, count, username):
        c = self.conn.cursor()
        updated_number = int(count) + int(self.next_fmid)
        print "Updating next_fmid to %s" % str(updated_number)
        c.execute("UPDATE mapistore_indexes SET next_fmid='%s' "
                  "WHERE username='%s'" % (str(updated_number), username))

    def commit_end(self):
        c = self.conn.cursor()
        self.conn.commit()
        c.close()
        self.conn.autocommit(True)
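The fmid-allocation methods in this variant of SambaOCHelper are designed to run inside a single MySQL transaction bracketed by commit_start()/commit_end(). A sketch of one plausible call sequence, assuming a provisioning step that registers two folder URIs for one user; the username, URIs, and the count of 2 are invented for illustration:

# Illustrative only: 'jdoe' and the sogo:// URIs are made-up example values.
helper = SambaOCHelper()
username = 'jdoe'

if helper.invalid_user(username):
    raise Exception("%s is not a valid sAMAccountName" % username)

helper.commit_start()                      # switch the connection to manual commit
first, next_free = helper.allocate_fmids(2, username)
helper.insert_indexing(username, first, 'sogo://jdoe:jdoe@mail/folderINBOX/')
helper.insert_indexing(username, first + 1, 'sogo://jdoe:jdoe@mail/folderDrafts/')
# Persist a new next_fmid; note the helper adds `count` on top of its
# in-memory counter, so allocated ids are never handed out twice.
helper.update_indexes(2, username)
helper.commit_end()                        # commit and restore autocommit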
if __name__ == '__main__':
    ## most of this is extracted from source4/scripting/python/samba/provision/*
    lp = LoadParm()
    lp.load('/etc/samba/smb.conf')

    samdb = SamDB('/var/lib/samba/private/sam.ldb',
                  session_info=system_session(lp), lp=lp)
    secretsdb = samba.Ldb('/var/lib/samba/private/secrets.ldb',
                          session_info=system_session(lp), lp=lp)

    paths = ProvisionPaths()
    paths.private_dir = lp.get("private dir")

    names = ProvisionNames()
    # NT domain, kerberos realm, root dn, domain dn, domain dns name
    names.realm = lp.get("realm").upper()
    names.domain = lp.get("workgroup").upper()
    names.domaindn = samdb.domain_dn()
    names.dnsdomain = samba.ldb.Dn(samdb,
                                   names.domaindn).canonical_str().replace("/", "")
    basedn = samba.dn_from_dns_name(names.dnsdomain)

    # Get the netbiosname first (could be obtained from smb.conf in theory)
    res = secretsdb.search(expression="(flatname=%s)" % names.domain,
                           base="CN=Primary Domains",
                           scope=samba.ldb.SCOPE_SUBTREE,