def testInit(self):
    """Construct an empty or seeded PasswdMap."""
    # An argument-less constructor must yield an empty PasswdMap.
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(passwd.PasswdMap,
                     type(passwd.PasswdMap()),
                     msg='failed to create empty PasswdMap')
    # Seeding with a list of entries must store those entries.
    pmap = passwd.PasswdMap([self._good_entry])
    self.assertEqual(self._good_entry,
                     pmap.PopItem(),
                     msg='failed to seed PasswdMap with list')
    # Non-MapEntry contents are rejected with TypeError.
    self.assertRaises(TypeError, passwd.PasswdMap, ['string'])
def testIncrementalUpdate(self): """An incremental update reads a partial map and merges it.""" # Unlike in a full update, we create a cache map and a source map, and # let it merge them. If it goes to write the merged map, we're good. # Also check that timestamps were updated, as in testFullUpdate above. def compare_function(map_object): return len(map_object) == 2 original_modify_stamp = 1 new_modify_stamp = 2 updater = map_updater.MapUpdater(config.MAP_PASSWORD, self.workdir, {}, can_do_incremental=True) updater.WriteModifyTimestamp(original_modify_stamp) cache_map_entry = passwd.PasswdMapEntry({ 'name': 'bar', 'uid': 20, 'gid': 20 }) cache_map = passwd.PasswdMap([cache_map_entry]) cache_map.SetModifyTimestamp(original_modify_stamp) cache_mock = self.mox.CreateMock(caches.Cache) cache_mock.GetMap().AndReturn(cache_map) cache_mock.WriteMap(map_data=mox.Func(compare_function)).AndReturn(0) source_map_entry = passwd.PasswdMapEntry({ 'name': 'foo', 'uid': 10, 'gid': 10 }) source_map = passwd.PasswdMap([source_map_entry]) source_map.SetModifyTimestamp(new_modify_stamp) source_mock = self.mox.CreateMock(source.Source) source_mock.GetMap(config.MAP_PASSWORD, location=None, since=original_modify_stamp).AndReturn(source_map) self.mox.ReplayAll() self.assertEqual( 0, updater.UpdateCacheFromSource(cache_mock, source_mock, incremental=True, force_write=False, location=None)) self.assertEqual(updater.GetModifyTimestamp(), new_modify_stamp) self.assertNotEqual(updater.GetUpdateTimestamp(), None)
def testWriteCacheAndIndex(self):
    """Write() plus WriteIndex() creates and later refreshes index files."""
    handler = files.FilesPasswdMapHandler(self.config)

    def index_path(suffix):
        # Index files live next to the cache file, suffixed by the field.
        return handler.GetCacheFilename() + suffix

    initial_entries = [
        passwd.PasswdMapEntry(dict(name='foo', uid=10, gid=10)),
        passwd.PasswdMapEntry(dict(name='bar', uid=11, gid=11)),
    ]
    written = handler.Write(passwd.PasswdMap(initial_entries))
    handler.WriteIndex()
    for expected_name in ('foo', 'bar'):
        self.assertTrue(expected_name in written)
    for suffix in ('.ixname', '.ixuid'):
        self.assertTrue(os.path.exists(index_path(suffix)),
                        'Index not created %s' % index_path(suffix))

    # Rewrite the cache with an extra entry; the indexes are now stale.
    larger_entries = [
        passwd.PasswdMapEntry(dict(name='foo', uid=10, gid=10)),
        passwd.PasswdMapEntry(dict(name='bar', uid=11, gid=11)),
        passwd.PasswdMapEntry(dict(name='quux', uid=12, gid=11)),
    ]
    written = handler.Write(passwd.PasswdMap(larger_entries))
    for expected_name in ('foo', 'bar', 'quux'):
        self.assertTrue(expected_name in written)

    # Stale indexes still reflect only the first two entries.
    with open(index_path('.ixname')) as stale_ixname:
        self.assertEqual('bar\x0015\x00\n', stale_ixname.readline())
        self.assertEqual('foo\x000\x00\x00\n', stale_ixname.readline())
    with open(index_path('.ixuid')) as stale_ixuid:
        self.assertEqual('10\x000\x00\x00\n', stale_ixuid.readline())
        self.assertEqual('11\x0015\x00\n', stale_ixuid.readline())

    # After WriteIndex() the indexes cover all three entries.
    handler.WriteIndex()
    with open(index_path('.ixname')) as fresh_ixname:
        self.assertEqual('bar\x0015\x00\x00\n', fresh_ixname.readline())
        self.assertEqual('foo\x000\x00\x00\x00\n', fresh_ixname.readline())
        self.assertEqual('quux\x0030\x00\n', fresh_ixname.readline())
    with open(index_path('.ixuid')) as fresh_ixuid:
        self.assertEqual('10\x000\x00\x00\n', fresh_ixuid.readline())
        self.assertEqual('11\x0015\x00\n', fresh_ixuid.readline())
        self.assertEqual('12\x0030\x00\n', fresh_ixuid.readline())
def setUp(self):
    """Install dummy source/cache plumbing and fixture maps for the tests."""
    super(TestVerifyCommand, self).setUp()

    class DummyConfig(object):
        # Bare attribute container standing in for a real config object.
        pass

    class DummySource(source.Source):
        # Minimal source whose Verify() always reports success.
        name = 'dummy'

        def Verify(self):
            return 0

    # Instead of a DummyCache, we will override cache_factory.Create so
    # we can return a pmock cache object.
    self.original_caches_create = cache_factory.Create
    self.original_sources_create = source_factory.Create
    # Add dummy source to the set of implementations of sources.
    source_factory.RegisterImplementation(DummySource)
    # Create a config with a section for a passwd map.
    self.conf = DummyConfig()
    self.conf.options = {config.MAP_PASSWORD: config.MapOptions()}
    self.conf.options[config.MAP_PASSWORD].cache = {'name': 'dummy'}
    self.conf.options[config.MAP_PASSWORD].source = {'name': 'dummy'}
    # Save the originals that individual tests may monkey-patch, so
    # tearDown can restore them.
    self.original_verify_configuration = config.VerifyConfiguration
    self.original_getmap = nss.GetMap
    self.original_getpwall = pwd.getpwall
    self.original_getgrall = grp.getgrall
    # Setup maps used by VerifyMap testing.
    big_map = passwd.PasswdMap()
    map_entry1 = passwd.PasswdMapEntry()
    map_entry1.name = 'foo'
    map_entry1.uid = 10
    map_entry1.gid = 10
    big_map.Add(map_entry1)
    map_entry2 = passwd.PasswdMapEntry()
    map_entry2.name = 'bar'
    map_entry2.uid = 20
    map_entry2.gid = 20
    big_map.Add(map_entry2)
    # small_map shares map_entry1 with big_map; it is a strict subset.
    small_map = passwd.PasswdMap()
    small_map.Add(map_entry1)
    self.big_map = big_map
    self.small_map = small_map
def testUpdateMapsTrapsPermissionDenied(self):
    """UpdateMaps() returns 1 when a map update raises PermissionDenied."""
    # Make the update itself fail with PermissionDenied.
    self.mox.StubOutWithMock(map_updater.MapUpdater, 'UpdateFromSource')
    map_updater.MapUpdater.UpdateFromSource(mox.IgnoreArg(),
                                            incremental=True,
                                            force_write=False).AndRaise(
                                                error.PermissionDenied)
    # UpdateMaps() takes (and later releases) the pid-file lock.
    self.mox.StubOutClassWithMocks(lock, 'PidFile')
    lock_mock = lock.PidFile(filename=None)
    lock_mock.Lock(force=False).AndReturn(True)
    lock_mock.Locked().AndReturn(True)
    lock_mock.Unlock()
    self.conf.maps = [config.MAP_PASSWORD]
    self.conf.cache = 'dummy'
    modify_stamp = 1
    map_entry = passwd.PasswdMapEntry({'name': 'foo', 'uid': 10, 'gid': 10})
    passwd_map = passwd.PasswdMap([map_entry])
    passwd_map.SetModifyTimestamp(modify_stamp)
    source_mock = self.mox.CreateMock(source.Source)
    self.mox.StubOutWithMock(source_factory, 'Create')
    source_factory.Create(self.conf.options[
        config.MAP_PASSWORD].source).AndReturn(source_mock)
    # cache_factory.Create is stubbed with no recorded expectations: the
    # PermissionDenied is raised before any cache would be created.
    cache_mock = self.mox.CreateMock(caches.Cache)
    self.mox.StubOutWithMock(cache_factory, 'Create')
    self.mox.ReplayAll()
    c = command.Update()
    # A PermissionDenied during any map update is reported as status 1.
    self.assertEqual(
        1, c.UpdateMaps(self.conf, incremental=True, force_write=False))
def testUpdateSingleMaps(self):
    """UpdateMaps() with one configured map updates it and returns 0."""
    # UpdateMaps() takes (and later releases) the pid-file lock.
    self.mox.StubOutClassWithMocks(lock, 'PidFile')
    lock_mock = lock.PidFile(filename=None)
    lock_mock.Lock(force=False).AndReturn(True)
    lock_mock.Locked().AndReturn(True)
    lock_mock.Unlock()
    self.conf.maps = [config.MAP_PASSWORD]
    self.conf.cache = 'dummy'
    modify_stamp = 1
    map_entry = passwd.PasswdMapEntry({'name': 'foo', 'uid': 10, 'gid': 10})
    passwd_map = passwd.PasswdMap([map_entry])
    passwd_map.SetModifyTimestamp(modify_stamp)
    # The source hands back our map; the cache must be asked to write
    # exactly that map.
    source_mock = self.mox.CreateMock(source.Source)
    source_mock.GetMap(config.MAP_PASSWORD,
                       location=None).AndReturn(passwd_map)
    self.mox.StubOutWithMock(source_factory, 'Create')
    source_factory.Create(self.conf.options[
        config.MAP_PASSWORD].source).AndReturn(source_mock)
    cache_mock = self.mox.CreateMock(caches.Cache)
    cache_mock.WriteMap(map_data=passwd_map).AndReturn(0)
    self.mox.StubOutWithMock(cache_factory, 'Create')
    cache_factory.Create(self.conf.options[config.MAP_PASSWORD].cache,
                         config.MAP_PASSWORD).AndReturn(cache_mock)
    self.mox.ReplayAll()
    c = command.Update()
    self.assertEqual(
        0, c.UpdateMaps(self.conf, incremental=True, force_write=False))
def testFullUpdateOnEmptySource(self):
    """A full update as above, but instead, the initial source is empty."""
    original_modify_stamp = time.gmtime(1)
    new_modify_stamp = time.gmtime(2)
    # Construct an updater
    self.updater = files_updater.FileMapUpdater(config.MAP_PASSWORD,
                                                self.workdir, {
                                                    'name': 'files',
                                                    'dir': self.workdir2
                                                })
    self.updater.WriteModifyTimestamp(original_modify_stamp)
    # Construct a cache pre-populated with one entry.
    cache = files.FilesPasswdMapHandler({'dir': self.workdir2})
    map_entry = passwd.PasswdMapEntry({'name': 'foo', 'uid': 10, 'gid': 10})
    password_map = passwd.PasswdMap()
    password_map.SetModifyTimestamp(new_modify_stamp)
    password_map.Add(map_entry)
    cache.Write(password_map)
    # The source returns None, i.e. it has no file to offer.
    source_mock = self.mox.CreateMock(source.FileSource)
    source_mock.GetFile(config.MAP_PASSWORD,
                        mox.IgnoreArg(),
                        current_file=mox.IgnoreArg(),
                        location=None).AndReturn(None)
    self.mox.ReplayAll()
    # An empty source must raise EmptyMap rather than clobber the cache.
    self.assertRaises(error.EmptyMap,
                      self.updater.UpdateCacheFromSource,
                      cache,
                      source_mock,
                      force_write=False,
                      location=None)
    # Timestamps must not advance after the failed update.
    self.assertNotEqual(new_modify_stamp, self.updater.GetModifyTimestamp())
    self.assertEqual(None, self.updater.GetUpdateTimestamp())
def testExists(self):
    """Verify Exists() checks for presence of MapEntry objects."""
    pwd_map = passwd.PasswdMap()
    good_entry = self._good_entry
    # The entry must be reported absent until it has been added.
    self.assertFalse(pwd_map.Exists(good_entry))
    pwd_map.Add(good_entry)
    self.assertTrue(pwd_map.Exists(good_entry))
def testNssDbPasswdHandlerWriteData(self):
    """WriteData() emits the three makedb lines for a passwd entry."""
    entry_string = 'foo:x:1000:1000:foo:/:/bin/sh'
    # makedb is fed one line per lookup key: by name, by uid, by index.
    makedb_stdin = self.mox.CreateMock(sys.stdin)
    makedb_stdin.write('.foo %s\n' % entry_string)
    makedb_stdin.write('=1000 %s\n' % entry_string)
    makedb_stdin.write('00 %s\n' % entry_string)
    passwd_map = passwd.PasswdMap()
    passwd_map_entry = passwd.PasswdMapEntry()
    passwd_map_entry.name = 'foo'
    passwd_map_entry.uid = 1000
    passwd_map_entry.gid = 1000
    passwd_map_entry.gecos = 'foo'
    passwd_map_entry.dir = '/'
    passwd_map_entry.shell = '/bin/sh'
    passwd_map_entry.passwd = 'x'
    # assertTrue replaces the deprecated failUnless alias.
    self.assertTrue(passwd_map.Add(passwd_map_entry))
    writer = nssdb.NssDbPasswdHandler({
        'makedb': '/bin/false',
        'dir': '/tmp'
    })
    self.mox.ReplayAll()
    writer.WriteData(makedb_stdin, passwd_map_entry, 0)
def testIndexCreation(self):
    """WriteIndex() builds .ixname and .ixuid index files sorted by key."""
    cache = files.FilesPasswdMapHandler(self.config)
    entries = [
        passwd.PasswdMapEntry(dict(name='foo', uid=10, gid=10)),
        passwd.PasswdMapEntry(dict(name='bar', uid=11, gid=11)),
        passwd.PasswdMapEntry(dict(name='quux', uid=12, gid=11)),
    ]
    pmap = passwd.PasswdMap(entries)
    cache.Write(pmap)
    cache.WriteIndex()

    index_filename = cache.GetCacheFilename() + '.ixname'
    # assertTrue replaces the deprecated failUnless alias.
    self.assertTrue(os.path.exists(index_filename),
                    'Index not created %s' % index_filename)
    # 'with' closes the handle; the original leaked both file objects.
    with open(index_filename) as f:
        self.assertEqual('bar\x0015\x00\x00\n', f.readline())
        self.assertEqual('foo\x000\x00\x00\x00\n', f.readline())
        self.assertEqual('quux\x0030\x00\n', f.readline())

    index_filename = cache.GetCacheFilename() + '.ixuid'
    self.assertTrue(os.path.exists(index_filename),
                    'Index not created %s' % index_filename)
    with open(index_filename) as f:
        self.assertEqual('10\x000\x00\x00\n', f.readline())
        self.assertEqual('11\x0015\x00\n', f.readline())
        self.assertEqual('12\x0030\x00\n', f.readline())
def testFullUpdate(self):
    """A full update reads the source, writes to cache, and updates times."""
    original_modify_stamp = 1
    new_modify_stamp = 2
    updater = map_updater.MapUpdater(config.MAP_PASSWORD, self.workdir, {})
    updater.WriteModifyTimestamp(original_modify_stamp)
    # The source offers a one-entry map stamped with the newer time.
    map_entry = passwd.PasswdMapEntry({'name': 'foo', 'uid': 10, 'gid': 10})
    password_map = passwd.PasswdMap([map_entry])
    password_map.SetModifyTimestamp(new_modify_stamp)
    # The cache must be asked to write exactly that map.
    cache_mock = self.mox.CreateMock(files.FilesCache)
    cache_mock.WriteMap(map_data=password_map).AndReturn(0)
    source_mock = self.mox.CreateMock(source.Source)
    source_mock.GetMap(config.MAP_PASSWORD,
                       location=None).AndReturn(password_map)
    self.mox.ReplayAll()
    self.assertEqual(
        0,
        updater.UpdateCacheFromSource(cache_mock, source_mock, False, False,
                                      None))
    # Modify timestamp advances to the source's; update timestamp is set.
    self.assertEqual(updater.GetModifyTimestamp(), new_modify_stamp)
    self.assertNotEqual(updater.GetUpdateTimestamp(), None)
def testVerify(self):
    """Verify() succeeds on a cache that matches what was written."""
    # Can't test if no makedb binary is available on this host.
    if not os.path.exists('/usr/bin/makedb'):
        raise TestSkipped('no /usr/bin/makedb')
    # create a map
    m = passwd.PasswdMap()
    e = passwd.PasswdMapEntry()
    e.name = 'foo'
    e.uid = 1000
    e.gid = 2000
    # assertTrue/assertEqual replace the deprecated failUnless* aliases.
    self.assertTrue(m.Add(e))
    updater = nssdb.NssDbPasswdHandler({
        'dir': self.workdir,
        'makedb': '/usr/bin/makedb'
    })
    written = updater.Write(m)
    self.assertTrue(os.path.exists(updater.temp_cache_filename),
                    'updater.Write() did not create a file')
    retval = updater.Verify(written)
    self.assertEqual(True, retval)
    os.unlink(updater.temp_cache_filename)
def testVerifyFailure(self):
    """Verify() detects a key deleted from the cache and cleans up."""
    pwd_map = passwd.PasswdMap()
    entry = passwd.PasswdMapEntry()
    entry.name = 'foo'
    entry.uid = 1000
    entry.gid = 2000
    self.assertTrue(pwd_map.Add(entry))
    handler = nssdb.NssDbPasswdHandler({
        'dir': self.workdir,
        'makedb': '/usr/bin/makedb'
    })
    written = handler.Write(pwd_map)
    self.assertTrue(os.path.exists(handler.temp_cache_filename),
                    'updater.Write() did not create a file')
    # Corrupt the freshly written cache by dropping its first key.
    db = btopen(handler.temp_cache_filename)
    del db[db.first()[0]]
    db.sync()
    db.close()
    # Verification must fail and remove the temporary cache file.
    self.assertEqual(False, handler.Verify(written))
    self.assertFalse(
        os.path.exists(os.path.join(handler.temp_cache_filename)))
def testWrite(self):
    """Write() reports written keys, drains the map, keeps the file open."""
    handler = files.FilesPasswdMapHandler(self.config)
    map_entry = passwd.PasswdMapEntry({'name': 'foo', 'uid': 10, 'gid': 10})
    source_map = passwd.PasswdMap([map_entry])
    written = handler.Write(source_map)
    self.assertTrue('foo' in written)
    # Write() empties the map as it goes to avoid holding entries in memory.
    self.assertFalse(map_entry in source_map)
    self.assertFalse(handler.temp_cache_file.closed)
def testGetMap(self):
    """GetMap() decodes base64 key/value records into a PasswdMapEntry."""
    passwd_map = passwd.PasswdMap()
    # Values are base64: "MTA=" -> "10", "L2hvbWUvZm9v" -> "/home/foo",
    # "L2Jpbi9iYXNo" -> "/bin/bash", last value -> "How Now Brown Cow".
    cache_info = StringIO.StringIO('''[
        {"Key": "org/users/foo/uid", "Value": "MTA="},
        {"Key": "org/users/foo/gid", "Value": "MTA="},
        {"Key": "org/users/foo/home", "Value": "L2hvbWUvZm9v"},
        {"Key": "org/users/foo/shell", "Value": "L2Jpbi9iYXNo"},
        {"Key": "org/users/foo/comment", "Value": "SG93IE5vdyBCcm93biBDb3c="}
    ]''')
    self.parser.GetMap(cache_info, passwd_map)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(self.good_entry, passwd_map.PopItem())
def testIterate(self):
    """Check that we can iterate over PasswdMap."""
    pmap = passwd.PasswdMap()
    pmap.Add(self._good_entry)
    # Iterating the map yields its entries; a comprehension replaces the
    # manual append loop.
    ret_entries = [entry for entry in pmap]
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(len(ret_entries), 1, msg='iterated over wrong count')
    self.assertEqual(ret_entries[0],
                     self._good_entry,
                     msg='got the wrong entry back')
def testLastModificationTimestamp(self):
    """Test setting/getting of timestamps on maps."""
    pwd_map = passwd.PasswdMap()
    # Timestamps are tracked in whole-second resolution only.
    now = int(time.time())
    pwd_map.SetModifyTimestamp(now)
    # The setter stores the value verbatim and the getter returns it.
    self.assertEqual(now, pwd_map._last_modification_timestamp)
    self.assertEqual(now, pwd_map.GetModifyTimestamp())
def testFullUpdate(self):
    """A full file-based update writes the source file and bumps stamps."""
    original_modify_stamp = time.gmtime(1)
    new_modify_stamp = time.gmtime(2)

    # Construct a fake source.
    def GetFile(map_name, dst_file, current_file, location):
        # Side effect for the mocked GetFile: write a plausible passwd
        # file and give it a fixed mtime so timestamps are predictable.
        print(("GetFile: %s" % dst_file))
        f = open(dst_file, 'w')
        f.write('root:x:0:0:root:/root:/bin/bash\n')
        f.close()
        os.utime(dst_file, (1, 2))
        os.system("ls -al %s" % dst_file)
        return dst_file

    dst_file = mox.Value()
    source_mock = self.mox.CreateMock(source.FileSource)
    source_mock.GetFile(config.MAP_PASSWORD,
                        mox.Remember(dst_file),
                        current_file=mox.IgnoreArg(),
                        location=mox.IgnoreArg()).WithSideEffects(
                            GetFile).AndReturn(dst_file)
    # Construct the cache, pre-populated with one entry.
    cache = files.FilesPasswdMapHandler({'dir': self.workdir2})
    map_entry = passwd.PasswdMapEntry({
        'name': 'foo',
        'uid': 10,
        'gid': 10
    })
    password_map = passwd.PasswdMap()
    password_map.SetModifyTimestamp(new_modify_stamp)
    password_map.Add(map_entry)
    cache.Write(password_map)
    updater = files_updater.FileMapUpdater(config.MAP_PASSWORD,
                                           self.workdir, {
                                               'name': 'files',
                                               'dir': self.workdir2
                                           })
    updater.WriteModifyTimestamp(original_modify_stamp)
    self.mox.ReplayAll()
    self.assertEqual(
        0,
        updater.UpdateCacheFromSource(cache,
                                      source_mock,
                                      force_write=False,
                                      location=None))
    # The updater must adopt the source file's mtime and record an update.
    self.assertEqual(new_modify_stamp, updater.GetModifyTimestamp())
    self.assertNotEqual(None, updater.GetUpdateTimestamp())
def testGetMap(self):
    """GetMap() parses a JSON record into the map, ignoring unknown keys."""
    passwd_map = passwd.PasswdMap()
    # 'irrelevant_key' must be silently ignored by the parser.
    cache_info = StringIO.StringIO('''[
        {
            "Key": "foo",
            "Value": {
                "uid": 10,
                "gid": 10,
                "home": "/home/foo",
                "shell": "/bin/bash",
                "comment": "How Now Brown Cow",
                "irrelevant_key":"bacon"
            }
        }
    ]''')
    self.parser.GetMap(cache_info, passwd_map)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(self.good_entry, passwd_map.PopItem())
def testNssDbPasswdHandlerWrite(self):
    """Write() feeds makedb via pipes and does not commit the cache file."""
    ent = 'foo:x:1000:1000:foo:/:/bin/sh'
    # makedb is fed one line per lookup key: by name, by uid, by index.
    makedb_stdin = self.mox.CreateMock(sys.stdin)
    makedb_stdin.write('.foo %s\n' % ent)
    makedb_stdin.write('=1000 %s\n' % ent)
    makedb_stdin.write('00 %s\n' % ent)
    makedb_stdin.close()
    makedb_stdout = self.mox.CreateMock(sys.stdout)
    makedb_stdout.read().AndReturn('')
    makedb_stdout.close()
    m = passwd.PasswdMap()
    pw = passwd.PasswdMapEntry()
    pw.name = 'foo'
    pw.uid = 1000
    pw.gid = 1000
    pw.gecos = 'foo'
    pw.dir = '/'
    pw.shell = '/bin/sh'
    pw.passwd = 'x'
    pw.Verify()
    # assertTrue/assertFalse replace the deprecated failUnless/failIf.
    self.assertTrue(m.Add(pw))
    # First select reports readable output, second reports it drained.
    self.mox.StubOutWithMock(select, 'select')
    select.select([makedb_stdout], (), (), 0).AndReturn(([37], [], []))
    select.select([makedb_stdout], (), (), 0).AndReturn(([], [], []))

    def SpawnMakeDb():
        # Stand-in for the real spawner; returns our mocked pipes.
        makedb = MakeDbDummy()
        makedb.stdin = makedb_stdin
        makedb.stdout = makedb_stdout
        return makedb

    writer = nssdb.NssDbPasswdHandler({
        'makedb': '/usr/bin/makedb',
        'dir': self.workdir
    })
    writer._SpawnMakeDb = SpawnMakeDb
    self.mox.ReplayAll()
    writer.Write(m)
    tmppasswd = os.path.join(self.workdir, 'passwd.db')
    self.assertFalse(os.path.exists(tmppasswd))
    # just clean it up, Write() doesn't Commit()
    writer._Rollback()
def testContains(self):
    """Verify __contains__ works, and does a deep compare."""
    entry_in_map = self._good_entry
    # Shares Key() with the good entry but differs in other attributes.
    lookalike = passwd.PasswdMapEntry()
    lookalike.name = 'foo'
    # Entirely different entry, not present in the map at all.
    stranger = passwd.PasswdMapEntry()
    stranger.name = 'bar'
    pmap = passwd.PasswdMap([entry_in_map])
    self.assertTrue(entry_in_map in pmap, msg='expected entry to be in map')
    self.assertFalse(stranger in pmap,
                     msg='did not expect entry to be in map')
    self.assertFalse(lookalike in pmap,
                     msg='__contains__ not doing a deep compare')
def testAdd(self):
    """Add raises exceptions for objects it can't add or verify."""
    pwd_map = passwd.PasswdMap()
    good_entry = self._good_entry
    self.assertTrue(pwd_map.Add(good_entry), msg='failed to add new entry.')
    self.assertEqual(1, len(pwd_map), msg='unexpected size for Map.')
    self.assertEqual(pwd_map.PopItem(),
                     good_entry,
                     msg='failed to pop existing entry.')
    # Entries belonging to a different map type are rejected.
    wrong_type_entry = group.GroupMapEntry()
    wrong_type_entry.name = 'foo'
    wrong_type_entry.gid = 10
    self.assertRaises(TypeError, pwd_map.Add, wrong_type_entry)
def GetPasswdMap():
    """Returns a PasswdMap built from nss calls.

    Returns:
        A passwd.PasswdMap holding one PasswdMapEntry per record
        reported by pwd.getpwall().
    """
    passwd_map = passwd.PasswdMap()
    for nss_entry in pwd.getpwall():
        map_entry = passwd.PasswdMapEntry()
        # struct_passwd exposes named fields; use them instead of
        # positional indexing for readability.
        map_entry.name = nss_entry.pw_name
        map_entry.passwd = nss_entry.pw_passwd
        map_entry.uid = nss_entry.pw_uid
        map_entry.gid = nss_entry.pw_gid
        map_entry.gecos = nss_entry.pw_gecos
        map_entry.dir = nss_entry.pw_dir
        map_entry.shell = nss_entry.pw_shell
        passwd_map.Add(map_entry)
    return passwd_map
def testGetMapHasMerge(self):
    """A cache map merged with an empty map reports no change."""
    timestamp = int(time.time())
    # Fake a previous update by writing a timestamp file next to the db.
    update_ts_filename = os.path.join(
        self.workdir, 'passwd.db.nsscache-update-timestamp')
    # 'with' closes the handle even if the write fails.
    with open(update_ts_filename, 'w') as update_ts_file:
        update_ts_file.write(
            '%s\n' %
            time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(timestamp)))
    # Create an empty bdb cache file for the handler to load.
    db_filename = os.path.join(self.workdir, 'passwd.db')
    db = bsddb.btopen(db_filename)
    db.close()
    cache = nssdb.NssDbPasswdHandler({'dir': self.workdir})
    cache_map = cache.GetMap()
    # assertEqual replaces the deprecated assertEquals alias; merging an
    # empty map must leave the cache map unmodified.
    self.assertEqual(False, cache_map.Merge(passwd.PasswdMap()))
    os.unlink(update_ts_filename)
    os.unlink(db_filename)
def testVerifyFailure(self):
    """Verify() detects a key deleted from the cache and cleans up."""
    # Can't test if no makedb binary is available on this host.
    if not os.path.exists('/usr/bin/makedb'):
        raise TestSkipped('no /usr/bin/makedb')

    # Hide the warning that we expect to get
    class TestFilter(logging.Filter):

        def filter(self, record):
            return not record.msg.startswith(
                'verify failed: %d keys missing')

    fltr = TestFilter()
    logging.getLogger('NssDbPasswdHandler').addFilter(fltr)
    # create a map
    m = passwd.PasswdMap()
    e = passwd.PasswdMapEntry()
    e.name = 'foo'
    e.uid = 1000
    e.gid = 2000
    # assertTrue/assertEqual/assertFalse replace the deprecated
    # failUnless/failUnlessEqual/failIf aliases.
    self.assertTrue(m.Add(e))
    updater = nssdb.NssDbPasswdHandler({
        'dir': self.workdir,
        'makedb': '/usr/bin/makedb'
    })
    written = updater.Write(m)
    self.assertTrue(os.path.exists(updater.temp_cache_filename),
                    'updater.Write() did not create a file')
    # change the cache
    db = bsddb.btopen(updater.temp_cache_filename)
    del db[db.first()[0]]
    db.sync()
    db.close()
    retval = updater.Verify(written)
    self.assertEqual(False, retval)
    self.assertFalse(
        os.path.exists(os.path.join(updater.temp_cache_filename)))
    # no longer hide this message
    logging.getLogger('NssDbPasswdHandler').removeFilter(fltr)
def testMerge(self):
    """Verify Merge() throws the right exceptions and correctly merges."""
    # Setup some MapEntry objects with distinct Key()s
    pentry1 = self._good_entry
    pentry2 = passwd.PasswdMapEntry()
    pentry2.name = 'john'
    pentry3 = passwd.PasswdMapEntry()
    pentry3.name = 'jane'
    # Setup some Map objects
    pmap_big = passwd.PasswdMap([pentry1, pentry2])
    pmap_small = passwd.PasswdMap([pentry3])
    # Merge small into big
    self.assertTrue(pmap_big.Merge(pmap_small),
                    msg='Merging small into big failed!')
    self.assertTrue(pmap_big.Exists(pentry1),
                    msg='pentry1 not found in Map')
    # Each message names its own entry (was copy-pasted 'pentry1').
    self.assertTrue(pmap_big.Exists(pentry2),
                    msg='pentry2 not found in Map')
    self.assertTrue(pmap_big.Exists(pentry3),
                    msg='pentry3 not found in Map')
    # A second merge should do nothing
    self.assertFalse(pmap_big.Merge(pmap_small),
                     msg='Re-merging small into big succeeded.')
    # An empty merge should do nothing
    self.assertFalse(pmap_big.Merge(passwd.PasswdMap()),
                     msg='Empty Merge should have done nothing.')
    # Merging a GroupMap should throw TypeError
    gmap = group.GroupMap()
    self.assertRaises(TypeError, pmap_big.Merge, gmap)
    # Merging an older map should throw an InvalidMerge (the comment
    # previously named UnsupportedMap, which the code never asserted).
    old_map = passwd.PasswdMap(modify_time=1)
    new_map = passwd.PasswdMap(modify_time=2)
    self.assertRaises(error.InvalidMerge, new_map.Merge, old_map)
    old_map = passwd.PasswdMap(update_time=1)
    new_map = passwd.PasswdMap(update_time=2)
    self.assertRaises(error.InvalidMerge, new_map.Merge, old_map)
def testWriteTestBdb(self):
    """Write() builds a bdb cache keyed by name, uid and record index."""
    # Can't test if no makedb binary is available on this host.
    if not os.path.exists('/usr/bin/makedb'):
        raise TestSkipped('no /usr/bin/makedb')
    data = passwd.PasswdMap()
    pw = passwd.PasswdMapEntry()
    pw.name = 'foo'
    pw.passwd = 'x'
    pw.uid = 1000
    pw.gid = 1000
    pw.gecos = 'doody'
    pw.dir = '/'
    pw.shell = '/bin/sh'
    # assertTrue/assertEqual replace the deprecated failUnless* aliases.
    self.assertTrue(data.Add(pw))
    # instantiate object under test
    dummy_config = {'dir': self.workdir}
    cache = nssdb.NssDbPasswdHandler(dummy_config)
    written = cache.Write(data)
    self.assertTrue('.foo' in written)
    self.assertTrue('=1000' in written)
    # perform test
    db = bsddb.btopen(cache.temp_cache_filename, 'r')
    self.assertEqual(3, len(db.keys()))
    self.assertTrue('.foo' in db.keys())
    self.assertTrue('=1000' in db.keys())
    self.assertTrue('00' in db.keys())
    # convert data to pwent
    d = '%s:x:%s:%s:%s:%s:%s\x00' % (pw.name, pw.uid, pw.gid, pw.gecos,
                                     pw.dir, pw.shell)
    self.assertEqual(db['00'], d)
    self.assertEqual(db['.foo'], d)
    self.assertEqual(db['=1000'], d)
    # tear down
    os.unlink(cache.temp_cache_filename)
def testVerify(self):
    """A freshly written cache passes Verify()."""
    pwd_map = passwd.PasswdMap()
    entry = passwd.PasswdMapEntry()
    entry.name = 'foo'
    entry.uid = 1000
    entry.gid = 2000
    self.assertTrue(pwd_map.Add(entry))
    handler = nssdb.NssDbPasswdHandler({
        'dir': self.workdir,
        'makedb': '/usr/bin/makedb'
    })
    written = handler.Write(pwd_map)
    self.assertTrue(os.path.exists(handler.temp_cache_filename),
                    'updater.Write() did not create a file')
    # Verification of the untouched cache must succeed.
    self.assertEqual(True, handler.Verify(written))
    os.unlink(handler.temp_cache_filename)
def testWriteTestBdb(self):
    """Write() builds a bdb cache keyed by name, uid and record index."""
    data = passwd.PasswdMap()
    pw = passwd.PasswdMapEntry()
    pw.name = 'foo'
    pw.passwd = 'x'
    pw.uid = 1000
    pw.gid = 1000
    pw.gecos = 'doody'
    pw.dir = '/'
    pw.shell = '/bin/sh'
    self.assertTrue(data.Add(pw))
    # instantiate object under test
    dummy_config = {'dir': self.workdir}
    cache = nssdb.NssDbPasswdHandler(dummy_config)
    written = cache.Write(data)
    self.assertTrue(b'.foo' in written)
    self.assertTrue(b'=1000' in written)
    # perform test; materialize the key list once instead of rebuilding
    # list(db.keys()) for every assertion.
    db = btopen(cache.temp_cache_filename, 'r')
    keys = list(db.keys())
    self.assertEqual(3, len(keys))
    self.assertTrue(b'.foo' in keys)
    self.assertTrue(b'=1000' in keys)
    self.assertTrue(b'00' in keys)
    # convert data to pwent
    d = ('%s:x:%s:%s:%s:%s:%s\x00' %
         (pw.name, pw.uid, pw.gid, pw.gecos, pw.dir,
          pw.shell)).encode('ascii')
    self.assertEqual(db[b'00'], d)
    self.assertEqual(db[b'.foo'], d)
    self.assertEqual(db[b'=1000'], d)
    # tear down
    os.unlink(cache.temp_cache_filename)
def __init__(self, conf, map_name, automount_mountpoint=None):
    """Initialise the Cache object.

    Args:
        conf: A dictionary of key/value pairs
        map_name: A string representation of the map type
        automount_mountpoint: A string containing the automount mountpoint,
            used only by automount maps.

    Raises:
        UnsupportedMap: for map types we don't know about
    """
    super(Cache, self).__init__()
    # Set up a logger for our children
    self.log = logging.getLogger(self.__class__.__name__)
    # Store config info
    self.conf = conf
    self.output_dir = conf.get('dir', '.')
    self.automount_mountpoint = automount_mountpoint
    self.map_name = map_name
    # Dispatch table from map type to the Map class we load into.
    map_class_by_name = {
        config.MAP_PASSWORD: passwd.PasswdMap,
        config.MAP_SSHKEY: sshkey.SshkeyMap,
        config.MAP_GROUP: group.GroupMap,
        config.MAP_SHADOW: shadow.ShadowMap,
        config.MAP_NETGROUP: netgroup.NetgroupMap,
        config.MAP_AUTOMOUNT: automount.AutomountMap,
    }
    if map_name not in map_class_by_name:
        raise error.UnsupportedMap('Cache does not support %s' % map_name)
    # Setup the map we may be asked to load our cache into.
    self.data = map_class_by_name[map_name]()