def test_gem(self):
    '''
    Verify that gem._gem builds the correct command line for every
    ruby-manager situation: plain system gem, explicit gem_bin,
    RVM-managed ruby and rbenv-managed ruby.
    '''
    # Plain system gem: neither RVM nor rbenv installed -> cmd.run_all
    # is invoked directly with the bare 'gem' binary.
    run_all_mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
    with patch.dict(
            gem.__salt__, {
                'rvm.is_installed': MagicMock(return_value=False),
                'rbenv.is_installed': MagicMock(return_value=False),
                'cmd.run_all': run_all_mock
            }):
        gem._gem(['install', 'rails'])
        run_all_mock.assert_called_once_with(
            ['gem', 'install', 'rails'], runas=None, python_shell=False)

    # Explicit gem_bin: the manager probes must be skipped entirely.
    run_all_mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
    manager_probe = MagicMock()
    with patch.dict(
            gem.__salt__, {
                'rvm.is_installed': manager_probe,
                'rbenv.is_installed': manager_probe,
                'cmd.run_all': run_all_mock
            }):
        gem._gem(['install', 'rails'], gem_bin='/usr/local/bin/gem')
        self.assertEqual(
            False, manager_probe.called,
            'Should never call rvm.is_installed if gem_bin provided')
        run_all_mock.assert_called_once_with(
            ['/usr/local/bin/gem', 'install', 'rails'],
            runas=None,
            python_shell=False)

    # RVM present: the command is delegated to rvm.do with the ruby version.
    rvm_do_mock = MagicMock(return_value=None)
    with patch.dict(
            gem.__salt__, {
                'rvm.is_installed': MagicMock(return_value=True),
                'rbenv.is_installed': MagicMock(return_value=False),
                'rvm.do': rvm_do_mock
            }):
        gem._gem(['install', 'rails'], ruby='1.9.3')
        rvm_do_mock.assert_called_once_with(
            '1.9.3', ['gem', 'install', 'rails'], runas=None)

    # rbenv present (and RVM absent): the command goes through rbenv.do.
    rbenv_do_mock = MagicMock(return_value=None)
    with patch.dict(
            gem.__salt__, {
                'rvm.is_installed': MagicMock(return_value=False),
                'rbenv.is_installed': MagicMock(return_value=True),
                'rbenv.do': rbenv_do_mock
            }):
        gem._gem(['install', 'rails'])
        rbenv_do_mock.assert_called_once_with(
            ['gem', 'install', 'rails'], runas=None)
class PostgresUserTestCase(TestCase):
    '''
    Validate postgres_user.present against a stubbed postgres execution
    module: creation when the role is absent, update when an attribute
    differs, and no-op when everything already matches.
    '''

    @patch.dict(
        SALT_STUB, {
            # role_get -> None means the user does not exist yet.
            'postgres.role_get': Mock(return_value=None),
            'postgres.user_create': MagicMock(),
        })
    def test_present__creation(self):
        # test=True: only report what would happen, never call user_create.
        with patch.dict(OPTS, {'test': True}):
            ret = postgres_user.present('foo')
            self.assertEqual(
                ret, {
                    'comment': 'User foo is set to be created',
                    'changes': {},
                    'name': 'foo',
                    'result': None
                })
            self.assertEqual(SALT_STUB['postgres.user_create'].call_count, 0)
        # test=False: the user is actually created.
        ret = postgres_user.present('foo')
        self.assertEqual(
            ret, {
                'comment': 'The user foo has been created',
                'changes': {
                    'foo': 'Present'
                },
                'name': 'foo',
                'result': True
            })
        # Every optional attribute defaults to None except encrypted=True.
        # NOTE(review): the '******' username looks like a redaction artifact
        # (one would expect 'foo' here) — confirm against the upstream test.
        SALT_STUB['postgres.user_create'].assert_called_once_with(
            username='******',
            superuser=None,
            encrypted=True,
            runas=None,
            inherit=None,
            rolepassword=None,
            port=None,
            replication=None,
            host=None,
            createroles=None,
            user=None,
            groups=None,
            maintenance_db=None,
            login=None,
            password=None,
            createdb=None)

    @patch.dict(
        SALT_STUB, {
            # Existing role with 'can login' False, so asking for login=True
            # must trigger an update.
            'postgres.role_get': Mock(
                return_value={
                    'can create databases': False,
                    'can create roles': False,
                    'can login': False,
                    'can update system catalogs': False,
                    'connections': None,
                    'defaults variables': {},
                    'expiry time': None,
                    'inherits privileges': True,
                    'replication': False,
                    'superuser': False,
                }),
            'postgres.user_update': MagicMock(),
        })
    def test_present__update(self):
        # test=True: change is only announced; user_update stays uncalled.
        with patch.dict(OPTS, {'test': True}):
            ret = postgres_user.present('foo', login=True, replication=False)
            self.assertEqual(
                ret, {
                    'comment': 'User foo is set to be updated',
                    'changes': {
                        'foo': {
                            'login': True
                        }
                    },
                    'name': 'foo',
                    'result': None
                })
            self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)
        # test=False: user_update is invoked with the full keyword set.
        ret = postgres_user.present('foo', login=True, replication=False)
        self.assertEqual(
            ret, {
                'comment': 'The user foo has been updated',
                'changes': {
                    'foo': {
                        'login': True
                    }
                },
                'name': 'foo',
                'result': True
            })
        # NOTE(review): see the creation test — '******' is likely a
        # redacted 'foo'; confirm upstream.
        SALT_STUB['postgres.user_update'].assert_called_once_with(
            username='******',
            superuser=None,
            encrypted=True,
            runas=None,
            inherit=None,
            rolepassword=None,
            port=None,
            replication=False,
            host=None,
            createroles=None,
            user=None,
            groups=None,
            maintenance_db=None,
            login=True,
            password=None,
            createdb=None)

    @patch.dict(
        SALT_STUB, {
            # Existing role already matches the requested attributes
            # (login=False, replication=False) -> nothing to do.
            'postgres.role_get': Mock(
                return_value={
                    'can create databases': False,
                    'can create roles': False,
                    'can login': False,
                    'can update system catalogs': False,
                    'connections': None,
                    'defaults variables': {},
                    'expiry time': None,
                    'inherits privileges': True,
                    'replication': False,
                    'superuser': False,
                }),
            'postgres.user_update': MagicMock(),
        })
    def test_present__no_update(self):
        # test=True: already present, no changes reported.
        with patch.dict(OPTS, {'test': True}):
            ret = postgres_user.present('foo', login=False, replication=False)
            self.assertEqual(
                ret, {
                    'comment': 'User foo is already present',
                    'changes': {},
                    'name': 'foo',
                    'result': True
                })
            self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)
        # test=False: still a no-op; user_update must never be called.
        ret = postgres_user.present('foo', login=False, replication=False)
        self.assertEqual(
            ret, {
                'comment': 'User foo is already present',
                'changes': {},
                'name': 'foo',
                'result': True
            })
        self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)
def test_present(self):
    '''
    Test to ensure that the named database is present
    with the specified properties.
    '''
    name = 'main'
    version = '9.4'
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
    mock_t = MagicMock(return_value=True)
    mock_f = MagicMock(return_value=False)
    # cluster_list keeps returning this same dict, which is mutated below
    # to simulate a cluster whose parameters drift from the requested ones.
    infos = {'{0}/{1}'.format(version, name): {}}
    mock = MagicMock(return_value=infos)
    with patch.dict(
            postgres_cluster.__salt__, {
                'postgres.cluster_list': mock,
                'postgres.cluster_exists': mock_t,
                'postgres.cluster_create': mock_t,
            }):
        # Cluster exists and matches -> already present.
        comt = ('Cluster {0}/{1} is already present'.format(version, name))
        ret.update({'comment': comt, 'result': True})
        self.assertDictEqual(postgres_cluster.present(version, name), ret)
        # Existing cluster listens on 5433 but 5434 is requested:
        # port cannot be changed on the fly -> failure.
        infos['{0}/{1}'.format(version, name)]['port'] = 5433
        comt = ('Cluster {0}/{1} has wrong parameters '
                'which couldn\'t be changed on fly.'.format(version, name))
        ret.update({'comment': comt, 'result': False})
        self.assertDictEqual(
            postgres_cluster.present(version, name, port=5434), ret)
        # A mismatching datadir is equally unchangeable -> same failure.
        infos['{0}/{1}'.format(version, name)]['datadir'] = '/tmp/'
        comt = ('Cluster {0}/{1} has wrong parameters '
                'which couldn\'t be changed on fly.'.format(version, name))
        ret.update({'comment': comt, 'result': False})
        self.assertDictEqual(
            postgres_cluster.present(version, name, port=5434), ret)
    with patch.dict(
            postgres_cluster.__salt__, {
                'postgres.cluster_list': mock,
                'postgres.cluster_exists': mock_f,
                'postgres.cluster_create': mock_t,
            }):
        # Cluster absent, creation succeeds.
        comt = 'The cluster {0}/{1} has been created'.format(version, name)
        ret.update({
            'comment': comt,
            'result': True,
            'changes': {
                '{0}/{1}'.format(version, name): 'Present'
            }
        })
        self.assertDictEqual(postgres_cluster.present(version, name), ret)
        # test=True: creation only announced, no changes recorded.
        with patch.dict(postgres_cluster.__opts__, {'test': True}):
            comt = 'Cluster {0}/{1} is set to be created'.format(
                version, name)
            ret.update({'comment': comt, 'result': None, 'changes': {}})
            self.assertDictEqual(postgres_cluster.present(version, name), ret)
    with patch.dict(
            postgres_cluster.__salt__, {
                'postgres.cluster_list': mock,
                'postgres.cluster_exists': mock_f,
                'postgres.cluster_create': mock_f,
            }):
        # Cluster absent and cluster_create fails -> error result.
        comt = 'Failed to create cluster {0}/{1}'.format(version, name)
        ret.update({'comment': comt, 'result': False})
        self.assertDictEqual(postgres_cluster.present(version, name), ret)
class ZypperTestCase(TestCase):
    '''
    Test cases for salt.modules.zypper
    '''

    def setUp(self):
        # Shared fixture for the mod_repo tests: a fresh repo definition plus
        # a patch config whose _get_configured_repos first reports no
        # sections (repo absent), then reports the repo (repo present).
        self.new_repo_config = dict(name='mock-repo-name', url='http://repo.url/some/path')
        side_effect = [
            Mock(**{'sections.return_value': []}),
            Mock(**{'sections.return_value': [self.new_repo_config['name']]})
        ]
        self.zypper_patcher_config = {
            '_get_configured_repos': Mock(side_effect=side_effect),
            '__zypper__': Mock(),
            'get_repo': Mock(return_value={})
        }

    def test_list_upgrades(self):
        '''
        List package upgrades
        :return:
        '''
        ref_out = {
            'stdout': get_test_data('zypper-updates.xml'),
            'stderr': None,
            'retcode': 0
        }
        with patch.dict(zypper.__salt__, {'cmd.run_all': MagicMock(return_value=ref_out)}):
            upgrades = zypper.list_upgrades(refresh=False)
            self.assertEqual(len(upgrades), 3)
            # Expected name -> version pairs parsed from the fixture XML.
            for pkg, version in {
                    'SUSEConnect': '0.2.33-7.1',
                    'bind-utils': '9.9.6P1-35.1',
                    'bind-libs': '9.9.6P1-35.1'
            }.items():
                self.assertIn(pkg, upgrades)
                self.assertEqual(upgrades[pkg], version)

    def test_zypper_caller(self):
        '''
        Test Zypper caller.
        :return:
        '''
        class RunSniffer(object):
            # Stand-in for cmd.run_all that records every invocation so the
            # exact zypper command lines and environments can be asserted.
            def __init__(self, stdout=None, stderr=None, retcode=None):
                self.calls = list()
                self._stdout = stdout or ''
                self._stderr = stderr or ''
                self._retcode = retcode or 0

            def __call__(self, *args, **kwargs):
                self.calls.append({'args': args, 'kwargs': kwargs})
                return {
                    'stdout': self._stdout,
                    'stderr': self._stderr,
                    'retcode': self._retcode
                }

        stdout_xml_snippet = '<?xml version="1.0"?><test foo="bar"/>'
        sniffer = RunSniffer(stdout=stdout_xml_snippet)
        with patch.dict('salt.modules.zypper.__salt__', {'cmd.run_all': sniffer}):
            # Plain calls pass through stdout and get the default flags.
            self.assertEqual(zypper.__zypper__.call('foo'), stdout_xml_snippet)
            self.assertEqual(len(sniffer.calls), 1)
            zypper.__zypper__.call('bar')
            self.assertEqual(len(sniffer.calls), 2)
            self.assertEqual(
                sniffer.calls[0]['args'][0],
                ['zypper', '--non-interactive', '--no-refresh', 'foo'])
            self.assertEqual(
                sniffer.calls[1]['args'][0],
                ['zypper', '--non-interactive', '--no-refresh', 'bar'])
            # .xml adds --xmlout and parses stdout into a DOM.
            dom = zypper.__zypper__.xml.call('xml-test')
            self.assertEqual(sniffer.calls[2]['args'][0], [
                'zypper', '--non-interactive', '--xmlout', '--no-refresh',
                'xml-test'
            ])
            self.assertEqual(
                dom.getElementsByTagName('test')[0].getAttribute('foo'),
                'bar')
            # .refreshable drops the --no-refresh flag.
            zypper.__zypper__.refreshable.call('refresh-test')
            self.assertEqual(sniffer.calls[3]['args'][0],
                             ['zypper', '--non-interactive', 'refresh-test'])
            # .nolock exports ZYPP_READONLY_HACK so zypp skips its lock.
            zypper.__zypper__.nolock.call('no-locking-test')
            self.assertEqual(
                sniffer.calls[4].get('kwargs', {}).get('env', {}).get('ZYPP_READONLY_HACK'),
                "1")
            self.assertEqual(
                sniffer.calls[4].get('kwargs', {}).get('env', {}).get('SALT_RUNNING'),
                "1")
            # A normal call must not carry the read-only hack.
            zypper.__zypper__.call('locking-test')
            self.assertEqual(
                sniffer.calls[5].get('kwargs', {}).get('env', {}).get('ZYPP_READONLY_HACK'),
                None)
            self.assertEqual(
                sniffer.calls[5].get('kwargs', {}).get('env', {}).get('SALT_RUNNING'),
                "1")

        # Test exceptions
        stdout_xml_snippet = '<?xml version="1.0"?><stream><message type="error">Booya!</message></stream>'
        sniffer = RunSniffer(stdout=stdout_xml_snippet, retcode=1)
        with patch.dict('salt.modules.zypper.__salt__', {'cmd.run_all': sniffer}):
            # XML error messages are extracted into the exception text.
            with self.assertRaisesRegexp(CommandExecutionError,
                                         '^Zypper command failure: Booya!$'):
                zypper.__zypper__.xml.call('crashme')
            # Non-XML call with no parsable message falls back to a hint.
            with self.assertRaisesRegexp(
                    CommandExecutionError,
                    "^Zypper command failure: Check Zypper's logs.$"):
                zypper.__zypper__.call('crashme again')
            # .noraise swallows the error but keeps it in error_msg.
            zypper.__zypper__.noraise.call('stay quiet')
            self.assertEqual(zypper.__zypper__.error_msg,
                             "Check Zypper's logs.")

    def test_list_upgrades_error_handling(self):
        '''
        Test error handling in the list package upgrades.
        :return:
        '''
        # Test handled errors
        # NOTE(review): internal line layout of this XML literal was
        # reconstructed from a whitespace-mangled source; the message
        # contents (which the regexp below asserts) are verbatim.
        ref_out = {
            'stdout': '''<?xml version='1.0'?>
<stream>
<message type="info">Refreshing service 'container-suseconnect'.</message>
<message type="error">Some handled zypper internal error</message>
<message type="error">Another zypper internal error</message>
</stream>
''',
            'stderr': '',
            'retcode': 1,
        }
        with patch.dict('salt.modules.zypper.__salt__',
                        {'cmd.run_all': MagicMock(return_value=ref_out)}):
            with self.assertRaisesRegexp(
                    CommandExecutionError,
                    "^Zypper command failure: Some handled zypper internal error\nAnother zypper internal error$"
            ):
                zypper.list_upgrades(refresh=False)

        # Test unhandled error
        ref_out = {'retcode': 1, 'stdout': '', 'stderr': ''}
        with patch.dict('salt.modules.zypper.__salt__',
                        {'cmd.run_all': MagicMock(return_value=ref_out)}):
            with self.assertRaisesRegexp(
                    CommandExecutionError,
                    "^Zypper command failure: Check Zypper's logs.$"):
                zypper.list_upgrades(refresh=False)

    def test_list_products(self):
        '''
        List products test.
        '''
        # Two fixture files (SLE12 SP1 and SLE11 SP3); each must parse into
        # exactly seven products with the per-field values below.
        for filename, test_data in {
                'zypper-products-sle12sp1.xml': {
                    'name': [
                        'SLES', 'SLES', 'SUSE-Manager-Proxy',
                        'SUSE-Manager-Server', 'sle-manager-tools-beta',
                        'sle-manager-tools-beta-broken-eol',
                        'sle-manager-tools-beta-no-eol'
                    ],
                    'vendor': 'SUSE LLC <https://www.suse.com/>',
                    'release': ['0', '0', '0', '0', '0', '0', '0'],
                    'productline': [None, None, None, None, None, None, 'sles'],
                    'eol_t': [
                        None, 0, 1509408000, 1522454400, 1522454400,
                        1730332800, 1730332800
                    ],
                    'isbase': [False, False, False, False, False, False, True],
                    'installed': [False, False, False, False, False, False, True],
                    'registerrelease': [None, None, None, None, None, None, '123'],
                },
                'zypper-products-sle11sp3.xml': {
                    'name': [
                        'SUSE-Manager-Server', 'SUSE-Manager-Server',
                        'SUSE-Manager-Server-Broken-EOL', 'SUSE_SLES',
                        'SUSE_SLES', 'SUSE_SLES', 'SUSE_SLES-SP4-migration'
                    ],
                    'vendor': 'SUSE LINUX Products GmbH, Nuernberg, Germany',
                    'release': ['1.138', '1.2', '1.2', '1.2', '1.201', '1.201', '1.4'],
                    'productline': [None, None, None, None, None, 'manager', 'manager'],
                    'eol_t': [None, 0, 0, 0, 0, 0, 0],
                    'isbase': [False, False, False, False, False, True, True],
                    'installed': [False, False, False, False, False, True, True],
                    'registerrelease': [None, None, None, None, None, None, "42"],
                }
        }.items():
            ref_out = {'retcode': 0, 'stdout': get_test_data(filename)}
            with patch.dict(zypper.__salt__, {'cmd.run_all': MagicMock(return_value=ref_out)}):
                products = zypper.list_products()
                self.assertEqual(len(products), 7)
                self.assertIn(test_data['vendor'],
                              [product['vendor'] for product in products])
                for kwd in [
                        'name', 'isbase', 'installed', 'release',
                        'productline', 'eol_t', 'registerrelease'
                ]:
                    # PY2's assertItemsEqual is unavailable; fall back to
                    # comparing against the sorted list there.
                    if six.PY3:
                        self.assertCountEqual(
                            test_data[kwd],
                            [prod.get(kwd) for prod in products])
                    else:
                        self.assertEqual(
                            test_data[kwd],
                            sorted([prod.get(kwd) for prod in products]))

    def test_refresh_db(self):
        '''
        Test if refresh DB handled correctly
        '''
        # 'is up to date' -> False; 'Building repository ... cache' -> True.
        ref_out = [
            "Repository 'openSUSE-Leap-42.1-LATEST' is up to date.",
            "Repository 'openSUSE-Leap-42.1-Update' is up to date.",
            "Retrieving repository 'openSUSE-Leap-42.1-Update-Non-Oss' metadata",
            "Forcing building of repository cache",
            "Building repository 'openSUSE-Leap-42.1-Update-Non-Oss' cache ..........[done]",
            "Building repository 'salt-dev' cache",
            "All repositories have been refreshed."
        ]
        run_out = {'stderr': '', 'stdout': '\n'.join(ref_out), 'retcode': 0}
        with patch.dict(zypper.__salt__, {'cmd.run_all': MagicMock(return_value=run_out)}):
            result = zypper.refresh_db()
            self.assertEqual(result.get("openSUSE-Leap-42.1-LATEST"), False)
            self.assertEqual(result.get("openSUSE-Leap-42.1-Update"), False)
            self.assertEqual(result.get("openSUSE-Leap-42.1-Update-Non-Oss"), True)

    def test_info_installed(self):
        '''
        Test the return information of the named package(s), installed on the system.

        :return:
        '''
        run_out = {
            'virgo-dummy': {
                'build_date': '2015-07-09T10:55:19Z',
                'vendor': 'openSUSE Build Service',
                'description': 'This is the Virgo dummy package used for testing SUSE Manager',
                'license': 'GPL-2.0',
                'build_host': 'sheep05',
                'url': 'http://www.suse.com',
                'build_date_time_t': 1436432119,
                'relocations': '(not relocatable)',
                'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm',
                'install_date': '2016-02-23T16:31:57Z',
                'install_date_time_t': 1456241517,
                'summary': 'Virgo dummy package',
                'version': '1.0',
                'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
                'release': '1.1',
                'group': 'Applications/System',
                'arch': 'noarch',
                'size': '17992'
            },
            'libopenssl1_0_0': {
                'build_date': '2015-11-04T23:20:34Z',
                'vendor': 'SUSE LLC <https://www.suse.com/>',
                'description': 'The OpenSSL Project is a collaborative effort.',
                'license': 'OpenSSL',
                'build_host': 'sheep11',
                'url': 'https://www.openssl.org/',
                'build_date_time_t': 1446675634,
                'relocations': '(not relocatable)',
                'source_rpm': 'openssl-1.0.1i-34.1.src.rpm',
                'install_date': '2016-02-23T16:31:35Z',
                'install_date_time_t': 1456241495,
                'summary': 'Secure Sockets and Transport Layer Security',
                'version': '1.0.1i',
                'signature': 'RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82',
                'release': '34.1',
                'group': 'Productivity/Networking/Security',
                'packager': 'https://www.suse.com/',
                'arch': 'x86_64',
                'size': '2576912'
            },
        }
        with patch.dict(zypper.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}):
            installed = zypper.info_installed()
            # Test overall products length
            self.assertEqual(len(installed), 2)

            # Test translated fields
            # (info_installed renames 'source_rpm' to 'source')
            for pkg_name, pkg_info in installed.items():
                self.assertEqual(installed[pkg_name].get('source'),
                                 run_out[pkg_name]['source_rpm'])

            # Test keys transition from the lowpkg.info
            for pn_key, pn_val in run_out['virgo-dummy'].items():
                if pn_key == 'source_rpm':
                    continue
                self.assertEqual(installed['virgo-dummy'][pn_key], pn_val)

    def test_info_installed_with_non_ascii_char(self):
        '''
        Test the return information of the named package(s), installed on the system whith non-ascii chars

        :return:
        '''
        run_out = {'vīrgô': {'description': 'vīrgô d€šçripţiǫñ'}}
        with patch.dict(zypper.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}):
            installed = zypper.info_installed()
            self.assertEqual(installed['vīrgô']['description'],
                             'vīrgô d€šçripţiǫñ')

    def test_info_available(self):
        '''
        Test return the information of the named package available for the system.

        :return:
        '''
        test_pkgs = ['vim', 'emacs', 'python']
        with patch(
                'salt.modules.zypper.__zypper__',
                ZyppCallMock(
                    return_value=get_test_data('zypper-available.txt'))):
            available = zypper.info_available(*test_pkgs, refresh=False)
            self.assertEqual(len(available), 3)
            for pkg_name, pkg_info in available.items():
                self.assertIn(pkg_name, test_pkgs)
            self.assertEqual(available['emacs']['status'], 'up-to-date')
            self.assertTrue(available['emacs']['installed'])
            self.assertEqual(available['emacs']['support level'], 'Level 3')
            self.assertEqual(available['emacs']['vendor'],
                             'SUSE LLC <https://www.suse.com/>')
            self.assertEqual(available['emacs']['summary'],
                             'GNU Emacs Base Package')
            self.assertEqual(available['vim']['status'], 'not installed')
            self.assertFalse(available['vim']['installed'])
            self.assertEqual(available['vim']['support level'], 'Level 3')
            self.assertEqual(available['vim']['vendor'],
                             'SUSE LLC <https://www.suse.com/>')
            self.assertEqual(available['vim']['summary'], 'Vi IMproved')

    @patch('salt.modules.zypper.refresh_db', MagicMock(return_value=True))
    def test_latest_version(self):
        '''
        Test the latest version of the named package available for upgrade or installation.

        :return:
        '''
        with patch(
                'salt.modules.zypper.__zypper__',
                ZyppCallMock(
                    return_value=get_test_data('zypper-available.txt'))):
            self.assertEqual(zypper.latest_version('vim'), '7.4.326-2.62')

    @patch('salt.modules.zypper.refresh_db', MagicMock(return_value=True))
    @patch('salt.modules.zypper._systemd_scope', MagicMock(return_value=False))
    @patch.dict('salt.modules.zypper.__grains__', {'osrelease_info': [12, 1]})
    def test_upgrade_success(self):
        '''
        Test system upgrade and dist-upgrade success.

        :return:
        '''
        with patch('salt.modules.zypper.__zypper__.noraise.call',
                   MagicMock()) as zypper_mock:
            # Plain 'update': diff of list_pkgs before/after is reported.
            with patch('salt.modules.zypper.list_pkgs',
                       MagicMock(side_effect=[{
                           "vim": "1.1"
                       }, {
                           "vim": "1.2"
                       }])):
                ret = zypper.upgrade()
                self.assertDictEqual(ret, {"vim": {
                    "old": "1.1",
                    "new": "1.2"
                }})
                zypper_mock.assert_any_call('update',
                                            '--auto-agree-with-licenses')

            # dist_upgrade=True switches the subcommand.
            with patch('salt.modules.zypper.list_pkgs',
                       MagicMock(side_effect=[{
                           "vim": "1.1"
                       }, {
                           "vim": "1.2"
                       }])):
                ret = zypper.upgrade(dist_upgrade=True)
                self.assertDictEqual(ret, {"vim": {
                    "old": "1.1",
                    "new": "1.2"
                }})
                zypper_mock.assert_any_call('dist-upgrade',
                                            '--auto-agree-with-licenses')

            # dryrun runs twice: once plain, once with --debug-solver.
            with patch('salt.modules.zypper.list_pkgs',
                       MagicMock(side_effect=[{
                           "vim": "1.1"
                       }, {
                           "vim": "1.1"
                       }])):
                ret = zypper.upgrade(dist_upgrade=True, dryrun=True)
                zypper_mock.assert_any_call('dist-upgrade',
                                            '--auto-agree-with-licenses',
                                            '--dry-run')
                zypper_mock.assert_any_call('dist-upgrade',
                                            '--auto-agree-with-licenses',
                                            '--dry-run', '--debug-solver')

            # fromrepo adds --from per repo; novendorchange adds its flag.
            with patch('salt.modules.zypper.list_pkgs',
                       MagicMock(side_effect=[{
                           "vim": "1.1"
                       }, {
                           "vim": "1.1"
                       }])):
                ret = zypper.upgrade(dist_upgrade=True,
                                     dryrun=True,
                                     fromrepo=["Dummy", "Dummy2"],
                                     novendorchange=True)
                zypper_mock.assert_any_call('dist-upgrade',
                                            '--auto-agree-with-licenses',
                                            '--dry-run', '--from', "Dummy",
                                            '--from', 'Dummy2',
                                            '--no-allow-vendor-change')
                zypper_mock.assert_any_call('dist-upgrade',
                                            '--auto-agree-with-licenses',
                                            '--dry-run', '--from', "Dummy",
                                            '--from', 'Dummy2',
                                            '--no-allow-vendor-change',
                                            '--debug-solver')

            with patch('salt.modules.zypper.list_pkgs',
                       MagicMock(side_effect=[{
                           "vim": "1.1"
                       }, {
                           "vim": "1.2"
                       }])):
                ret = zypper.upgrade(dist_upgrade=True,
                                     fromrepo=["Dummy", "Dummy2"],
                                     novendorchange=True)
                self.assertDictEqual(ret, {"vim": {
                    "old": "1.1",
                    "new": "1.2"
                }})
                zypper_mock.assert_any_call('dist-upgrade',
                                            '--auto-agree-with-licenses',
                                            '--from', "Dummy", '--from',
                                            'Dummy2',
                                            '--no-allow-vendor-change')

    @patch('salt.modules.zypper.refresh_db', MagicMock(return_value=True))
    @patch('salt.modules.zypper._systemd_scope', MagicMock(return_value=False))
    @patch.dict('salt.modules.zypper.__grains__', {'osrelease_info': [12, 1]})
    def test_upgrade_failure(self):
        '''
        Test system upgrade failure.

        :return:
        '''
        # NOTE(review): internal line layout of this literal was
        # reconstructed from a whitespace-mangled source.
        zypper_out = '''
Loading repository data...
Reading installed packages...
Computing distribution upgrade...
Use 'zypper repos' to get the list of defined repositories.
Repository 'DUMMY' not found by its alias, number, or URI.
'''

        class FailingZypperDummy(object):
            # Minimal __zypper__ stand-in that always reports exit code 555,
            # which is not in SUCCESS_EXIT_CODES -> upgrade must raise.
            def __init__(self):
                self.stdout = zypper_out
                self.stderr = ""
                self.pid = 1234
                self.exit_code = 555
                self.noraise = MagicMock()
                self.SUCCESS_EXIT_CODES = [0]

            def __call__(self, *args, **kwargs):
                return self

        with patch('salt.modules.zypper.__zypper__',
                   FailingZypperDummy()) as zypper_mock:
            zypper_mock.noraise.call = MagicMock()
            with patch('salt.modules.zypper.list_pkgs',
                       MagicMock(side_effect=[{
                           "vim": "1.1"
                       }, {
                           "vim": "1.1"
                       }])):
                with self.assertRaises(CommandExecutionError) as cmd_exc:
                    ret = zypper.upgrade(dist_upgrade=True,
                                         fromrepo=["DUMMY"])
                self.assertEqual(cmd_exc.exception.info['changes'], {})
                self.assertEqual(cmd_exc.exception.info['result']['stdout'],
                                 zypper_out)
                zypper_mock.noraise.call.assert_called_with(
                    'dist-upgrade', '--auto-agree-with-licenses', '--from',
                    'DUMMY')

    @patch('salt.modules.zypper.refresh_db', MagicMock(return_value=True))
    def test_upgrade_available(self):
        '''
        Test whether or not an upgrade is available for a given package.

        :return:
        '''
        ref_out = get_test_data('zypper-available.txt')
        with patch(
                'salt.modules.zypper.__zypper__',
                ZyppCallMock(
                    return_value=get_test_data('zypper-available.txt'))):
            for pkg_name in ['emacs', 'python']:
                self.assertFalse(zypper.upgrade_available(pkg_name))
            self.assertTrue(zypper.upgrade_available('vim'))

    def test_list_pkgs(self):
        '''
        Test packages listing.

        :return:
        '''
        def _add_data(data, key, value):
            # Replacement for pkg_resource.add_pkg: plain dict insert.
            data[key] = value

        # rpm query output: name_|-version_|-release_|- per line.
        rpm_out = [
            'protobuf-java_|-2.6.1_|-3.1.develHead_|-',
            'yast2-ftp-server_|-3.1.8_|-8.1_|-',
            'jose4j_|-0.4.4_|-2.1.develHead_|-',
            'apache-commons-cli_|-1.2_|-1.233_|-',
            'jakarta-commons-discovery_|-0.4_|-129.686_|-',
            'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-',
        ]
        with patch.dict(
                zypper.__salt__,
                {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}):
            with patch.dict(zypper.__salt__,
                            {'pkg_resource.add_pkg': _add_data}):
                with patch.dict(zypper.__salt__,
                                {'pkg_resource.sort_pkglist': MagicMock()}):
                    with patch.dict(zypper.__salt__,
                                    {'pkg_resource.stringify': MagicMock()}):
                        pkgs = zypper.list_pkgs()
                        for pkg_name, pkg_version in {
                                'jakarta-commons-discovery': '0.4-129.686',
                                'yast2-ftp-server': '3.1.8-8.1',
                                'protobuf-java': '2.6.1-3.1.develHead',
                                'susemanager-build-keys-web': '12.0-5.1.develHead',
                                'apache-commons-cli': '1.2-1.233',
                                'jose4j': '0.4.4-2.1.develHead'
                        }.items():
                            self.assertTrue(pkgs.get(pkg_name))
                            self.assertEqual(pkgs[pkg_name], pkg_version)

    def test_download(self):
        '''
        Test package download
        :return:
        '''
        download_out = {
            'stdout': get_test_data('zypper-download.xml'),
            'stderr': None,
            'retcode': 0
        }
        test_out = {
            'nmap': {
                'path': u'/var/cache/zypp/packages/SLE-12-x86_64-Pool/x86_64/nmap-6.46-1.72.x86_64.rpm',
                'repository-alias': u'SLE-12-x86_64-Pool',
                'repository-name': u'SLE-12-x86_64-Pool'
            }
        }
        with patch.dict(zypper.__salt__, {'cmd.run_all': MagicMock(return_value=download_out)}):
            with patch.dict(zypper.__salt__, {'lowpkg.checksum': MagicMock(return_value=True)}):
                # Known package downloads cleanly.
                self.assertEqual(zypper.download("nmap"), test_out)
                # Unknown package is reported under '_error'.
                test_out[
                    '_error'] = "The following package(s) failed to download: foo"
                self.assertEqual(zypper.download("nmap", "foo"), test_out)

    def test_remove_purge(self):
        '''
        Test package removal
        :return:
        '''
        class ListPackages(object):
            # Stateful list_pkgs replacement: the first call reports the
            # target packages installed, later calls report them gone,
            # so zypper.remove sees a before/after difference.
            def __init__(self):
                self._packages = ['vim', 'pico']
                self._pkgs = {
                    'vim': '0.18.0',
                    'emacs': '24.0.1',
                    'pico': '0.1.1',
                }

            def __call__(self):
                pkgs = self._pkgs.copy()
                for target in self._packages:
                    if self._pkgs.get(target):
                        del self._pkgs[target]
                return pkgs

        parsed_targets = [{'vim': None, 'pico': None}, None]
        cmd_out = {'retcode': 0, 'stdout': '', 'stderr': ''}

        # If config.get starts being used elsewhere, we'll need to write a
        # side_effect function.
        patches = {
            'cmd.run_all': MagicMock(return_value=cmd_out),
            'pkg_resource.parse_targets': MagicMock(return_value=parsed_targets),
            'pkg_resource.stringify': MagicMock(),
            'config.get': MagicMock(return_value=True)
        }

        with patch.dict(zypper.__salt__, patches):
            with patch('salt.modules.zypper.list_pkgs', ListPackages()):
                diff = zypper.remove(name='vim,pico')
                for pkg_name in ['vim', 'pico']:
                    self.assertTrue(diff.get(pkg_name))
                    self.assertTrue(diff[pkg_name]['old'])
                    self.assertFalse(diff[pkg_name]['new'])

    def test_repo_value_info(self):
        '''
        Tests if repo info is properly parsed.

        :return:
        '''
        repos_cfg = configparser.ConfigParser()
        for cfg in ['zypper-repo-1.cfg', 'zypper-repo-2.cfg']:
            repos_cfg.readfp(six.moves.StringIO(get_test_data(cfg)))

        for alias in repos_cfg.sections():
            r_info = zypper._get_repo_info(alias, repos_cfg=repos_cfg)
            self.assertEqual(type(r_info['type']), type(None))
            self.assertEqual(type(r_info['enabled']), bool)
            self.assertEqual(type(r_info['autorefresh']), bool)
            self.assertEqual(type(r_info['baseurl']), str)
            self.assertEqual(r_info['type'], None)
            # Only the SLE12-SP1 repo fixture has enabled/autorefresh set.
            self.assertEqual(r_info['enabled'],
                             alias == 'SLE12-SP1-x86_64-Update')
            self.assertEqual(r_info['autorefresh'],
                             alias == 'SLE12-SP1-x86_64-Update')

    def test_repo_add_nomod_noref(self):
        '''
        Test mod_repo adds the new repo and nothing else

        :return:
        '''
        zypper_patcher = patch.multiple('salt.modules.zypper',
                                        **self.zypper_patcher_config)

        url = self.new_repo_config['url']
        name = self.new_repo_config['name']
        with zypper_patcher:
            zypper.mod_repo(name, **{'url': url})
            # Only 'ar' (add repo) is issued; no modify, no refresh.
            self.assertEqual(zypper.__zypper__.xml.call.call_args_list,
                             [call('ar', url, name)])
            zypper.__zypper__.refreshable.xml.call.assert_not_called()

    def test_repo_noadd_nomod_noref(self):
        '''
        Test mod_repo detects the repo already exists,
        no modification was requested and no refresh requested either

        :return:
        '''
        url = self.new_repo_config['url']
        name = self.new_repo_config['name']
        # Override the setUp side_effect: the repo exists from the start.
        self.zypper_patcher_config['_get_configured_repos'] = Mock(
            **{'return_value.sections.return_value': [name]})
        zypper_patcher = patch.multiple('salt.modules.zypper',
                                        **self.zypper_patcher_config)

        with zypper_patcher:
            self.assertEqual(
                zypper.mod_repo(name, **{'url': url}), {
                    'comment':
                    'Specified arguments did not result in modification of repo'
                })
            zypper.__zypper__.xml.call.assert_not_called()
            zypper.__zypper__.refreshable.xml.call.assert_not_called()

    def test_repo_add_mod_noref(self):
        '''
        Test mod_repo adds the new repo and call modify to update autorefresh

        :return:
        '''
        zypper_patcher = patch.multiple('salt.modules.zypper',
                                        **self.zypper_patcher_config)

        url = self.new_repo_config['url']
        name = self.new_repo_config['name']
        with zypper_patcher:
            zypper.mod_repo(name, **{'url': url, 'refresh': True})
            self.assertEqual(zypper.__zypper__.xml.call.call_args_list,
                             [call('ar', url, name)])
            # refresh=True maps to 'mr --refresh <name>'.
            zypper.__zypper__.refreshable.xml.call.assert_called_once_with(
                'mr', '--refresh', name)

    def test_repo_noadd_mod_noref(self):
        '''
        Test mod_repo detects the repository exists,
        calls modify to update 'autorefresh' but does not call refresh

        :return:
        '''
        url = self.new_repo_config['url']
        name = self.new_repo_config['name']
        self.zypper_patcher_config['_get_configured_repos'] = Mock(
            **{'return_value.sections.return_value': [name]})
        zypper_patcher = patch.multiple('salt.modules.zypper',
                                        **self.zypper_patcher_config)
        with zypper_patcher:
            zypper.mod_repo(name, **{'url': url, 'refresh': True})
            zypper.__zypper__.xml.call.assert_not_called()
            zypper.__zypper__.refreshable.xml.call.assert_called_once_with(
                'mr', '--refresh', name)

    def test_repo_add_nomod_ref(self):
        '''
        Test mod_repo adds the new repo and refreshes the repo with
            `zypper --gpg-auto-import-keys refresh <repo-name>`

        :return:
        '''
        zypper_patcher = patch.multiple('salt.modules.zypper',
                                        **self.zypper_patcher_config)

        url = self.new_repo_config['url']
        name = self.new_repo_config['name']
        with zypper_patcher:
            zypper.mod_repo(name, **{'url': url, 'gpgautoimport': True})
            self.assertEqual(zypper.__zypper__.xml.call.call_args_list, [
                call('ar', url, name),
                call('--gpg-auto-import-keys', 'refresh', name)
            ])
            zypper.__zypper__.refreshable.xml.call.assert_not_called()

    def test_repo_noadd_nomod_ref(self):
        '''
        Test mod_repo detects the repo already exists,
        has nothing to modify and refreshes the repo with
            `zypper --gpg-auto-import-keys refresh <repo-name>`

        :return:
        '''
        url = self.new_repo_config['url']
        name = self.new_repo_config['name']
        self.zypper_patcher_config['_get_configured_repos'] = Mock(
            **{'return_value.sections.return_value': [name]})
        zypper_patcher = patch.multiple('salt.modules.zypper',
                                        **self.zypper_patcher_config)

        with zypper_patcher:
            zypper.mod_repo(name, **{'url': url, 'gpgautoimport': True})
            self.assertEqual(zypper.__zypper__.xml.call.call_args_list,
                             [call('--gpg-auto-import-keys', 'refresh', name)])
            zypper.__zypper__.refreshable.xml.call.assert_not_called()

    def test_repo_add_mod_ref(self):
        '''
        Test mod_repo adds the new repo,
        calls modify to update 'autorefresh' and refreshes the repo with
            `zypper --gpg-auto-import-keys refresh <repo-name>`

        :return:
        '''
        zypper_patcher = patch.multiple('salt.modules.zypper',
                                        **self.zypper_patcher_config)

        url = self.new_repo_config['url']
        name = self.new_repo_config['name']
        with zypper_patcher:
            zypper.mod_repo(
                name, **{
                    'url': url,
                    'refresh': True,
                    'gpgautoimport': True
                })
            self.assertEqual(zypper.__zypper__.xml.call.call_args_list, [
                call('ar', url, name),
                call('--gpg-auto-import-keys', 'refresh', name)
            ])
            zypper.__zypper__.refreshable.xml.call.assert_called_once_with(
                '--gpg-auto-import-keys', 'mr', '--refresh', name)

    def test_repo_noadd_mod_ref(self):
        '''
        Test mod_repo detects the repo already exists,
        calls modify to update 'autorefresh' and refreshes the repo with
            `zypper --gpg-auto-import-keys refresh <repo-name>`

        :return:
        '''
        url = self.new_repo_config['url']
        name = self.new_repo_config['name']
        self.zypper_patcher_config['_get_configured_repos'] = Mock(
            **{'return_value.sections.return_value': [name]})
        zypper_patcher = patch.multiple('salt.modules.zypper',
                                        **self.zypper_patcher_config)

        with zypper_patcher:
            zypper.mod_repo(
                name, **{
                    'url': url,
                    'refresh': True,
                    'gpgautoimport': True
                })
            self.assertEqual(zypper.__zypper__.xml.call.call_args_list,
                             [call('--gpg-auto-import-keys', 'refresh', name)])
            zypper.__zypper__.refreshable.xml.call.assert_called_once_with(
                '--gpg-auto-import-keys', 'mr', '--refresh', name)
def test_managed(self):
    '''
    Test to ensure that the named interface is configured properly
    '''
    # NOTE(review): block nesting reconstructed from a whitespace-mangled
    # source; the mock side_effect ordering below drives which branch of
    # network.managed is exercised at each assertion.
    ret = {'name': 'salt', 'changes': {}, 'result': False, 'comment': ''}
    change = {
        'interface': '--- \n+++ \n@@ -1 +1 @@\n-A\n+B',
        'status': 'Interface salt restart to validate'
    }
    # First call raises AttributeError (missing interface), the rest
    # return the current config 'A' to diff against the built config 'B'.
    mock = MagicMock(side_effect=[AttributeError, 'A', 'A', 'A', 'A', 'A'])
    with patch.dict(network.__salt__, {"ip.get_interface": mock}):
        self.assertDictEqual(network.managed('salt', 'stack', test='a'), ret)
        mock = MagicMock(return_value='B')
        with patch.dict(network.__salt__, {"ip.build_interface": mock}):
            # 'bond' type with a failing ip.get_bond -> unchanged failure ret.
            mock = MagicMock(side_effect=AttributeError)
            with patch.dict(network.__salt__, {"ip.get_bond": mock}):
                self.assertDictEqual(
                    network.managed('salt', 'bond', test='a'), ret)
            # test mode reports the pending diff without applying it.
            ret.update({
                'comment': 'Interface salt is set to be'
                ' updated:\n--- \n+++ \n@@ -1 +1 @@\n-A\n+B',
                'result': None
            })
            self.assertDictEqual(
                network.managed('salt', 'stack', test='a'), ret)
            mock = MagicMock(return_value=True)
            with patch.dict(network.__salt__, {"ip.down": mock}):
                with patch.dict(network.__salt__, {"ip.up": mock}):
                    # Real run: interface rewritten and restarted.
                    ret.update({
                        'comment': 'Interface salt updated.',
                        'result': True,
                        'changes': change
                    })
                    self.assertDictEqual(network.managed('salt', 'stack'),
                                         ret)
                    with patch.dict(network.__grains__, {"A": True}):
                        with patch.dict(
                                network.__salt__,
                                {"saltutil.refresh_modules": mock}):
                            # enabled=False: interface is taken down instead.
                            ret.update({
                                'result': True,
                                'changes': {
                                    'interface': '--- \n+'
                                    '++ \n@@ -1 +1 @@\n-A'
                                    '\n+B',
                                    'status': 'Interface'
                                    ' salt down'
                                }
                            })
                            self.assertDictEqual(
                                network.managed('salt', 'stack', False), ret)
                    # ip.down side_effect exhausted -> error surfaces
                    # as the comment.
                    ret.update({
                        'changes': {
                            'interface': '--- \n+++ \n@@ -1 +1 @@\n-A\n+B'
                        },
                        'result': False,
                        'comment': "'ip.down'"
                    })
                    self.assertDictEqual(network.managed('salt', 'stack'),
                                         ret)
def test_list_products(self):
    '''
    List products test.

    Feeds canned ``zypper`` XML output (SLE12SP1 and SLE11SP3 fixtures)
    through a mocked ``cmd.run_all`` and verifies the parsed product fields.
    '''
    # Maps fixture filename -> expected per-field values for the 7 products
    # each fixture contains.
    for filename, test_data in {
            'zypper-products-sle12sp1.xml': {
                'name': [
                    'SLES', 'SLES', 'SUSE-Manager-Proxy',
                    'SUSE-Manager-Server', 'sle-manager-tools-beta',
                    'sle-manager-tools-beta-broken-eol',
                    'sle-manager-tools-beta-no-eol'
                ],
                'vendor': 'SUSE LLC <https://www.suse.com/>',
                'release': ['0', '0', '0', '0', '0', '0', '0'],
                'productline': [None, None, None, None, None, None, 'sles'],
                'eol_t': [
                    None, 0, 1509408000, 1522454400, 1522454400,
                    1730332800, 1730332800
                ],
                'isbase': [False, False, False, False, False, False, True],
                'installed': [False, False, False, False, False, False, True],
                'registerrelease': [None, None, None, None, None, None, '123'],
            },
            'zypper-products-sle11sp3.xml': {
                'name': [
                    'SUSE-Manager-Server', 'SUSE-Manager-Server',
                    'SUSE-Manager-Server-Broken-EOL', 'SUSE_SLES',
                    'SUSE_SLES', 'SUSE_SLES', 'SUSE_SLES-SP4-migration'
                ],
                'vendor': 'SUSE LINUX Products GmbH, Nuernberg, Germany',
                'release': ['1.138', '1.2', '1.2', '1.2', '1.201', '1.201', '1.4'],
                'productline': [None, None, None, None, None, 'manager', 'manager'],
                'eol_t': [None, 0, 0, 0, 0, 0, 0],
                'isbase': [False, False, False, False, False, True, True],
                'installed': [False, False, False, False, False, True, True],
                'registerrelease': [None, None, None, None, None, None, "42"],
            }
    }.items():
        ref_out = {'retcode': 0, 'stdout': get_test_data(filename)}
        with patch.dict(zypper.__salt__,
                        {'cmd.run_all': MagicMock(return_value=ref_out)}):
            products = zypper.list_products()
            self.assertEqual(len(products), 7)
            self.assertIn(test_data['vendor'],
                          [product['vendor'] for product in products])
            for kwd in [
                    'name', 'isbase', 'installed', 'release', 'productline',
                    'eol_t', 'registerrelease'
            ]:
                if six.PY3:
                    # PY3: order-insensitive comparison
                    self.assertCountEqual(
                        test_data[kwd],
                        [prod.get(kwd) for prod in products])
                else:
                    # PY2 (no assertCountEqual): compare sorted values;
                    # the expected lists are pre-sorted in the fixture dict
                    self.assertEqual(
                        test_data[kwd],
                        sorted([prod.get(kwd) for prod in products]))
def test_info_installed(self):
    '''
    Test the return information of the named package(s), installed on the system.

    :return:
    '''
    # Canned ``lowpkg.info`` output for two packages; ``info_installed`` is
    # expected to pass most keys through and rename ``source_rpm`` to
    # ``source``.
    run_out = {
        'virgo-dummy': {
            'build_date': '2015-07-09T10:55:19Z',
            'vendor': 'openSUSE Build Service',
            'description': 'This is the Virgo dummy package used for testing SUSE Manager',
            'license': 'GPL-2.0',
            'build_host': 'sheep05',
            'url': 'http://www.suse.com',
            'build_date_time_t': 1436432119,
            'relocations': '(not relocatable)',
            'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm',
            'install_date': '2016-02-23T16:31:57Z',
            'install_date_time_t': 1456241517,
            'summary': 'Virgo dummy package',
            'version': '1.0',
            'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
            'release': '1.1',
            'group': 'Applications/System',
            'arch': 'noarch',
            'size': '17992'
        },
        'libopenssl1_0_0': {
            'build_date': '2015-11-04T23:20:34Z',
            'vendor': 'SUSE LLC <https://www.suse.com/>',
            'description': 'The OpenSSL Project is a collaborative effort.',
            'license': 'OpenSSL',
            'build_host': 'sheep11',
            'url': 'https://www.openssl.org/',
            'build_date_time_t': 1446675634,
            'relocations': '(not relocatable)',
            'source_rpm': 'openssl-1.0.1i-34.1.src.rpm',
            'install_date': '2016-02-23T16:31:35Z',
            'install_date_time_t': 1456241495,
            'summary': 'Secure Sockets and Transport Layer Security',
            'version': '1.0.1i',
            'signature': 'RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82',
            'release': '34.1',
            'group': 'Productivity/Networking/Security',
            'packager': 'https://www.suse.com/',
            'arch': 'x86_64',
            'size': '2576912'
        },
    }
    with patch.dict(zypper.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}):
        installed = zypper.info_installed()
        # Test overall products length
        self.assertEqual(len(installed), 2)
        # Test translated fields: 'source_rpm' must appear as 'source'
        for pkg_name, pkg_info in installed.items():
            self.assertEqual(installed[pkg_name].get('source'),
                             run_out[pkg_name]['source_rpm'])
        # Test keys transition from the lowpkg.info: everything except the
        # renamed 'source_rpm' key must pass through unchanged
        for pn_key, pn_val in run_out['virgo-dummy'].items():
            if pn_key == 'source_rpm':
                continue
            self.assertEqual(installed['virgo-dummy'][pn_key], pn_val)
def write_crontab(*args, **kw):
    '''
    Stand-in for ``cron._write_cron_lines`` used via ``side_effect``.

    Instead of writing to a real crontab, strip each pending line (the
    second positional argument is the list of lines) and store the joined
    result in the fake crontab buffer via ``set_crontab``.  Returns a fresh
    ``MagicMock`` so callers that inspect the write result get a mock.
    '''
    pending_lines = args[1]
    set_crontab('\n'.join(line.strip() for line in pending_lines))
    return MagicMock()
class CronTestCase(TestCase):
    '''
    Tests for the cron execution module's crontab management.

    ``raw_cron`` is mocked to read from, and ``_write_cron_lines`` to write
    to, the in-memory fake crontab maintained by the module-level
    ``get_crontab``/``set_crontab``/``write_crontab`` helpers.  ``L`` is the
    module-level "Lines below here are managed by Salt" marker string.
    '''

    @patch('salt.modules.cron.raw_cron',
           new=MagicMock(side_effect=get_crontab))
    @patch('salt.modules.cron._write_cron_lines',
           new=MagicMock(side_effect=write_crontab))
    def test__need_changes_new(self):
        '''
        New behavior, identifier will get track of the managed lines!
        '''
        # when there are no identifiers,
        # we do not touch it
        set_crontab(
            L + '# SALT_CRON_IDENTIFIER:booh\n'
            '* * * * * ls\n')
        cron.set_job(
            user='******',
            minute='*',
            hour='*',
            daymonth='*',
            month='*',
            dayweek='*',
            cmd='ls',
            comment=None,
            identifier=None,
        )
        c1 = get_crontab()
        set_crontab(L + '* * * * * ls\n')
        # identifier mismatch (booh vs none) -> a second entry is appended
        self.assertEqual(
            c1,
            '# Lines below here are managed by Salt, do not edit\n'
            '# SALT_CRON_IDENTIFIER:booh\n'
            '* * * * * ls\n'
            '* * * * * ls')
        # whenever we have an identifier, hourray even without comment
        # we can match and edit the crontab in place
        # without cluttering the crontab with new cmds
        set_crontab(
            L + '# SALT_CRON_IDENTIFIER:bar\n'
            '* * * * * ls\n')
        cron.set_job(
            user='******',
            minute='*',
            hour='*',
            daymonth='*',
            month='*',
            dayweek='*',
            cmd='ls',
            comment=None,
            identifier='bar',
        )
        c5 = get_crontab()
        set_crontab(L + '* * * * * ls\n')
        self.assertEqual(
            c5,
            '# Lines below here are managed by Salt, do not edit\n'
            '# SALT_CRON_IDENTIFIER:bar\n'
            '* * * * * ls\n')
        # we can even change the other parameters as well
        # thx to the id
        set_crontab(L + '# SALT_CRON_IDENTIFIER:bar\n* * * * * ls\n')
        cron.set_job(
            user='******',
            minute='1',
            hour='2',
            daymonth='3',
            month='4',
            dayweek='5',
            cmd='foo',
            comment='moo',
            identifier='bar',
        )
        c6 = get_crontab()
        self.assertEqual(
            c6,
            '# Lines below here are managed by Salt, do not edit\n'
            '# moo SALT_CRON_IDENTIFIER:bar\n'
            '1 2 3 4 5 foo')

    def test__unicode_match(self):
        # _cron_matched must compare identifiers consistently across
        # str/unicode and even int inputs; force a known encoding for the
        # duration of the test and restore it afterwards.
        encoding = builtins.__salt_system_encoding__
        builtins.__salt_system_encoding__ = 'utf-8'
        self.assertTrue(cron._cron_matched({'identifier': '1'}, 'foo', 1))
        self.assertTrue(cron._cron_matched({'identifier': 'é'}, 'foo', 'é'))
        self.assertTrue(cron._cron_matched({'identifier': u'é'}, 'foo', 'é'))
        self.assertTrue(cron._cron_matched({'identifier': 'é'}, 'foo', u'é'))
        self.assertTrue(cron._cron_matched({'identifier': u'é'}, 'foo', u'é'))
        builtins.__salt_system_encoding__ = encoding

    @patch('salt.modules.cron._write_cron_lines',
           new=MagicMock(side_effect=write_crontab))
    def test__need_changes_old(self):
        '''
        old behavior; ID has no special action
        - If an id is found, it will be added as a new crontab
          even if there is a cmd that looks like this one
        - no comment, delete the cmd and readd it
        - comment: idem
        '''
        with patch('salt.modules.cron.raw_cron',
                   new=MagicMock(side_effect=get_crontab)):
            # no comment, no identifier: existing cmd is replaced in place
            set_crontab(L + '* * * * * ls\n')
            cron.set_job(
                user='******',
                minute='*',
                hour='*',
                daymonth='*',
                month='*',
                dayweek='*',
                cmd='ls',
                comment=None,
                identifier=cron.SALT_CRON_NO_IDENTIFIER,
            )
            c1 = get_crontab()
            set_crontab(L + '* * * * * ls\n')
            self.assertEqual(
                c1,
                '# Lines below here are managed by Salt, do not edit\n'
                '* * * * * ls\n')
            # adding a comment re-adds the cmd with the comment attached
            cron.set_job(
                user='******',
                minute='*',
                hour='*',
                daymonth='*',
                month='*',
                dayweek='*',
                cmd='ls',
                comment='foo',
                identifier=cron.SALT_CRON_NO_IDENTIFIER,
            )
            c2 = get_crontab()
            self.assertEqual(
                c2,
                '# Lines below here are managed by Salt, do not edit\n'
                '# foo\n* * * * * ls')
            # different cmd with an identifier: appended as a new entry
            set_crontab(L + '* * * * * ls\n')
            cron.set_job(
                user='******',
                minute='*',
                hour='*',
                daymonth='*',
                month='*',
                dayweek='*',
                cmd='lsa',
                comment='foo',
                identifier='bar',
            )
            c3 = get_crontab()
            self.assertEqual(
                c3,
                '# Lines below here are managed by Salt, do not edit\n'
                '* * * * * ls\n'
                '# foo SALT_CRON_IDENTIFIER:bar\n'
                '* * * * * lsa')
            set_crontab(L + '* * * * * ls\n')
            cron.set_job(
                user='******',
                minute='*',
                hour='*',
                daymonth='*',
                month='*',
                dayweek='*',
                cmd='foo',
                comment='foo',
                identifier='bar',
            )
            c4 = get_crontab()
            self.assertEqual(
                c4,
                '# Lines below here are managed by Salt, do not edit\n'
                '* * * * * ls\n'
                '# foo SALT_CRON_IDENTIFIER:bar\n'
                '* * * * * foo')
            # same cmd with a new identifier: the un-identified line for the
            # same cmd is taken over by the identified entry
            set_crontab(L + '* * * * * ls\n')
            cron.set_job(
                user='******',
                minute='*',
                hour='*',
                daymonth='*',
                month='*',
                dayweek='*',
                cmd='ls',
                comment='foo',
                identifier='bbar',
            )
            c4 = get_crontab()
            self.assertEqual(
                c4,
                '# Lines below here are managed by Salt, do not edit\n'
                '# foo SALT_CRON_IDENTIFIER:bbar\n'
                '* * * * * ls')

    @patch('salt.modules.cron._write_cron_lines',
           new=MagicMock(side_effect=write_crontab))
    def test__issue10959(self):
        '''
        handle multi old style crontabs
        https://github.com/saltstack/salt/issues/10959
        '''
        with patch('salt.modules.cron.raw_cron',
                   new=MagicMock(side_effect=get_crontab)):
            set_crontab(
                '# Lines below here are managed by Salt, do not edit\n'
                '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                # as managed per salt, the last lines will be merged together !
                '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                '* * * * * samecmd\n'
                '* * * * * otheridcmd\n'
                '* * * * * otheridcmd\n'
                '# SALT_CRON_IDENTIFIER:NO ID SET\n0 * * * * samecmd1\n'
                '1 * * * * samecmd1\n'
                '0 * * * * otheridcmd1\n'
                '1 * * * * otheridcmd1\n'
                # special case here, none id managed line with same command
                # as a later id managed line will become managed
                '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1\n'
                '# SALT_CRON_IDENTIFIER:2\n0 * * * * otheridcmd1\n')
            crons1 = cron.list_tab('root')
            # the filtering is done on save, we reflect in listing
            # the same that we have in a file, no matter what we
            # have
            self.assertEqual(
                crons1,
                {
                    'crons': [{
                        'cmd': 'ls', 'comment': 'uoo', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*',
                        'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'too', 'comment': 'uuoo', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*',
                        'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'zoo', 'comment': 'uuuoo', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*',
                        'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'yoo', 'comment': '', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*',
                        'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'xoo', 'comment': '', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*',
                        'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'samecmd', 'comment': '', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*',
                        'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'samecmd', 'comment': None, 'daymonth': '*',
                        'dayweek': '*', 'hour': '*', 'identifier': None,
                        'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'otheridcmd', 'comment': None, 'daymonth': '*',
                        'dayweek': '*', 'hour': '*', 'identifier': None,
                        'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'otheridcmd', 'comment': None, 'daymonth': '*',
                        'dayweek': '*', 'hour': '*', 'identifier': None,
                        'minute': '*', 'month': '*'
                    }, {
                        'cmd': 'samecmd1', 'comment': '', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*',
                        'identifier': 'NO ID SET', 'minute': '0', 'month': '*'
                    }, {
                        'cmd': 'samecmd1', 'comment': None, 'daymonth': '*',
                        'dayweek': '*', 'hour': '*', 'identifier': None,
                        'minute': '1', 'month': '*'
                    }, {
                        'cmd': 'otheridcmd1', 'comment': None, 'daymonth': '*',
                        'dayweek': '*', 'hour': '*', 'identifier': None,
                        'minute': '0', 'month': '*'
                    }, {
                        'cmd': 'otheridcmd1', 'comment': None, 'daymonth': '*',
                        'dayweek': '*', 'hour': '*', 'identifier': None,
                        'minute': '1', 'month': '*'
                    }, {
                        'cmd': 'otheridcmd1', 'comment': '', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*', 'identifier': '1',
                        'minute': '0', 'month': '*'
                    }, {
                        'cmd': 'otheridcmd1', 'comment': '', 'daymonth': '*',
                        'dayweek': '*', 'hour': '*', 'identifier': '2',
                        'minute': '0', 'month': '*'
                    }],
                    'env': [],
                    'pre': [],
                    'special': []
                })
            # so yood so far, no problem for now, trying to save the
            # multilines without id crons now
            # inc_tests[i] is the expected crontab after re-saving
            # crons1['crons'][0..i]; duplicates collapse on save.
            inc_tests = [
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                 '* * * * * otheridcmd'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                 '* * * * * otheridcmd'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                 '* * * * * otheridcmd\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n'
                 '0 * * * * samecmd1'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                 '* * * * * otheridcmd\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                 '* * * * * otheridcmd\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n'
                 '0 * * * * otheridcmd1'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                 '* * * * * otheridcmd\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n'
                 '1 * * * * otheridcmd1'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                 '* * * * * otheridcmd\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n'
                 '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1'),
                #
                ('# Lines below here are managed by Salt, do not edit\n'
                 '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
                 '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
                 '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
                 '* * * * * otheridcmd\n'
                 '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n'
                 '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1\n'
                 '# SALT_CRON_IDENTIFIER:2\n0 * * * * otheridcmd1')
            ]
            set_crontab('')
            for idx, cr in enumerate(crons1['crons']):
                cron.set_job('root', **cr)
                self.assertEqual(
                    get_crontab(),
                    inc_tests[idx],
                    ("idx {0}\n'{1}'\n != \n'{2}'\n\n\n"
                     "\'{1}\' != \'{2}\'").format(
                         idx, get_crontab(), inc_tests[idx]))

    @patch('salt.modules.cron.raw_cron', new=MagicMock(side_effect=[
        (L + '\n'),
        # NOTE(review): the trailing 'n' in '\nn' below looks like a typo
        # for '\n' — the assertions pass either way; confirm intent.
        (L + '* * * * * ls\nn'),
        (L + '# foo\n'
         '* * * * * ls\n'),
        (L + '# foo {0}:blah\n'.format(cron.SALT_CRON_IDENTIFIER) +
         '* * * * * ls\n'),
    ]))
    def test__load_tab(self):
        # Each list_tab() call consumes one raw_cron side_effect above:
        # empty tab, bare cmd, commented cmd, commented+identified cmd.
        cron.__grains__ = __grains__
        with patch.dict(cron.__grains__, {'os_family': 'Solaris'}):
            crons1 = cron.list_tab('root')
            crons2 = cron.list_tab('root')
            crons3 = cron.list_tab('root')
            crons4 = cron.list_tab('root')
            self.assertEqual(crons1, {
                'pre': [],
                'crons': [],
                'env': [],
                'special': []
            })
            self.assertEqual(
                crons2['crons'][0], {
                    'comment': None,
                    'dayweek': '*',
                    'hour': '*',
                    'identifier': None,
                    'cmd': 'ls',
                    'daymonth': '*',
                    'minute': '*',
                    'month': '*'
                })
            self.assertEqual(
                crons3['crons'][0], {
                    'comment': 'foo',
                    'dayweek': '*',
                    'hour': '*',
                    'identifier': None,
                    'cmd': 'ls',
                    'daymonth': '*',
                    'minute': '*',
                    'month': '*'
                })
            self.assertEqual(
                crons4['crons'][0], {
                    'comment': 'foo',
                    'dayweek': '*',
                    'hour': '*',
                    'identifier': 'blah',
                    'cmd': 'ls',
                    'daymonth': '*',
                    'minute': '*',
                    'month': '*'
                })
# NOTE(review): this appears to be a duplicate of the earlier
# test__need_changes_old; if both live in the same class the later
# definition shadows the earlier one, and this copy lacks the
# @patch('salt.modules.cron._write_cron_lines', ...) decorator the earlier
# one carries — confirm whether one copy should be removed.
def test__need_changes_old(self):
    '''
    old behavior; ID has no special action
    - If an id is found, it will be added as a new crontab
      even if there is a cmd that looks like this one
    - no comment, delete the cmd and readd it
    - comment: idem
    '''
    with patch('salt.modules.cron.raw_cron',
               new=MagicMock(side_effect=get_crontab)):
        # no comment, no identifier: cmd is rewritten in place
        set_crontab(L + '* * * * * ls\n')
        cron.set_job(
            user='******',
            minute='*',
            hour='*',
            daymonth='*',
            month='*',
            dayweek='*',
            cmd='ls',
            comment=None,
            identifier=cron.SALT_CRON_NO_IDENTIFIER,
        )
        c1 = get_crontab()
        set_crontab(L + '* * * * * ls\n')
        self.assertEqual(
            c1,
            '# Lines below here are managed by Salt, do not edit\n'
            '* * * * * ls\n')
        # with a comment the cmd is re-added carrying the comment
        cron.set_job(
            user='******',
            minute='*',
            hour='*',
            daymonth='*',
            month='*',
            dayweek='*',
            cmd='ls',
            comment='foo',
            identifier=cron.SALT_CRON_NO_IDENTIFIER,
        )
        c2 = get_crontab()
        self.assertEqual(
            c2,
            '# Lines below here are managed by Salt, do not edit\n'
            '# foo\n* * * * * ls')
        # different cmd + identifier: appended as a new managed entry
        set_crontab(L + '* * * * * ls\n')
        cron.set_job(
            user='******',
            minute='*',
            hour='*',
            daymonth='*',
            month='*',
            dayweek='*',
            cmd='lsa',
            comment='foo',
            identifier='bar',
        )
        c3 = get_crontab()
        self.assertEqual(
            c3,
            '# Lines below here are managed by Salt, do not edit\n'
            '* * * * * ls\n'
            '# foo SALT_CRON_IDENTIFIER:bar\n'
            '* * * * * lsa')
        set_crontab(L + '* * * * * ls\n')
        cron.set_job(
            user='******',
            minute='*',
            hour='*',
            daymonth='*',
            month='*',
            dayweek='*',
            cmd='foo',
            comment='foo',
            identifier='bar',
        )
        c4 = get_crontab()
        self.assertEqual(
            c4,
            '# Lines below here are managed by Salt, do not edit\n'
            '* * * * * ls\n'
            '# foo SALT_CRON_IDENTIFIER:bar\n'
            '* * * * * foo')
        # same cmd gains an identifier: existing line is taken over
        set_crontab(L + '* * * * * ls\n')
        cron.set_job(
            user='******',
            minute='*',
            hour='*',
            daymonth='*',
            month='*',
            dayweek='*',
            cmd='ls',
            comment='foo',
            identifier='bbar',
        )
        c4 = get_crontab()
        self.assertEqual(
            c4,
            '# Lines below here are managed by Salt, do not edit\n'
            '# foo SALT_CRON_IDENTIFIER:bbar\n'
            '* * * * * ls')
# NOTE(review): this appears to duplicate the earlier test__issue10959 and,
# unlike it, carries no @patch for cron._write_cron_lines — if both copies
# live in the same class, the later one shadows the earlier. Confirm which
# copy is intended to survive.
def test__issue10959(self):
    '''
    handle multi old style crontabs
    https://github.com/saltstack/salt/issues/10959
    '''
    with patch('salt.modules.cron.raw_cron',
               new=MagicMock(side_effect=get_crontab)):
        set_crontab(
            '# Lines below here are managed by Salt, do not edit\n'
            '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
            '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
            '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
            '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
            '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
            # as managed per salt, the last lines will be merged together !
            '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
            '* * * * * samecmd\n'
            '* * * * * otheridcmd\n'
            '* * * * * otheridcmd\n'
            '# SALT_CRON_IDENTIFIER:NO ID SET\n0 * * * * samecmd1\n'
            '1 * * * * samecmd1\n'
            '0 * * * * otheridcmd1\n'
            '1 * * * * otheridcmd1\n'
            # special case here, none id managed line with same command
            # as a later id managed line will become managed
            '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1\n'
            '# SALT_CRON_IDENTIFIER:2\n0 * * * * otheridcmd1\n')
        crons1 = cron.list_tab('root')
        # the filtering is done on save, we reflect in listing
        # the same that we have in a file, no matter what we
        # have
        self.assertEqual(
            crons1,
            {
                'crons': [{
                    'cmd': 'ls', 'comment': 'uoo', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*',
                    'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                }, {
                    'cmd': 'too', 'comment': 'uuoo', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*',
                    'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                }, {
                    'cmd': 'zoo', 'comment': 'uuuoo', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*',
                    'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                }, {
                    'cmd': 'yoo', 'comment': '', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*',
                    'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                }, {
                    'cmd': 'xoo', 'comment': '', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*',
                    'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                }, {
                    'cmd': 'samecmd', 'comment': '', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*',
                    'identifier': 'NO ID SET', 'minute': '*', 'month': '*'
                }, {
                    'cmd': 'samecmd', 'comment': None, 'daymonth': '*',
                    'dayweek': '*', 'hour': '*', 'identifier': None,
                    'minute': '*', 'month': '*'
                }, {
                    'cmd': 'otheridcmd', 'comment': None, 'daymonth': '*',
                    'dayweek': '*', 'hour': '*', 'identifier': None,
                    'minute': '*', 'month': '*'
                }, {
                    'cmd': 'otheridcmd', 'comment': None, 'daymonth': '*',
                    'dayweek': '*', 'hour': '*', 'identifier': None,
                    'minute': '*', 'month': '*'
                }, {
                    'cmd': 'samecmd1', 'comment': '', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*',
                    'identifier': 'NO ID SET', 'minute': '0', 'month': '*'
                }, {
                    'cmd': 'samecmd1', 'comment': None, 'daymonth': '*',
                    'dayweek': '*', 'hour': '*', 'identifier': None,
                    'minute': '1', 'month': '*'
                }, {
                    'cmd': 'otheridcmd1', 'comment': None, 'daymonth': '*',
                    'dayweek': '*', 'hour': '*', 'identifier': None,
                    'minute': '0', 'month': '*'
                }, {
                    'cmd': 'otheridcmd1', 'comment': None, 'daymonth': '*',
                    'dayweek': '*', 'hour': '*', 'identifier': None,
                    'minute': '1', 'month': '*'
                }, {
                    'cmd': 'otheridcmd1', 'comment': '', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*', 'identifier': '1',
                    'minute': '0', 'month': '*'
                }, {
                    'cmd': 'otheridcmd1', 'comment': '', 'daymonth': '*',
                    'dayweek': '*', 'hour': '*', 'identifier': '2',
                    'minute': '0', 'month': '*'
                }],
                'env': [],
                'pre': [],
                'special': []
            })
        # so yood so far, no problem for now, trying to save the
        # multilines without id crons now
        # inc_tests[i] is the expected crontab after saving
        # crons1['crons'][0..i] back one at a time.
        inc_tests = [
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
             '* * * * * otheridcmd'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
             '* * * * * otheridcmd'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
             '* * * * * otheridcmd\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n'
             '0 * * * * samecmd1'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
             '* * * * * otheridcmd\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
             '* * * * * otheridcmd\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n'
             '0 * * * * otheridcmd1'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
             '* * * * * otheridcmd\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n'
             '1 * * * * otheridcmd1'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
             '* * * * * otheridcmd\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n'
             '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1'),
            #
            ('# Lines below here are managed by Salt, do not edit\n'
             '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n'
             '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n'
             '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n'
             '* * * * * otheridcmd\n'
             '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n'
             '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1\n'
             '# SALT_CRON_IDENTIFIER:2\n0 * * * * otheridcmd1')
        ]
        set_crontab('')
        for idx, cr in enumerate(crons1['crons']):
            cron.set_job('root', **cr)
            self.assertEqual(
                get_crontab(),
                inc_tests[idx],
                ("idx {0}\n'{1}'\n != \n'{2}'\n\n\n"
                 "\'{1}\' != \'{2}\'").format(
                     idx, get_crontab(), inc_tests[idx]))
# NOTE(review): this looks like an orphaned duplicate of
# PsTestCase.test_pkill — it takes ``self`` and a ``send_signal_mock``
# argument but no class or @patch decorators are visible here; confirm it
# is not dead code left over from a merge.
def test_pkill(self, send_signal_mock):
    # Replace send_signal on the module-level mocked process so the call
    # made by ps.pkill can be asserted on.
    mocked_proc.send_signal = MagicMock()
    test_signal = 1234
    ps.pkill(_get_proc_name(mocked_proc), signal=test_signal)
    self.assertEqual(mocked_proc.send_signal.call_args, call(test_signal))
class PsTestCase(TestCase):
    '''
    Tests for the ps execution module, with all psutil calls mocked via the
    module-level STUB_* fixtures and MOCK_PROC.
    '''

    def setUp(self):
        # psutil 2.x exposes name/pid as methods; 1.x as plain attributes.
        if PSUTIL2:
            MOCK_PROC.name = Mock(return_value="test_mock_proc")
            MOCK_PROC.pid = Mock(return_value=9999999999)
        else:
            MOCK_PROC.name = 'test_mock_proc'
            MOCK_PROC.pid = 9999999999

    @patch('psutil.get_pid_list', new=MagicMock(return_value=STUB_PID_LIST))
    def test_get_pid_list(self):
        self.assertListEqual(STUB_PID_LIST, ps.get_pid_list())

    @patch('psutil.Process')
    def test_kill_pid(self, send_signal_mock):
        ps.kill_pid(0, signal=999)
        # kill_pid must construct a Process for the requested pid (0)
        self.assertEqual(send_signal_mock.call_args, call(0))

    @patch('psutil.Process.send_signal')
    @patch('psutil.process_iter', new=MagicMock(return_value=[MOCK_PROC]))
    def test_pkill(self, send_signal_mock):
        mocked_proc.send_signal = MagicMock()
        test_signal = 1234
        ps.pkill(_get_proc_name(mocked_proc), signal=test_signal)
        self.assertEqual(mocked_proc.send_signal.call_args, call(test_signal))

    @patch('psutil.process_iter', new=MagicMock(return_value=[MOCK_PROC]))
    def test_pgrep(self):
        self.assertIn(_get_proc_pid(MOCK_PROC),
                      ps.pgrep(_get_proc_name(MOCK_PROC)))

    @patch('psutil.cpu_percent', new=MagicMock(return_value=1))
    def test_cpu_percent(self):
        self.assertEqual(ps.cpu_percent(), 1)

    @patch('psutil.cpu_times', new=MagicMock(return_value=STUB_CPU_TIMES))
    def test_cpu_times(self):
        self.assertDictEqual({
            'idle': 4,
            'nice': 2,
            'system': 3,
            'user': 1
        }, ps.cpu_times())

    @skipIf(HAS_PSUTIL_VERSION is False,
            'psutil 0.6.0 or greater is required for this test')
    @patch('psutil.virtual_memory', new=MagicMock(return_value=STUB_VIRT_MEM))
    def test_virtual_memory(self):
        self.assertDictEqual(
            {
                'used': 500,
                'total': 1000,
                'available': 500,
                'percent': 50,
                'free': 500
            }, ps.virtual_memory())

    @skipIf(HAS_PSUTIL_VERSION is False,
            'psutil 0.6.0 or greater is required for this test')
    @patch('psutil.swap_memory', new=MagicMock(return_value=STUB_SWAP_MEM))
    def test_swap_memory(self):
        self.assertDictEqual(
            {
                'used': 500,
                'total': 1000,
                'percent': 50,
                'free': 500,
                'sin': 0,
                'sout': 0
            }, ps.swap_memory())

    @patch('psutil.disk_partitions',
           new=MagicMock(return_value=[STUB_DISK_PARTITION]))
    def test_disk_partitions(self):
        self.assertDictEqual(
            {
                'device': '/dev/disk0s2',
                'mountpoint': '/',
                'opts': 'rw,local,rootfs,dovolfs,journaled,multilabel',
                'fstype': 'hfs'
            }, ps.disk_partitions()[0])

    @patch('psutil.disk_usage', new=MagicMock(return_value=STUB_DISK_USAGE))
    def test_disk_usage(self):
        self.assertDictEqual(
            {
                'used': 500,
                'total': 1000,
                'percent': 50,
                'free': 500
            }, ps.disk_usage('DUMMY_PATH'))

    @patch('psutil.disk_partitions',
           new=MagicMock(return_value=[STUB_DISK_PARTITION]))
    def test_disk_partition_usage(self):
        # NOTE(review): despite the name, this calls ps.disk_partitions()
        # and duplicates test_disk_partitions; it never exercises
        # ps.disk_partition_usage — confirm and fix the target call.
        self.assertDictEqual(
            {
                'device': '/dev/disk0s2',
                'mountpoint': '/',
                'opts': 'rw,local,rootfs,dovolfs,journaled,multilabel',
                'fstype': 'hfs'
            }, ps.disk_partitions()[0])

    ## Should only be tested in integration
    # def test_total_physical_memory(self):
    #     pass

    ## Should only be tested in integration
    # def test_num_cpus(self):
    #     pass

    ## Should only be tested in integration
    # def test_boot_time(self):
    #     pass

    @patch('psutil.network_io_counters',
           new=MagicMock(return_value=STUB_NETWORK_IO))
    def test_network_io_counters(self):
        self.assertDictEqual(
            {
                'packets_sent': 500,
                'packets_recv': 600,
                'bytes_recv': 2000,
                'dropout': 4,
                'bytes_sent': 1000,
                'errout': 2,
                'errin': 1,
                'dropin': 3
            }, ps.network_io_counters())

    @patch('psutil.disk_io_counters', new=MagicMock(return_value=STUB_DISK_IO))
    def test_disk_io_counters(self):
        self.assertDictEqual(
            {
                'read_time': 2000,
                'write_bytes': 600,
                'read_bytes': 500,
                'write_time': 3000,
                'read_count': 1000,
                'write_count': 2000
            }, ps.disk_io_counters())

    @patch('psutil.get_users', new=MagicMock(return_value=[STUB_USER]))
    def test_get_users(self):
        self.assertDictEqual(
            {
                'terminal': 'ttys000',
                'started': 0.0,
                'host': 'localhost',
                'name': 'bdobbs'
            }, ps.get_users()[0])
'iostat',
    'read_count, write_count, read_bytes, write_bytes, read_time, write_time'
)(1000, 2000, 500, 600, 2000, 3000)
# Fake ``psutil.get_users()`` entry.
STUB_USER = psutil._compat.namedtuple(
    'user', 'name, terminal, host, started')('bdobbs', 'ttys000', 'localhost', 0.0)
if psutil.version_info >= (0, 6, 0):
    HAS_PSUTIL_VERSION = True
else:
    # Older psutil lacks the namedtuple-based API the stubs above emulate;
    # null out every stub so version-gated tests are skipped cleanly.
    (STUB_CPU_TIMES, STUB_VIRT_MEM, STUB_SWAP_MEM, STUB_PHY_MEM_USAGE,
     STUB_DISK_PARTITION, STUB_DISK_USAGE, STUB_NETWORK_IO, STUB_DISK_IO,
     STUB_USER) = [None for val in range(9)]

STUB_PID_LIST = [0, 1, 2, 3]
# Both names alias the same mock process object used throughout the tests.
MOCK_PROC = mocked_proc = MagicMock('psutil.Process')

try:
    import utmp  # pylint: disable=W0611
    HAS_UTMP = True
except ImportError:
    HAS_UTMP = False


def _get_proc_name(proc):
    # psutil 2.x exposes ``name`` as a method; 1.x as a plain attribute.
    return proc.name() if PSUTIL2 else proc.name


def _get_proc_pid(proc):
    # Same psutil 1.x/2.x API split as ``_get_proc_name``, for ``pid``.
    return proc.pid() if PSUTIL2 else proc.pid
class DockerngTestCase(TestCase):
    '''
    Validate dockerng module. All docker-py client interaction is mocked via
    ``dockerng_mod.__context__['docker.client']``.
    '''

    def test_ps_with_host_true(self):
        '''
        Check that when dockerng.ps is called with host set to ``True``, the
        result of the ``network.interfaces`` command is included in the
        returned result.
        '''
        network_interfaces = Mock(return_value={'mocked': None})
        with patch.dict(dockerng_mod.__salt__,
                        {'network.interfaces': network_interfaces}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': MagicMock()}):
                ret = dockerng_mod.ps_(host=True)
                self.assertEqual(ret,
                                 {'host': {'interfaces': {'mocked': None}}})

    def test_ps_with_filters(self):
        '''
        Check that dockerng.ps accepts a filters parameter.
        '''
        client = MagicMock()
        with patch.dict(dockerng_mod.__context__,
                        {'docker.client': client}):
            dockerng_mod.ps_(filters={'label': 'KEY'})
            client.containers.assert_called_once_with(
                all=True,
                filters={'label': 'KEY'})

    @skipIf(_docker_py_version() is None,
            'docker-py needs to be installed for this test to run')
    @patch.object(dockerng_mod, '_get_exec_driver')
    def test_check_mine_cache_is_refreshed_on_container_change_event(self, _):
        '''
        Every command that might modify docker containers state
        should trigger an update through ``mine.send``.
        '''
        for command_name, args in (('create', ()),
                                   ('rm_', ()),
                                   ('kill', ()),
                                   ('pause', ()),
                                   ('signal_', ('KILL',)),
                                   ('start', ()),
                                   ('stop', ()),
                                   ('unpause', ()),
                                   ('_run', ('command',)),
                                   ('_script', ('command',)),
                                   ):
            mine_send = Mock()
            command = getattr(dockerng_mod, command_name)
            docker_client = MagicMock()
            docker_client.api_version = '1.12'
            with patch.dict(dockerng_mod.__salt__,
                            {'mine.send': mine_send,
                             'container_resource.run': MagicMock(),
                             'cp.cache_file': MagicMock(return_value=False)}):
                with patch.dict(dockerng_mod.__context__,
                                {'docker.client': docker_client}):
                    command('container', *args)
            mine_send.assert_called_with('dockerng.ps', verbose=True,
                                         all=True, host=True)

    @skipIf(_docker_py_version() < (1, 4, 0),
            'docker module must be installed to run this test or is too old. >=1.4.0')
    @patch.object(dockerng_mod, 'images', MagicMock())
    @patch.object(dockerng_mod, 'inspect_image')
    @patch.object(dockerng_mod, 'version',
                  Mock(return_value={'ApiVersion': '1.19'}))
    def test_create_with_arg_cmd(self, *args):
        '''
        When cmd argument is passed check it is renamed to command.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.19'
        client.create_host_config.return_value = host_config
        client.create_container.return_value = {}
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.create('image', cmd='ls', name='ctn')
        # ``cmd`` must have been renamed to docker-py's ``command``.
        client.create_container.assert_called_once_with(
            command='ls',
            host_config=host_config,
            image='image',
            name='ctn')

    @skipIf(_docker_py_version() < (1, 4, 0),
            'docker module must be installed to run this test or is too old. >=1.4.0')
    @patch.object(dockerng_mod, 'images', MagicMock())
    @patch.object(dockerng_mod, 'inspect_image')
    @patch.object(dockerng_mod, 'version',
                  Mock(return_value={'ApiVersion': '1.19'}))
    def test_create_send_host_config(self, *args):
        '''
        Check host_config object is passed to create_container.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {'PublishAllPorts': True}
        client = Mock()
        client.api_version = '1.19'
        client.create_host_config.return_value = host_config
        client.create_container.return_value = {}
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.create('image', name='ctn',
                                    publish_all_ports=True)
        client.create_container.assert_called_once_with(
            host_config=host_config,
            image='image',
            name='ctn')

    @skipIf(_docker_py_version() < (1, 4, 0),
            'docker module must be installed to run this test or is too old. >=1.4.0')
    @patch.object(dockerng_mod, 'images', MagicMock())
    @patch.object(dockerng_mod, 'inspect_image')
    @patch.object(dockerng_mod, 'version',
                  Mock(return_value={'ApiVersion': '1.19'}))
    def test_create_with_labels_dict(self, *args):
        '''
        Create container with labels dictionary.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.19'
        client.create_host_config.return_value = host_config
        client.create_container.return_value = {}
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.create(
                    'image',
                    name='ctn',
                    labels={'KEY': 'VALUE'},
                    validate_input=True,
                )
        client.create_container.assert_called_once_with(
            labels={'KEY': 'VALUE'},
            host_config=host_config,
            image='image',
            name='ctn',
        )

    @skipIf(_docker_py_version() < (1, 4, 0),
            'docker module must be installed to run this test or is too old. >=1.4.0')
    @patch.object(dockerng_mod, 'images', MagicMock())
    @patch.object(dockerng_mod, 'inspect_image')
    @patch.object(dockerng_mod, 'version',
                  Mock(return_value={'ApiVersion': '1.19'}))
    def test_create_with_labels_list(self, *args):
        '''
        Create container with labels list.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.19'
        client.create_host_config.return_value = host_config
        client.create_container.return_value = {}
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.create(
                    'image',
                    name='ctn',
                    labels=['KEY1', 'KEY2'],
                    validate_input=True,
                )
        client.create_container.assert_called_once_with(
            labels=['KEY1', 'KEY2'],
            host_config=host_config,
            image='image',
            name='ctn',
        )

    @skipIf(_docker_py_version() < (1, 4, 0),
            'docker module must be installed to run this test or is too old. >=1.4.0')
    @patch.object(dockerng_mod, 'images', MagicMock())
    @patch.object(dockerng_mod, 'inspect_image')
    @patch.object(dockerng_mod, 'version',
                  Mock(return_value={'ApiVersion': '1.19'}))
    def test_create_with_labels_error(self, *args):
        '''
        Create container with invalid labels.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.19'
        client.create_host_config.return_value = host_config
        client.create_container.return_value = {}
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                # labels=22 is neither dict nor list; validation must reject.
                self.assertRaises(SaltInvocationError,
                                  dockerng_mod.create,
                                  'image',
                                  name='ctn',
                                  labels=22,
                                  validate_input=True,
                                  )

    @skipIf(_docker_py_version() < (1, 4, 0),
            'docker module must be installed to run this test or is too old. >=1.4.0')
    @patch.object(dockerng_mod, 'images', MagicMock())
    @patch.object(dockerng_mod, 'inspect_image')
    @patch.object(dockerng_mod, 'version',
                  Mock(return_value={'ApiVersion': '1.19'}))
    def test_create_with_labels_dictlist(self, *args):
        '''
        Create container with labels dictlist.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.19'
        client.create_host_config.return_value = host_config
        client.create_container.return_value = {}
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.create(
                    'image',
                    name='ctn',
                    labels=[{'KEY1': 'VALUE1'}, {'KEY2': 'VALUE2'}],
                    validate_input=True,
                )
        # A list of single-entry dicts is merged into one labels dict.
        client.create_container.assert_called_once_with(
            labels={'KEY1': 'VALUE1', 'KEY2': 'VALUE2'},
            host_config=host_config,
            image='image',
            name='ctn',
        )

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_list_networks(self, *args):
        '''
        test list networks.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.networks(
                    names=['foo'],
                    ids=['01234'],
                )
        client.networks.assert_called_once_with(
            names=['foo'],
            ids=['01234'],
        )

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_create_network(self, *args):
        '''
        test create network.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.create_network(
                    'foo',
                    driver='bridge',
                )
        client.create_network.assert_called_once_with(
            'foo',
            driver='bridge',
        )

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_remove_network(self, *args):
        '''
        test remove network.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.remove_network('foo')
        client.remove_network.assert_called_once_with('foo')

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_inspect_network(self, *args):
        '''
        test inspect network.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.inspect_network('foo')
        client.inspect_network.assert_called_once_with('foo')

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_connect_container_to_network(self, *args):
        '''
        test connect container to network.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.connect_container_to_network('container', 'foo')
        client.connect_container_to_network.assert_called_once_with(
            'container', 'foo')

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_disconnect_container_from_network(self, *args):
        '''
        test disconnect container from network.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        host_config = {}
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.disconnect_container_from_network('container',
                                                               'foo')
        client.disconnect_container_from_network.assert_called_once_with(
            'container', 'foo')

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_list_volumes(self, *args):
        '''
        test list volumes.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.volumes(
                    filters={'dangling': [True]},
                )
        client.volumes.assert_called_once_with(
            filters={'dangling': [True]},
        )

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_create_volume(self, *args):
        '''
        test create volume.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.create_volume(
                    'foo',
                    driver='bridge',
                    driver_opts={},
                )
        client.create_volume.assert_called_once_with(
            'foo',
            driver='bridge',
            driver_opts={},
        )

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_remove_volume(self, *args):
        '''
        test remove volume.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.remove_volume('foo')
        client.remove_volume.assert_called_once_with('foo')

    @skipIf(_docker_py_version() < (1, 5, 0),
            'docker module must be installed to run this test or is too old. >=1.5.0')
    def test_inspect_volume(self, *args):
        '''
        test inspect volume.
        '''
        __salt__ = {'config.get': Mock(),
                    'mine.send': Mock(),
                    }
        client = Mock()
        client.api_version = '1.21'
        with patch.dict(dockerng_mod.__dict__, {'__salt__': __salt__}):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod.inspect_volume('foo')
        client.inspect_volume.assert_called_once_with('foo')

    def test_wait_success(self):
        client = Mock()
        client.api_version = '1.21'
        client.wait = Mock(return_value=0)
        # First inspect: running; second inspect (after wait): stopped.
        dockerng_inspect_container = Mock(side_effect=[
            {'State': {'Running': True}},
            {'State': {'Stopped': True}}])
        with patch.object(dockerng_mod, 'inspect_container',
                          dockerng_inspect_container):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod._clear_context()
                result = dockerng_mod.wait('foo')
        self.assertEqual(result, {'result': True,
                                  'exit_status': 0,
                                  'state': {'new': 'stopped',
                                            'old': 'running'}})

    def test_wait_fails_already_stopped(self):
        client = Mock()
        client.api_version = '1.21'
        client.wait = Mock(return_value=0)
        dockerng_inspect_container = Mock(side_effect=[
            {'State': {'Stopped': True}},
            {'State': {'Stopped': True}},
        ])
        with patch.object(dockerng_mod, 'inspect_container',
                          dockerng_inspect_container):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod._clear_context()
                result = dockerng_mod.wait('foo')
        self.assertEqual(result, {'result': False,
                                  'comment': "Container 'foo' already stopped",
                                  'exit_status': 0,
                                  'state': {'new': 'stopped',
                                            'old': 'stopped'}})

    def test_wait_success_already_stopped(self):
        client = Mock()
        client.api_version = '1.21'
        client.wait = Mock(return_value=0)
        dockerng_inspect_container = Mock(side_effect=[
            {'State': {'Stopped': True}},
            {'State': {'Stopped': True}},
        ])
        with patch.object(dockerng_mod, 'inspect_container',
                          dockerng_inspect_container):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod._clear_context()
                result = dockerng_mod.wait('foo', ignore_already_stopped=True)
        self.assertEqual(result, {'result': True,
                                  'comment': "Container 'foo' already stopped",
                                  'exit_status': 0,
                                  'state': {'new': 'stopped',
                                            'old': 'stopped'}})

    def test_wait_success_absent_container(self):
        client = Mock()
        client.api_version = '1.21'
        # Absent container: inspect raises instead of returning state.
        dockerng_inspect_container = Mock(side_effect=CommandExecutionError)
        with patch.object(dockerng_mod, 'inspect_container',
                          dockerng_inspect_container):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod._clear_context()
                result = dockerng_mod.wait('foo', ignore_already_stopped=True)
        self.assertEqual(result, {'result': True,
                                  'comment': "Container 'foo' absent"})

    def test_wait_fails_on_exit_status(self):
        client = Mock()
        client.api_version = '1.21'
        client.wait = Mock(return_value=1)
        dockerng_inspect_container = Mock(side_effect=[
            {'State': {'Running': True}},
            {'State': {'Stopped': True}}])
        with patch.object(dockerng_mod, 'inspect_container',
                          dockerng_inspect_container):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod._clear_context()
                result = dockerng_mod.wait('foo', fail_on_exit_status=True)
        self.assertEqual(result, {'result': False,
                                  'exit_status': 1,
                                  'state': {'new': 'stopped',
                                            'old': 'running'}})

    def test_wait_fails_on_exit_status_and_already_stopped(self):
        client = Mock()
        client.api_version = '1.21'
        client.wait = Mock(return_value=1)
        dockerng_inspect_container = Mock(side_effect=[
            {'State': {'Stopped': True}},
            {'State': {'Stopped': True}}])
        with patch.object(dockerng_mod, 'inspect_container',
                          dockerng_inspect_container):
            with patch.dict(dockerng_mod.__context__,
                            {'docker.client': client}):
                dockerng_mod._clear_context()
                result = dockerng_mod.wait('foo',
                                           ignore_already_stopped=True,
                                           fail_on_exit_status=True)
        self.assertEqual(result, {'result': False,
                                  'comment': "Container 'foo' already stopped",
                                  'exit_status': 1,
                                  'state': {'new': 'stopped',
                                            'old': 'stopped'}})

    def test_sls_build(self, *args):
        '''
        test build sls image.
        '''
        docker_start_mock = MagicMock(return_value={})
        docker_create_mock = MagicMock(
            return_value={'Id': 'ID', 'Name': 'NAME'})
        docker_stop_mock = MagicMock(
            return_value={'state': {'old': 'running', 'new': 'stopped'},
                          'result': True})
        docker_commit_mock = MagicMock(
            return_value={'Id': 'ID2', 'Image': 'foo', 'Time_Elapsed': 42})
        docker_sls_mock = MagicMock(return_value={
            "file_|-/etc/test.sh_|-/etc/test.sh_|-managed": {
                "comment": "File /etc/test.sh is in the correct state",
                "name": "/etc/test.sh",
                "start_time": "07:04:26.834792",
                "result": True,
                "duration": 13.492,
                "__run_num__": 0,
                "changes": {}
            },
            "test_|-always-passes_|-foo_|-succeed_without_changes": {
                "comment": "Success!",
                "name": "foo",
                "start_time": "07:04:26.848915",
                "result": True,
                "duration": 0.363,
                "__run_num__": 1,
                "changes": {}
            }
        })
        ret = None
        with patch.dict(dockerng_mod.__salt__,
                        {'dockerng.start': docker_start_mock,
                         'dockerng.create': docker_create_mock,
                         'dockerng.stop': docker_stop_mock,
                         'dockerng.commit': docker_commit_mock,
                         'dockerng.sls': docker_sls_mock}):
            ret = dockerng_mod.sls_build(
                'foo',
                mods='foo',
            )
        # The build pipeline is create -> start -> sls -> stop -> commit.
        docker_create_mock.assert_called_once_with(
            cmd='/usr/bin/sleep infinity',
            image='opensuse/python',
            interactive=True,
            tty=True)
        docker_start_mock.assert_called_once_with('ID')
        docker_sls_mock.assert_called_once_with('ID', 'foo', 'base')
        docker_stop_mock.assert_called_once_with('ID')
        docker_commit_mock.assert_called_once_with('ID', 'foo')
        self.assertEqual({'Id': 'ID2', 'Image': 'foo', 'Time_Elapsed': 42},
                         ret)

    def test_call_success(self):
        '''
        test module calling inside containers
        '''
        docker_run_all_mock = MagicMock(
            return_value={
                'retcode': 0,
                'stdout': '{"retcode": 0, "comment": "container cmd"}',
                'stderr': 'err',
            })
        docker_copy_to_mock = MagicMock(return_value={'retcode': 0})
        client = Mock()
        client.put_archive = Mock()

        with nested(
                patch.dict(dockerng_mod.__opts__, {'cachedir': '/tmp'}),
                patch.dict(dockerng_mod.__salt__,
                           {'dockerng.run_all': docker_run_all_mock,
                            'dockerng.copy_to': docker_copy_to_mock}),
                patch.dict(dockerng_mod.__context__,
                           {'docker.client': client})):
            # call twice to verify tmp path later
            for i in range(2):
                ret = dockerng_mod.call('ID', 'test.arg', 1, 2, arg1='val1')

        # Check that the directory is different each time
        # [ call(name, [args]), ...
        self.assertIn('mkdir', docker_run_all_mock.mock_calls[0][1][1])
        self.assertIn('mkdir', docker_run_all_mock.mock_calls[3][1][1])
        self.assertNotEqual(docker_run_all_mock.mock_calls[0][1][1],
                            docker_run_all_mock.mock_calls[3][1][1])

        self.assertIn('salt-call', docker_run_all_mock.mock_calls[1][1][1])
        self.assertIn('salt-call', docker_run_all_mock.mock_calls[4][1][1])
        self.assertNotEqual(docker_run_all_mock.mock_calls[1][1][1],
                            docker_run_all_mock.mock_calls[4][1][1])

        # check directory cleanup
        self.assertIn('rm -rf', docker_run_all_mock.mock_calls[2][1][1])
        self.assertIn('rm -rf', docker_run_all_mock.mock_calls[5][1][1])
        self.assertNotEqual(docker_run_all_mock.mock_calls[2][1][1],
                            docker_run_all_mock.mock_calls[5][1][1])

        self.assertEqual({"retcode": 0, "comment": "container cmd"}, ret)
class PsTestCase(TestCase):
    # NOTE(review): despite the name, this class tests salt.modules.cron --
    # the name looks copy-pasted from the ps tests; confirm before renaming.

    def test__needs_change(self):
        self.assertTrue(cron._needs_change(True, False))

    def test__needs_change_random(self):
        '''
        Assert that if the new var is 'random' and old is '* that we return True
        '''
        self.assertTrue(cron._needs_change('*', 'random'))

    ## Still trying to figure this one out.
    # def test__render_tab(self):
    #     pass

    def test__get_cron_cmdstr(self):
        self.assertEqual('crontab /tmp', cron._get_cron_cmdstr(STUB_PATH))

    def test__date_time_match(self):
        '''
        Passes if a match is found on all elements. Note the conversions to strings here!
        :return:
        '''
        self.assertTrue(
            cron._date_time_match(STUB_CRON_TIMESTAMP,
                                  minute=STUB_CRON_TIMESTAMP['minute'],
                                  hour=STUB_CRON_TIMESTAMP['hour'],
                                  daymonth=STUB_CRON_TIMESTAMP['daymonth'],
                                  dayweek=STUB_CRON_TIMESTAMP['dayweek']))

    @patch('salt.modules.cron.raw_cron',
           new=MagicMock(return_value=STUB_SIMPLE_RAW_CRON))
    def test_list_tab(self):
        self.assertDictEqual(STUB_SIMPLE_CRON_DICT,
                             cron.list_tab('DUMMY_USER'))

    @patch('salt.modules.cron._write_cron_lines')
    @patch('salt.modules.cron.list_tab',
           new=MagicMock(return_value=STUB_SIMPLE_CRON_DICT))
    def test_set_special(self, write_cron_lines_mock):
        expected_write_call = call(
            'DUMMY_USER',
            ['5 0 * * * /tmp/no_script.sh\n',
             '# Lines below here are managed by Salt, do not edit\n',
             '@hourly echo Hi!\n'])
        ret = cron.set_special('DUMMY_USER', '@hourly', 'echo Hi!')
        write_cron_lines_mock.assert_has_calls(expected_write_call)

    def test__get_cron_date_time(self):
        ret = cron._get_cron_date_time(
            minute=STUB_CRON_TIMESTAMP['minute'],
            hour=STUB_CRON_TIMESTAMP['hour'],
            daymonth=STUB_CRON_TIMESTAMP['daymonth'],
            dayweek=STUB_CRON_TIMESTAMP['dayweek'],
            month=STUB_CRON_TIMESTAMP['month'])
        self.assertDictEqual(ret, STUB_CRON_TIMESTAMP)

    ## FIXME: More sophisticated _get_cron_date_time checks should be added here.

    @patch('salt.modules.cron._write_cron_lines',
           new=MagicMock(return_value={'retcode': False}))
    @patch('salt.modules.cron.raw_cron',
           new=MagicMock(return_value=STUB_SIMPLE_RAW_CRON))
    def test_set_job(self):
        # Module dunder is assigned directly so patch.dict has a dict to patch.
        cron.__grains__ = __grains__
        with patch.dict(cron.__grains__, {'os': None}):
            cron.set_job('DUMMY_USER', 1, 2, 3, 4, 5,
                         '/bin/echo NOT A DROID',
                         'WERE YOU LOOKING FOR ME?')
            expected_call = call(
                'DUMMY_USER',
                ['5 0 * * * /tmp/no_script.sh\n',
                 '# Lines below here are managed by Salt, do not edit\n',
                 '# WERE YOU LOOKING FOR ME?\n',
                 '1 2 3 4 5 /bin/echo NOT A DROID\n'])
            # NOTE(review): calling assert_called_with on ``call_args`` (a
            # call object) is a no-op that can never fail -- presumably this
            # was meant to be ``cron._write_cron_lines.assert_called_with``;
            # verify before tightening.
            cron._write_cron_lines.call_args.assert_called_with(expected_call)

    @patch('salt.modules.cron._write_cron_lines',
           new=MagicMock(return_value={'retcode': False}))
    @patch('salt.modules.cron.raw_cron',
           new=MagicMock(return_value=STUB_SIMPLE_RAW_CRON))
    def test_rm_job_is_absent(self):
        with patch.dict(cron.__grains__, {'os': None}):
            ret = cron.rm_job('DUMMY_USER', '/bin/echo NOT A DROID',
                              1, 2, 3, 4, 5)
            self.assertEqual('absent', ret)
def test_sls_build(self, *args):
    '''
    Build an image through sls_build and verify the whole
    create -> start -> sls -> stop -> commit pipeline of dockerng calls.
    '''
    # Canned results for each dockerng.* function the pipeline invokes.
    start_mock = MagicMock(return_value={})
    create_mock = MagicMock(return_value={'Id': 'ID', 'Name': 'NAME'})
    stop_mock = MagicMock(return_value={'state': {'old': 'running',
                                                  'new': 'stopped'},
                                        'result': True})
    commit_mock = MagicMock(return_value={'Id': 'ID2', 'Image': 'foo',
                                          'Time_Elapsed': 42})
    sls_mock = MagicMock(return_value={
        "file_|-/etc/test.sh_|-/etc/test.sh_|-managed": {
            "comment": "File /etc/test.sh is in the correct state",
            "name": "/etc/test.sh",
            "start_time": "07:04:26.834792",
            "result": True,
            "duration": 13.492,
            "__run_num__": 0,
            "changes": {}
        },
        "test_|-always-passes_|-foo_|-succeed_without_changes": {
            "comment": "Success!",
            "name": "foo",
            "start_time": "07:04:26.848915",
            "result": True,
            "duration": 0.363,
            "__run_num__": 1,
            "changes": {}
        }
    })
    ret = None
    stubbed_funcs = {'dockerng.start': start_mock,
                     'dockerng.create': create_mock,
                     'dockerng.stop': stop_mock,
                     'dockerng.commit': commit_mock,
                     'dockerng.sls': sls_mock}
    with patch.dict(dockerng_mod.__salt__, stubbed_funcs):
        ret = dockerng_mod.sls_build('foo', mods='foo')
    create_mock.assert_called_once_with(cmd='/usr/bin/sleep infinity',
                                        image='opensuse/python',
                                        interactive=True,
                                        tty=True)
    start_mock.assert_called_once_with('ID')
    sls_mock.assert_called_once_with('ID', 'foo', 'base')
    stop_mock.assert_called_once_with('ID')
    commit_mock.assert_called_once_with('ID', 'foo')
    self.assertEqual({'Id': 'ID2', 'Image': 'foo', 'Time_Elapsed': 42}, ret)
class LocalFSTest(TestCase):
    '''
    Validate the functions in the localfs cache
    '''

    def _create_tmp_cache_file(self, tmp_dir, serializer):
        '''
        Helper function that creates a temporary cache file using
        localfs.store. This is used to create DRY unit tests for the
        localfs cache.
        '''
        with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
            with patch.dict(localfs.__context__, {'serial': serializer}):
                localfs.store(bank='bank', key='key', data='payload data',
                              cachedir=tmp_dir)

    # 'store' function tests: 4

    @patch('os.path.isdir', MagicMock(return_value=None))
    @patch('os.makedirs', MagicMock(side_effect=OSError))
    def test_store_no_base_cache_dir(self):
        '''
        Tests that a SaltCacheError is raised when the base directory doesn't
        exist and cannot be created.
        '''
        self.assertRaises(SaltCacheError, localfs.store, bank='', key='',
                          data='', cachedir='')

    @patch('os.path.isdir', MagicMock(return_value=True))
    @patch('tempfile.mkstemp', MagicMock(return_value=(12345, 'foo')))
    def test_store_close_mkstemp_file_handle(self):
        '''
        Tests that the file descriptor that is opened by os.open during the
        mkstemp call in localfs.store is closed before calling
        salt.utils.fopen on the filename.

        This test mocks the call to mkstemp, but forces an OSError to be
        raised when the close() function is called on a file descriptor that
        doesn't exist.
        '''
        self.assertRaises(OSError, localfs.store, bank='', key='',
                          data='', cachedir='')

    @patch('os.path.isdir', MagicMock(return_value=True))
    @patch('tempfile.mkstemp', MagicMock(return_value=('one', 'two')))
    @patch('os.close', MagicMock(return_value=None))
    @patch('salt.utils.fopen', MagicMock(side_effect=IOError))
    def test_store_error_writing_cache(self):
        '''
        Tests that a SaltCacheError is raised when there is a problem writing
        to the cache file.
        '''
        self.assertRaises(SaltCacheError, localfs.store, bank='', key='',
                          data='', cachedir='')

    @destructiveTest
    def test_store_success(self):
        '''
        Tests that the store function writes the data to the serializer for
        storage.
        '''
        # Create a temporary cache dir
        tmp_dir = tempfile.mkdtemp(dir=integration.SYS_TMP_DIR)

        # Use the helper function to create the cache file using localfs.store()
        self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))

        # Read in the contents of the key.p file and assert "payload data" was written
        with salt.utils.fopen(tmp_dir + '/bank/key.p') as fh_:
            for line in fh_:
                self.assertIn('payload data', line)

    # 'fetch' function tests: 3

    @patch('os.path.isfile', MagicMock(return_value=False))
    def test_fetch_return_when_cache_file_does_not_exist(self):
        '''
        Tests that the fetch function returns None when the cache key file
        doesn't exist.
        '''
        self.assertIsNone(localfs.fetch(bank='', key='', cachedir=''))

    @patch('os.path.isfile', MagicMock(return_value=True))
    @patch('salt.utils.fopen', MagicMock(side_effect=IOError))
    def test_fetch_error_reading_cache(self):
        '''
        Tests that a SaltCacheError is raised when there is a problem reading
        the cache file.
        '''
        self.assertRaises(SaltCacheError, localfs.fetch, bank='', key='',
                          cachedir='')

    @destructiveTest
    def test_fetch_success(self):
        '''
        Tests that the fetch function is able to read the cache file and
        return its data.
        '''
        # Create a temporary cache dir
        tmp_dir = tempfile.mkdtemp(dir=integration.SYS_TMP_DIR)

        # Create a new serializer object to use in function patches
        serializer = salt.payload.Serial(self)

        # Use the helper function to create the cache file using localfs.store()
        self._create_tmp_cache_file(tmp_dir, serializer)

        # Now fetch the data from the new cache key file
        with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
            with patch.dict(localfs.__context__, {'serial': serializer}):
                self.assertIn('payload data',
                              localfs.fetch(bank='bank', key='key',
                                            cachedir=tmp_dir))

    # 'updated' function tests: 3

    @patch('os.path.isfile', MagicMock(return_value=False))
    def test_updated_return_when_cache_file_does_not_exist(self):
        '''
        Tests that the updated function returns None when the cache key file
        doesn't exist.
        '''
        self.assertIsNone(localfs.updated(bank='', key='', cachedir=''))

    @patch('os.path.isfile', MagicMock(return_value=True))
    @patch('os.path.getmtime', MagicMock(side_effect=IOError))
    def test_updated_error_when_reading_mtime(self):
        '''
        Tests that a SaltCacheError is raised when there is a problem reading
        the mtime of the cache file.
        '''
        self.assertRaises(SaltCacheError, localfs.updated, bank='', key='',
                          cachedir='')

    @destructiveTest
    def test_updated_success(self):
        '''
        Test that the updated function returns the modification time of the
        cache file
        '''
        # Create a temporary cache dir
        tmp_dir = tempfile.mkdtemp(dir=integration.SYS_TMP_DIR)

        # Use the helper function to create the cache file using localfs.store()
        self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))

        with patch('os.path.join',
                   MagicMock(return_value=tmp_dir + '/bank/key.p')):
            self.assertIsInstance(
                localfs.updated(bank='bank', key='key', cachedir=tmp_dir),
                int)

    # 'flush' function tests: 4

    @patch('os.path.isdir', MagicMock(return_value=False))
    def test_flush_key_is_none_and_no_target_dir(self):
        '''
        Tests that the flush function returns False when no key is passed in
        and the target directory doesn't exist.
        '''
        self.assertFalse(localfs.flush(bank='', key=None, cachedir=''))

    @patch('os.path.isfile', MagicMock(return_value=False))
    def test_flush_key_provided_and_no_key_file_false(self):
        '''
        Tests that the flush function returns False when a key file is
        provided but the target key file doesn't exist in the cache bank.
        '''
        self.assertFalse(localfs.flush(bank='', key='key', cachedir=''))

    @patch('os.path.isfile', MagicMock(return_value=True))
    def test_flush_success(self):
        '''
        Tests that the flush function returns True when a key file is provided
        and the target key exists in the cache bank.
        '''
        # Create a temporary cache dir
        tmp_dir = tempfile.mkdtemp(dir=integration.SYS_TMP_DIR)

        # Use the helper function to create the cache file using localfs.store()
        self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))

        # Now test the return of the flush function
        with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
            self.assertTrue(localfs.flush(bank='bank', key='key',
                                          cachedir=tmp_dir))

    @patch('os.path.isfile', MagicMock(return_value=True))
    @patch('os.remove', MagicMock(side_effect=OSError))
    def test_flush_error_raised(self):
        '''
        Tests that a SaltCacheError is raised when there is a problem removing
        the key file from the cache bank
        '''
        self.assertRaises(SaltCacheError, localfs.flush, bank='', key='key',
                          cachedir='/var/cache/salt')

    # 'list' function tests: 3

    @patch('os.path.isdir', MagicMock(return_value=False))
    def test_list_no_base_dir(self):
        '''
        Tests that the list function returns an empty list if the bank
        directory doesn't exist.
        '''
        self.assertEqual(localfs.list_(bank='', cachedir=''), [])

    @patch('os.path.isdir', MagicMock(return_value=True))
    @patch('os.listdir', MagicMock(side_effect=OSError))
    def test_list_error_raised_no_bank_directory_access(self):
        '''
        Tests that a SaltCacheError is raised when there is a problem
        accessing the cache bank directory.
        '''
        self.assertRaises(SaltCacheError, localfs.list_, bank='', cachedir='')

    @destructiveTest
    def test_list_success(self):
        '''
        Tests the return of the list function containing bank entries.
        '''
        # Create a temporary cache dir
        tmp_dir = tempfile.mkdtemp(dir=integration.SYS_TMP_DIR)

        # Use the helper function to create the cache file using localfs.store()
        self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))

        # Now test the return of the list function
        with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
            self.assertEqual(localfs.list_(bank='bank', cachedir=tmp_dir),
                             ['key.p'])

    # 'contains' function tests: 1

    @destructiveTest
    def test_contains(self):
        '''
        Test the return of the contains function when key=None and when a key
        is provided.
        '''
        # Create a temporary cache dir
        tmp_dir = tempfile.mkdtemp(dir=integration.SYS_TMP_DIR)

        # Use the helper function to create the cache file using localfs.store()
        self._create_tmp_cache_file(tmp_dir, salt.payload.Serial(self))

        # Now test the return of the contains function when key=None
        with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
            self.assertTrue(localfs.contains(bank='bank', key=None,
                                             cachedir=tmp_dir))

        # Now test the return of the contains function when key='key'
        with patch.dict(localfs.__opts__, {'cachedir': tmp_dir}):
            self.assertTrue(localfs.contains(bank='bank', key='key',
                                             cachedir=tmp_dir))
def __call__(self, *args, **kwargs): return MagicMock(return_value=self.__return_value)()
def test_create_user(self):
    '''
    Tests to create user accounts
    '''
    # NOTE(review): the nesting of the scenario blocks below was
    # reconstructed from a whitespace-mangled source -- confirm the
    # with-block structure against version-control history.

    # Existing user slot for 'username' -> creation refused outright.
    self.assertFalse(
        drac.create_user('username', 'password', 'permissions',
                         {'username': None}))

    # __execute_cmd fails -> the half-created user is rolled back.
    mock = MagicMock(return_value=False)
    with patch.object(drac, '__execute_cmd', mock):
        mock = MagicMock(return_value=None)
        with patch.object(drac, 'delete_user', mock):
            self.assertFalse(
                drac.create_user('username', 'password', 'permissions',
                                 {'username1': {'index': 1}}))

    # __execute_cmd succeeds but set_permissions fails -> rollback.
    mock = MagicMock(return_value=True)
    with patch.object(drac, '__execute_cmd', mock):
        mock = MagicMock(return_value=False)
        with patch.object(drac, 'set_permissions', mock):
            mock = MagicMock(return_value=None)
            with patch.object(drac, 'delete_user', mock):
                self.assertFalse(
                    drac.create_user('username', 'password', 'permissions',
                                     {'username1': {'index': 1}}))

    # set_permissions succeeds but change_password fails -> rollback.
    mock = MagicMock(return_value=True)
    with patch.object(drac, 'set_permissions', mock):
        mock = MagicMock(return_value=False)
        with patch.object(drac, 'change_password', mock):
            mock = MagicMock(return_value=None)
            with patch.object(drac, 'delete_user', mock):
                self.assertFalse(
                    drac.create_user('username', 'password', 'permissions',
                                     {'username1': {'index': 1}}))

    # Second __execute_cmd invocation fails -> rollback.
    mock = MagicMock(side_effect=[True, False])
    with patch.object(drac, '__execute_cmd', mock):
        mock = MagicMock(return_value=True)
        with patch.object(drac, 'set_permissions', mock):
            mock = MagicMock(return_value=True)
            with patch.object(drac, 'change_password', mock):
                mock = MagicMock(return_value=None)
                with patch.object(drac, 'delete_user', mock):
                    self.assertFalse(
                        drac.create_user('username', 'password',
                                         'permissions',
                                         {'username1': {'index': 1}}))

    # Every step succeeds -> user creation reported as successful.
    mock = MagicMock(side_effect=[True, True])
    with patch.object(drac, '__execute_cmd', mock):
        mock = MagicMock(return_value=True)
        with patch.object(drac, 'set_permissions', mock):
            mock = MagicMock(return_value=True)
            with patch.object(drac, 'change_password', mock):
                mock = MagicMock(return_value=None)
                with patch.object(drac, 'delete_user', mock):
                    self.assertTrue(
                        drac.create_user('username',
                                         'password', 'permissions',
                                         {'username1': {'index': 1}}))
def test_upgrade_success(self):
    '''
    Test system upgrade and dist-upgrade success.

    :return:
    '''
    with patch('salt.modules.zypper.__zypper__.noraise.call',
               MagicMock()) as zypper_mock:
        # Plain upgrade: list_pkgs is consulted before and after, so the
        # side_effect pair models the package set changing 1.1 -> 1.2.
        with patch('salt.modules.zypper.list_pkgs',
                   MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}])):
            ret = zypper.upgrade()
            self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
            zypper_mock.assert_any_call('update',
                                        '--auto-agree-with-licenses')

        # dist-upgrade variant of the same successful run.
        with patch('salt.modules.zypper.list_pkgs',
                   MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}])):
            ret = zypper.upgrade(dist_upgrade=True)
            self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
            zypper_mock.assert_any_call('dist-upgrade',
                                        '--auto-agree-with-licenses')

        # Dry run: nothing changes (1.1 -> 1.1) and the solver is probed
        # twice, the second time with --debug-solver.
        with patch('salt.modules.zypper.list_pkgs',
                   MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}])):
            ret = zypper.upgrade(dist_upgrade=True, dryrun=True)
            zypper_mock.assert_any_call('dist-upgrade',
                                        '--auto-agree-with-licenses',
                                        '--dry-run')
            zypper_mock.assert_any_call('dist-upgrade',
                                        '--auto-agree-with-licenses',
                                        '--dry-run', '--debug-solver')

        # Dry run restricted to specific repos, vendor changes disallowed.
        with patch('salt.modules.zypper.list_pkgs',
                   MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}])):
            ret = zypper.upgrade(dist_upgrade=True, dryrun=True,
                                 fromrepo=["Dummy", "Dummy2"],
                                 novendorchange=True)
            zypper_mock.assert_any_call('dist-upgrade',
                                        '--auto-agree-with-licenses',
                                        '--dry-run',
                                        '--from', "Dummy",
                                        '--from', 'Dummy2',
                                        '--no-allow-vendor-change')
            zypper_mock.assert_any_call('dist-upgrade',
                                        '--auto-agree-with-licenses',
                                        '--dry-run',
                                        '--from', "Dummy",
                                        '--from', 'Dummy2',
                                        '--no-allow-vendor-change',
                                        '--debug-solver')

        # Real (non-dry) dist-upgrade from specific repos succeeds.
        with patch('salt.modules.zypper.list_pkgs',
                   MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}])):
            ret = zypper.upgrade(dist_upgrade=True,
                                 fromrepo=["Dummy", "Dummy2"],
                                 novendorchange=True)
            self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
            zypper_mock.assert_any_call('dist-upgrade',
                                        '--auto-agree-with-licenses',
                                        '--from', "Dummy",
                                        '--from', 'Dummy2',
                                        '--no-allow-vendor-change')
# Import Salt Testing libs from salttesting import skipIf, TestCase from salttesting.helpers import ensure_in_syspath from salttesting.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch ensure_in_syspath('../../') # Import salt libs import salt.modules.rvm as rvm rvm.__salt__ = { 'cmd.has_exec': MagicMock(return_value=True), 'config.option': MagicMock(return_value=None) } @skipIf(NO_MOCK, NO_MOCK_REASON) class TestRvmModule(TestCase): def test__rvm(self): mock = MagicMock(return_value={'retcode': 0, 'stdout': ''}) with patch.dict(rvm.__salt__, {'cmd.run_all': mock}): rvm._rvm('install', '1.9.3') mock.assert_called_once_with( '/usr/local/rvm/bin/rvm install 1.9.3', runas=None) def test__rvm_do(self): mock = MagicMock(return_value=None) with patch.object(rvm, '_rvm', new=mock): rvm._rvm_do('1.9.3', 'gemset list') mock.assert_called_once_with('1.9.3 do gemset list', runas=None)
def test_system(self): ''' Test to ensure that global network settings are configured properly ''' ret = {'name': 'salt', 'changes': {}, 'result': False, 'comment': ''} with patch.dict(network.__opts__, {"test": True}): mock = MagicMock(side_effect=[AttributeError, False, False, 'As']) with patch.dict(network.__salt__, {"ip.get_network_settings": mock}): self.assertDictEqual(network.system('salt'), ret) mock = MagicMock(side_effect=[False, True, '']) with patch.dict(network.__salt__, {"ip.build_network_settings": mock}): ret.update({ 'comment': 'Global network settings' ' are up to date.', 'result': True }) self.assertDictEqual(network.system('salt'), ret) ret.update({ 'comment': 'Global network settings are set to' ' be added.', 'result': None }) self.assertDictEqual(network.system('salt'), ret) ret.update({ 'comment': 'Global network settings are set to' ' be updated:\n--- \n+++ \n@@ -1,2 +0,0' ' @@\n-A\n-s' }) self.assertDictEqual(network.system('salt'), ret) with patch.dict(network.__opts__, {"test": False}): mock = MagicMock(side_effect=[False, False]) with patch.dict(network.__salt__, {"ip.get_network_settings": mock}): mock = MagicMock(side_effect=[True, True]) with patch.dict(network.__salt__, {"ip.build_network_settings": mock}): mock = MagicMock(side_effect=[AttributeError, True]) with patch.dict(network.__salt__, {"ip.apply_network_settings": mock}): ret.update({ 'changes': { 'network_settings': 'Added global network' ' settings.' }, 'comment': '', 'result': False }) self.assertDictEqual(network.system('salt'), ret) ret.update({ 'changes': { 'network_settings': 'Added global network' ' settings.' }, 'comment': 'Global network settings' ' are up to date.', 'result': True }) self.assertDictEqual(network.system('salt'), ret)
def test__rvm(self): mock = MagicMock(return_value={'retcode': 0, 'stdout': ''}) with patch.dict(rvm.__salt__, {'cmd.run_all': mock}): rvm._rvm('install', '1.9.3') mock.assert_called_once_with( '/usr/local/rvm/bin/rvm install 1.9.3', runas=None)
class LocalemodTestCase(TestCase):
    '''
    Test cases for salt.modules.localemod
    '''
    def test_list_avail(self):
        '''
        Test for Lists available (compiled) locales
        '''
        with patch.dict(localemod.__salt__,
                        {'cmd.run': MagicMock(return_value='A\nB')}):
            self.assertEqual(localemod.list_avail(), ['A', 'B'])

    def test_get_locale(self):
        '''
        Test for Get the current system locale
        '''
        with patch.dict(localemod.__grains__, {'os_family': ['Arch']}):
            with patch.object(localemod, '_locale_get', return_value=True):
                self.assertTrue(localemod.get_locale())

        with patch.dict(localemod.__grains__, {'os_family': ['Gentoo']}):
            with patch.dict(localemod.__salt__,
                            {'cmd.run': MagicMock(return_value='A')}):
                self.assertEqual(localemod.get_locale(), 'A')

        # "A=B" style output: the value after '=' is the locale.
        with patch.dict(localemod.__grains__, {'os_family': ['A']}):
            with patch.dict(localemod.__salt__,
                            {'cmd.run': MagicMock(return_value='A=B')}):
                self.assertEqual(localemod.get_locale(), 'B')

        # No '=' in the output -> no locale can be extracted.
        with patch.dict(localemod.__grains__, {'os_family': ['A']}):
            with patch.dict(localemod.__salt__,
                            {'cmd.run': MagicMock(return_value='A')}):
                self.assertEqual(localemod.get_locale(), '')

    def test_set_locale(self):
        '''
        Test for Sets the current system locale
        '''
        with patch.dict(localemod.__grains__, {'os_family': ['Arch']}):
            with patch.object(localemod, '_localectl_set',
                              return_value=True):
                self.assertTrue(localemod.set_locale('l'))

        # Non-zero ("truthy") retcode from cmd.retcode means failure.
        with patch.dict(localemod.__grains__, {'os_family': ['Gentoo']}):
            with patch.dict(localemod.__salt__,
                            {'cmd.retcode': MagicMock(return_value='A')}):
                self.assertFalse(localemod.set_locale('l'))

        with patch.dict(localemod.__grains__, {'os_family': ['A']}):
            self.assertTrue(localemod.set_locale('locale'))

    def test_avail(self):
        '''
        Test for Check if a locale is available
        '''
        with patch('salt.utils.locales.normalize_locale',
                   MagicMock(return_value='en_US.UTF-8 UTF-8')):
            with patch.dict(localemod.__salt__,
                            {'locale.list_avail':
                             MagicMock(return_value=['A', 'B'])}):
                self.assertTrue(localemod.avail('locale'))

    @patch('salt.utils.which', MagicMock(return_value='/some/dir/path'))
    def test_gen_locale_not_valid(self):
        '''
        Tests the return of gen_locale when the provided locale is not found
        '''
        with patch.dict(localemod.__grains__, {'os': 'Debian'}):
            with patch.dict(localemod.__salt__,
                            {'file.search':
                             MagicMock(return_value=False)}):
                self.assertFalse(localemod.gen_locale('foo'))

    @patch('salt.utils.which', MagicMock(return_value='/some/dir/path'))
    def test_gen_locale_debian(self):
        '''
        Tests the return of successful gen_locale on Debian system
        '''
        ret = {'stdout': 'saltines', 'stderr': 'biscuits',
               'retcode': 0, 'pid': 1337}
        with patch.dict(localemod.__grains__, {'os': 'Debian'}):
            with patch.dict(localemod.__salt__,
                            {'file.search': MagicMock(return_value=True),
                             'file.replace': MagicMock(return_value=True),
                             'cmd.run_all': MagicMock(return_value=ret)}):
                self.assertTrue(localemod.gen_locale('en_US.UTF-8 UTF-8'))

    @patch('salt.utils.which', MagicMock(return_value='/some/dir/path'))
    def test_gen_locale_debian_no_charmap(self):
        '''
        Tests the return of successful gen_locale on Debian system without
        a charmap
        '''
        def file_search(search, pattern, flags):
            '''
            mock file.search
            '''
            if len(pattern.split()) == 1:
                return False
            else:  # charmap was supplied
                return True

        ret = {'stdout': 'saltines', 'stderr': 'biscuits',
               'retcode': 0, 'pid': 1337}
        with patch.dict(localemod.__grains__, {'os': 'Debian'}):
            with patch.dict(localemod.__salt__,
                            {'file.search': file_search,
                             'file.replace': MagicMock(return_value=True),
                             'cmd.run_all': MagicMock(return_value=ret)}):
                self.assertTrue(localemod.gen_locale('en_US.UTF-8'))

    @patch('salt.utils.which', MagicMock(return_value='/some/dir/path'))
    @patch('os.listdir', MagicMock(return_value=['en_US']))
    def test_gen_locale_ubuntu(self):
        '''
        Test the return of successful gen_locale on Ubuntu system
        '''
        ret = {'stdout': 'saltines', 'stderr': 'biscuits',
               'retcode': 0, 'pid': 1337}
        with patch.dict(localemod.__salt__,
                        {'file.replace': MagicMock(return_value=True),
                         'file.touch': MagicMock(return_value=None),
                         'file.append': MagicMock(return_value=None),
                         'cmd.run_all': MagicMock(return_value=ret)}):
            with patch.dict(localemod.__grains__, {'os': 'Ubuntu'}):
                self.assertTrue(localemod.gen_locale('en_US.UTF-8'))

    @patch('os.listdir', MagicMock(return_value=['en_US.UTF-8']))
    def test_gen_locale_gentoo(self):
        '''
        Tests the return of successful gen_locale on Gentoo system
        '''
        ret = {'stdout': 'saltines', 'stderr': 'biscuits',
               'retcode': 0, 'pid': 1337}
        with patch.dict(localemod.__grains__, {'os_family': 'Gentoo'}):
            with patch.dict(localemod.__salt__,
                            {'file.search': MagicMock(return_value=True),
                             'file.replace': MagicMock(return_value=True),
                             'cmd.run_all': MagicMock(return_value=ret)}):
                self.assertTrue(localemod.gen_locale('en_US.UTF-8 UTF-8'))

    @patch('os.listdir', MagicMock(return_value=['en_US.UTF-8']))
    def test_gen_locale_gentoo_no_charmap(self):
        '''
        Tests the return of successful gen_locale on Gentoo system without
        a charmap
        '''
        def file_search(search, pattern, flags):
            '''
            mock file.search
            '''
            if len(pattern.split()) == 1:
                return False
            else:  # charmap was supplied
                return True

        ret = {'stdout': 'saltines', 'stderr': 'biscuits',
               'retcode': 0, 'pid': 1337}
        with patch.dict(localemod.__grains__, {'os_family': 'Gentoo'}):
            with patch.dict(localemod.__salt__,
                            {'file.search': file_search,
                             'file.replace': MagicMock(return_value=True),
                             'cmd.run_all': MagicMock(return_value=ret)}):
                self.assertTrue(localemod.gen_locale('en_US.UTF-8'))

    @patch('salt.utils.which', MagicMock(return_value='/some/dir/path'))
    @patch('os.listdir', MagicMock(return_value=['en_US']))
    def test_gen_locale(self):
        '''
        Tests the return of successful gen_locale
        '''
        ret = {'stdout': 'saltines', 'stderr': 'biscuits',
               'retcode': 0, 'pid': 1337}
        with patch.dict(localemod.__salt__,
                        {'cmd.run_all': MagicMock(return_value=ret),
                         'file.replace': MagicMock()}):
            self.assertTrue(localemod.gen_locale('en_US.UTF-8'))

    @patch('salt.utils.which', MagicMock(return_value='/some/dir/path'))
    @patch('os.listdir', MagicMock(return_value=['en_US']))
    def test_gen_locale_verbose(self):
        '''
        Tests the return of successful gen_locale
        '''
        ret = {'stdout': 'saltines', 'stderr': 'biscuits',
               'retcode': 0, 'pid': 1337}
        with patch.dict(localemod.__salt__,
                        {'cmd.run_all': MagicMock(return_value=ret),
                         'file.replace': MagicMock()}):
            # verbose=True returns the full cmd.run_all result dict
            # instead of a bare boolean.
            self.assertEqual(localemod.gen_locale('en_US.UTF-8',
                                                  verbose=True), ret)
def test__rvm_do(self): mock = MagicMock(return_value=None) with patch.object(rvm, '_rvm', new=mock): rvm._rvm_do('1.9.3', 'gemset list') mock.assert_called_once_with('1.9.3 do gemset list', runas=None)
class PostgresSchemaTestCase(TestCase): @patch.dict( SALT_STUB, { 'postgres.schema_get': Mock(return_value=None), 'postgres.schema_create': MagicMock(), }) def test_present_creation(self): ret = postgres_schema.present('dbname', 'foo') self.assertEqual( ret, { 'comment': 'Schema foo has been created in database dbname', 'changes': { 'foo': 'Present' }, 'dbname': 'dbname', 'name': 'foo', 'result': True }) self.assertEqual(SALT_STUB['postgres.schema_create'].call_count, 1) @patch.dict( SALT_STUB, { 'postgres.schema_get': Mock(return_value={'foo': { 'acl': '', 'owner': 'postgres' }}), 'postgres.schema_create': MagicMock(), }) def test_present_nocreation(self): ret = postgres_schema.present('dbname', 'foo') self.assertEqual( ret, { 'comment': 'Schema foo already exists in database dbname', 'changes': {}, 'dbname': 'dbname', 'name': 'foo', 'result': True }) self.assertEqual(SALT_STUB['postgres.schema_create'].call_count, 0) @patch.dict( SALT_STUB, { 'postgres.schema_exists': Mock(return_value=True), 'postgres.schema_remove': MagicMock(), }) def test_absent_remove(self): ret = postgres_schema.absent('dbname', 'foo') self.assertEqual( ret, { 'comment': 'Schema foo has been removed from database dbname', 'changes': { 'foo': 'Absent' }, 'dbname': 'dbname', 'name': 'foo', 'result': True }) self.assertEqual(SALT_STUB['postgres.schema_remove'].call_count, 1) @patch.dict( SALT_STUB, { 'postgres.schema_exists': Mock(return_value=False), 'postgres.schema_remove': MagicMock(), }) def test_absent_noremove(self): ret = postgres_schema.absent('dbname', 'foo') self.assertEqual( ret, { 'comment': 'Schema foo is not present in database dbname,' ' so it cannot be removed', 'changes': {}, 'dbname': 'dbname', 'name': 'foo', 'result': True }) self.assertEqual(SALT_STUB['postgres.schema_remove'].call_count, 0)
def test_install(self): mock = MagicMock(return_value={'retcode': 0}) with patch.dict(rvm.__salt__, {'cmd.run_all': mock}): rvm.install()
class ExtendedTestCase(TestCase): ''' Extended TestCase class containing additional helper methods. ''' def assertRaisesWithMessage(self, exc_type, exc_msg, func, *args, **kwargs): try: func(*args, **kwargs) self.assertFail() except Exception as exc: self.assertEqual(type(exc), exc_type) self.assertEqual(exc.message, exc_msg) @skipIf(NO_MOCK, NO_MOCK_REASON) @patch('salt.cloud.clouds.dimensiondata.__virtual__', MagicMock(return_value='dimensiondata')) class DimensionDataTestCase(ExtendedTestCase): ''' Unit TestCase for salt.cloud.clouds.dimensiondata module. ''' def test_avail_images_call(self): ''' Tests that a SaltCloudSystemExit is raised when trying to call avail_images with --action or -a. ''' self.assertRaises( SaltCloudSystemExit, dimensiondata.avail_images, call='action' )
def setUp(self): parted.__salt__ = {'cmd.run': MagicMock()} self.cmdrun = parted.__salt__['cmd.run'] self.maxDiff = None