def test_existing_binary_in_windows_pathext(self, osaccess):
    # We define the side_effect attribute on the mocked object in order to
    # specify which calls return which values. First call to os.access
    # returns X, the second Y, the third Z, etc...
    osaccess.side_effect = [
        # The first os.access should return False (the abspath one)
        False,
        # The second, iterating through $PATH, should also return False,
        # still checking for Linux
        False,
        # We will now also return False three times so we get a .CMD back from
        # the function, see PATHEXT below.
        # Lastly return True, this is the windows check.
        False, False, False,
        True
    ]
    # Let's patch os.environ to provide a custom PATH variable
    with patch.dict(os.environ, {'PATH': '/bin',
                                 'PATHEXT': '.COM;.EXE;.BAT;.CMD;.VBS;'
                                            '.VBE;.JS;.JSE;.WSF;.WSH;.MSC;.PY'}):
        # Let's also patch is_windows to return True
        with patch('salt.utils.is_windows', lambda: True):
            with patch('os.path.isfile', lambda x: True):
                self.assertEqual(
                    salt.utils.which('this-binary-exists-under-windows'),
                    # The returned path should carry the .CMD extension
                    # picked up from PATHEXT
                    '/bin/this-binary-exists-under-windows.CMD'
                )
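# Reference sketch (illustrative, not part of the Salt suite): a MagicMock
# configured with a side_effect list returns one item per call, which is how
# the osaccess mock above steps through its False/True sequence. The names
# below are made up for the example.
from unittest.mock import MagicMock  # the external ``mock`` backport behaves the same

osaccess_demo = MagicMock(side_effect=[False, True])
assert osaccess_demo('/bin/foo', 0) is False      # first call -> first item
assert osaccess_demo('/bin/foo.CMD', 0) is True   # second call -> second item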
def test_proxy_minion_daemon_hash_type_verified(self): ''' Verify if ProxyMinion is verifying hash_type config option. :return: ''' def _create_proxy_minion(): ''' Create proxy minion instance :return: ''' obj = daemons.ProxyMinion() obj.config = {'user': '******', 'hash_type': alg} for attr in ['minion', 'start_log_info', 'prepare', 'shutdown', 'tune_in']: setattr(obj, attr, MagicMock()) obj.minion.restart = False return obj _logger = LoggerMock() with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)): with patch('salt.cli.daemons.log', _logger): for alg in ['md5', 'sha1']: _create_proxy_minion().start() self.assertTrue(_logger.messages) self.assertTrue(_logger.has_message('Do not use {alg}'.format(alg=alg), log_type='warning')) _logger.reset() for alg in ['sha224', 'sha256', 'sha384', 'sha512']: _create_proxy_minion().start() self.assertTrue(_logger.messages) self.assertFalse(_logger.has_message('Do not use '))
def test_push_success(self): ''' Test if push succeeds. ''' path = '/srv/salt/saltines' file_data = '' mock_buf_size = len(file_data) mock_id = 'You don\'t need to see his identification.' ret = True class MockChannel(object): @staticmethod def factory(__opts__): return MockChannel() def send(self, load): return 'channel info' class MockAuth(object): def gen_token(self, salt): return 'token info' def mock_auth_factory(): return MockAuth() with patch('salt.transport.Channel', MockChannel): with patch('salt.modules.cp._auth', mock_auth_factory): with patch('salt.utils.fopen', mock_open(read_data=file_data)): with patch.dict(cp.__opts__, {'file_buffer_size': mock_buf_size, 'id': mock_id}): self.assertEqual(cp.push(path), ret)
def test_manage_devices_just_cd(self): ''' Tests that when adding IDE/CD drives, controller keys will be in the apparent safe-range on ESX 5.5 but randomly generated on other versions (i.e. 6) ''' device_map = { 'ide': { 'IDE 0': {}, 'IDE 1': {} }, 'cd': { 'CD/DVD Drive 1': {'controller': 'IDE 0'} } } with patch('salt.cloud.clouds.vmware.get_vcenter_version', return_value='VMware ESXi 5.5.0'): specs = vmware._manage_devices(device_map, vm=None)['device_specs'] self.assertEqual(specs[0].device.key, vmware.SAFE_ESX_5_5_CONTROLLER_KEY_INDEX) self.assertEqual(specs[1].device.key, vmware.SAFE_ESX_5_5_CONTROLLER_KEY_INDEX+1) self.assertEqual(specs[2].device.controllerKey, vmware.SAFE_ESX_5_5_CONTROLLER_KEY_INDEX) with patch('salt.cloud.clouds.vmware.get_vcenter_version', return_value='VMware ESXi 6'): with patch('salt.cloud.clouds.vmware.randint', return_value=100) as first_key: specs = vmware._manage_devices(device_map, vm=None)['device_specs'] self.assertEqual(specs[0].device.key, first_key.return_value) self.assertEqual(specs[2].device.controllerKey, first_key.return_value)
def test_master_daemon_hash_type_verified(self): ''' Verify if Master is verifying hash_type config option. :return: ''' def _create_master(): ''' Create master instance :return: ''' master = daemons.Master() master.config = {'user': '******', 'hash_type': alg} for attr in ['master', 'start_log_info', 'prepare']: setattr(master, attr, MagicMock()) return master _logger = LoggerMock() with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)): with patch('salt.cli.daemons.log', _logger): for alg in ['md5', 'sha1']: _create_master().start() self.assertTrue(_logger.messages) self.assertTrue(_logger.has_message('Do not use {alg}'.format(alg=alg), log_type='warning')) _logger.reset() for alg in ['sha224', 'sha256', 'sha384', 'sha512']: _create_master().start() self.assertTrue(_logger.messages) self.assertFalse(_logger.has_message('Do not use '))
def test_delete_object(self): ''' Deleting an object from the store. :return: ''' with patch("gzip.open", MagicMock()): with patch("csv.reader", MagicMock(return_value=iter([[], ['foo:int', 'bar:str', 'spam:float'], ['123', 'test', '0.123'], ['234', 'another', '0.456']]))): class InterceptedCsvDB(CsvDB): def __init__(self, path): CsvDB.__init__(self, path) self._remained = list() def store(self, obj, distinct=False): self._remained.append(obj) csvdb = InterceptedCsvDB('/foobar') csvdb.open() csvdb.create_table_from_object = MagicMock() csvdb.flush = MagicMock() assert csvdb.delete(FoobarEntity, eq={'foo': 123}) is True assert len(csvdb._remained) == 1 assert csvdb._remained[0].foo == 234 assert csvdb._remained[0].bar == 'another' assert csvdb._remained[0].spam == 0.456
def test_upgrade_failure(self): ''' Test system upgrade failure. :return: ''' zypper_out = ''' Loading repository data... Reading installed packages... Computing distribution upgrade... Use 'zypper repos' to get the list of defined repositories. Repository 'DUMMY' not found by its alias, number, or URI. ''' class FailingZypperDummy(object): def __init__(self): self.stdout = zypper_out self.stderr = "" self.pid = 1234 self.exit_code = 555 self.noraise = MagicMock() self.SUCCESS_EXIT_CODES = [0] def __call__(self, *args, **kwargs): return self with patch('salt.modules.zypper.__zypper__', FailingZypperDummy()) as zypper_mock: zypper_mock.noraise.call = MagicMock() with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}])): with self.assertRaises(CommandExecutionError) as cmd_exc: ret = zypper.upgrade(dist_upgrade=True, fromrepo=["DUMMY"]) self.assertEqual(cmd_exc.exception.info['changes'], {}) self.assertEqual(cmd_exc.exception.info['result']['stdout'], zypper_out) zypper_mock.noraise.call.assert_called_with('dist-upgrade', '--auto-agree-with-licenses', '--from', 'DUMMY')
def test_cluster_found(self): with patch('salt.utils.vmware.get_managed_object_name', MagicMock(return_value='fake_dc')): with patch('salt.utils.vmware.get_mors_with_properties', MagicMock(return_value=self.mock_entries)): res = vmware.get_cluster(self.mock_dc, 'fake_cluster2') self.assertEqual(res, self.mock_cluster2)
def test_pkg(self): ''' Test to execute a packaged state run ''' mock = MagicMock(side_effect=[False, True, True, True, True, True]) with patch.object(os.path, 'isfile', mock): self.assertEqual(state.pkg("/tmp/state_pkg.tgz", "", "md5"), {}) mock = MagicMock(side_effect=[False, 0, 0, 0, 0]) with patch.object(salt.utils, 'get_hash', mock): self.assertDictEqual(state.pkg("/tmp/state_pkg.tgz", "", "md5"), {}) self.assertDictEqual(state.pkg("/tmp/state_pkg.tgz", 0, "md5"), {}) MockTarFile.path = "" MockJson.flag = True with patch('salt.utils.fopen', mock_open()): self.assertListEqual(state.pkg("/tmp/state_pkg.tgz", 0, "md5"), [True]) MockTarFile.path = "" MockJson.flag = False with patch('salt.utils.fopen', mock_open()): self.assertTrue(state.pkg("/tmp/state_pkg.tgz", 0, "md5"))
def test_sdecode(self): b = '\xe7\xb9\x81\xe4\xbd\x93' if six.PY2 else bytes((0xe7, 0xb9, 0x81, 0xe4, 0xbd, 0x93)) u = u'\u7e41\u4f53' with patch('salt.utils.locales.get_encodings', return_value=['ascii']): self.assertEqual(locales.sdecode(b), b) # no decode with patch('salt.utils.locales.get_encodings', return_value=['utf-8']): self.assertEqual(locales.sdecode(b), u)
def test_freebsd_remotes_on(self): with patch('salt.utils.is_sunos', lambda: False): with patch('salt.utils.is_freebsd', lambda: True): with patch('subprocess.check_output', return_value=FREEBSD_SOCKSTAT): remotes = network._freebsd_remotes_on('4506', 'remote') self.assertEqual(remotes, set(['127.0.0.1']))
def test_remove(self): ''' Tests to remove the specified kernel module ''' mod = 'cheese' err_msg = 'Cannot find module: it has been eaten' mock_persist = MagicMock(return_value=set([mod])) mock_lsmod = MagicMock(return_value=[{'size': 100, 'module': None, 'depcount': 10, 'deps': None}]) mock_run_all_0 = MagicMock(return_value={'retcode': 0}) mock_run_all_1 = MagicMock(return_value={'retcode': 1, 'stderr': err_msg}) with patch('salt.modules.kmod._remove_persistent_module', mock_persist): with patch('salt.modules.kmod.lsmod', mock_lsmod): with patch.dict(kmod.__salt__, {'cmd.run_all': mock_run_all_0}): self.assertEqual([mod], kmod.remove(mod, True)) self.assertEqual([], kmod.remove(mod)) with patch.dict(kmod.__salt__, {'cmd.run_all': mock_run_all_1}): self.assertEqual('Error removing module {0}: {1}'.format(mod, err_msg), kmod.remove(mod, True))
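# Reference sketch (illustrative, not part of the suite): patch.dict
# temporarily overlays entries in a mapping such as kmod.__salt__ and restores
# the original contents when the with-block exits, so each branch above can
# see a different cmd.run_all mock.
from unittest.mock import patch

dunder_salt_demo = {'cmd.run_all': 'original'}
with patch.dict(dunder_salt_demo, {'cmd.run_all': 'mocked'}):
    assert dunder_salt_demo['cmd.run_all'] == 'mocked'
assert dunder_salt_demo['cmd.run_all'] == 'original'  # restored on exit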
def test_wait_for_task_call(self): mock_wait_for_task = MagicMock() with patch('salt.utils.vmware.get_managed_object_name', MagicMock(return_value='fake_cluster')): with patch('salt.utils.vmware.wait_for_task', mock_wait_for_task): vmware.update_cluster(self.mock_cluster, self.mock_cluster_spec) mock_wait_for_task.assert_called_once_with( self.mock_task, 'fake_cluster', 'ClusterUpdateTask')
def test_cluster_not_found(self): with patch('salt.utils.vmware.get_managed_object_name', MagicMock(return_value='fake_dc')): with patch('salt.utils.vmware.get_mors_with_properties', MagicMock(return_value=self.mock_entries)): with self.assertRaises(VMwareObjectRetrievalError) as excinfo: vmware.get_cluster(self.mock_dc, 'fake_cluster') self.assertEqual(excinfo.exception.strerror, 'Cluster \'fake_cluster\' was not found in ' 'datacenter \'fake_dc\'')
def test_mod_list(self): """ Tests return a list of the loaded module names """ with patch("salt.modules.kmod._get_modules_conf", MagicMock(return_value="/etc/modules")): with patch("salt.modules.kmod._strip_module_name", MagicMock(return_value="lp")): self.assertListEqual(["lp"], kmod.mod_list(True)) mock_ret = [{"size": 100, "module": None, "depcount": 10, "deps": None}] with patch("salt.modules.kmod.lsmod", MagicMock(return_value=mock_ret)): self.assertListEqual([None], kmod.mod_list(False))
def test__get_gpg_exec(self): ''' test _get_gpg_exec ''' gpg_exec = '/bin/gpg' with patch('salt.utils.which', MagicMock(return_value=gpg_exec)): self.assertEqual(gpg._get_gpg_exec(), gpg_exec) with patch('salt.utils.which', MagicMock(return_value=False)): self.assertRaises(SaltRenderError, gpg._get_gpg_exec)
def test_defined_container_ref(self): container_ref_mock = MagicMock() with patch('salt.utils.vmware.get_root_folder', self.get_root_folder_mock): with patch(self.obj_spec_method_name, self.obj_type_mock): salt.utils.vmware.get_content( self.si_mock, self.obj_type_mock, container_ref=container_ref_mock) self.assertEqual(self.get_root_folder_mock.call_count, 0) self.create_container_view_mock.assert_called_once_with( container_ref_mock, [self.obj_type_mock], True)
def _run_suse_os_grains_tests(self, os_release_map): path_isfile_mock = MagicMock(side_effect=lambda x: x in os_release_map['files']) empty_mock = MagicMock(return_value={}) osarch_mock = MagicMock(return_value="amd64") os_release_mock = MagicMock(return_value=os_release_map.get('os_release_file')) orig_import = __import__ if six.PY2: built_in = '__builtin__' else: built_in = 'builtins' def _import_mock(name, *args): if name == 'lsb_release': raise ImportError('No module named lsb_release') return orig_import(name, *args) # Skip the first if statement with patch.object(salt.utils, 'is_proxy', MagicMock(return_value=False)): # Skip the selinux/systemd stuff (not pertinent) with patch.object(core, '_linux_bin_exists', MagicMock(return_value=False)): # Skip the init grain compilation (not pertinent) with patch.object(os.path, 'exists', path_isfile_mock): # Ensure that lsb_release fails to import with patch('{0}.__import__'.format(built_in), side_effect=_import_mock): # Skip all the /etc/*-release stuff (not pertinent) with patch.object(os.path, 'isfile', path_isfile_mock): with patch.object(core, '_parse_os_release', os_release_mock): # Mock platform.linux_distribution to give us the # OS name that we want. distro_mock = MagicMock( return_value=('SUSE test', 'version', 'arch') ) with patch("salt.utils.fopen", mock_open()) as suse_release_file: suse_release_file.return_value.__iter__.return_value = os_release_map.get('suse_release_file', '').splitlines() with patch.object(platform, 'linux_distribution', distro_mock): with patch.object(core, '_linux_gpu_data', empty_mock): with patch.object(core, '_linux_cpudata', empty_mock): with patch.object(core, '_virtual', empty_mock): # Mock the osarch with patch.dict(core.__salt__, {'cmd.run': osarch_mock}): os_grains = core.os_data() self.assertEqual(os_grains.get('os'), 'SUSE') self.assertEqual(os_grains.get('os_family'), 'Suse') self.assertEqual(os_grains.get('osfullname'), os_release_map['osfullname']) self.assertEqual(os_grains.get('oscodename'), os_release_map['oscodename']) self.assertEqual(os_grains.get('osrelease'), os_release_map['osrelease']) self.assertListEqual(list(os_grains.get('osrelease_info')), os_release_map['osrelease_info']) self.assertEqual(os_grains.get('osmajorrelease'), os_release_map['osmajorrelease'])
def test_get_service_instance_from_managed_object(self): mock_dc_name = MagicMock() mock_get_service_instance_from_managed_object = MagicMock() with patch('salt.utils.vmware.get_managed_object_name', MagicMock(return_value=mock_dc_name)): with patch( 'salt.utils.vmware.get_service_instance_from_managed_object', mock_get_service_instance_from_managed_object): vmware.get_cluster(self.mock_dc, 'fake_cluster') mock_get_service_instance_from_managed_object.assert_called_once_with( self.mock_dc, name=mock_dc_name)
def test_gen_keys(self): with patch('salt.utils.fopen', mock_open()): open_priv_wb = call('/keydir/keyname.pem', 'wb+') open_pub_wb = call('/keydir/keyname.pub', 'wb+') with patch('os.path.isfile', return_value=True): self.assertEqual(crypt.gen_keys('/keydir', 'keyname', 2048), '/keydir/keyname.pem') self.assertNotIn(open_priv_wb, salt.utils.fopen.mock_calls) self.assertNotIn(open_pub_wb, salt.utils.fopen.mock_calls) with patch('os.path.isfile', return_value=False): with patch('salt.utils.fopen', mock_open()): crypt.gen_keys('/keydir', 'keyname', 2048) salt.utils.fopen.assert_has_calls([open_priv_wb, open_pub_wb], any_order=True)
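# Reference sketch (illustrative): mock_open() records every invocation, so a
# particular call('/keydir/keyname.pem', 'wb+') can be looked up in mock_calls
# or verified with assert_has_calls, as test_gen_keys does above. io.open is
# patched here purely to keep the example self-contained.
import io
from unittest.mock import mock_open, patch, call

m = mock_open()
with patch('io.open', m):
    io.open('/keydir/keyname.pem', 'wb+')
assert call('/keydir/keyname.pem', 'wb+') in m.mock_calls
m.assert_has_calls([call('/keydir/keyname.pem', 'wb+')])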
def test_render(self): ''' test render ''' key_dir = '/etc/salt/gpgkeys' secret = 'Use more salt.' crypted = '-----BEGIN PGP MESSAGE-----!@#$%^&*()_+' with patch('salt.renderers.gpg._get_gpg_exec', MagicMock(return_value=True)): with patch('salt.renderers.gpg._get_key_dir', MagicMock(return_value=key_dir)): with patch('salt.renderers.gpg._decrypt_object', MagicMock(return_value=secret)): self.assertEqual(gpg.render(crypted), secret)
def test_get_file_str_success(self): ''' Test if get_file_str succeeds. ''' path = 'salt://saltines' dest = '/srv/salt/cheese/saltines' file_data = 'Remember to keep your files well salted.' saltenv = 'base' ret = file_data with patch('salt.utils.fopen', mock_open(read_data=file_data)): with patch('salt.modules.cp.cache_file', MagicMock(return_value=dest)): self.assertEqual(cp.get_file_str(path, dest), ret)
def test_summary(self): ''' Test to show a summary of the last puppet agent run ''' mock_lst = MagicMock(return_value=[]) with patch.dict(puppet.__salt__, {'cmd.run': mock_lst}): with patch('salt.utils.fopen', mock_open(read_data="resources: 1")): self.assertDictEqual(puppet.summary(), {'resources': 1}) with patch('salt.utils.fopen', mock_open()) as m_open: m_open.side_effect = IOError(13, 'Permission denied:', '/file') self.assertRaises(CommandExecutionError, puppet.summary)
def test_local_properties_set(self): container_ref_mock = MagicMock() with patch(self.traversal_spec_method_name, self.traversal_spec_mock): with patch(self.property_spec_method_name, self.property_spec_mock): with patch(self.obj_spec_method_name, self.obj_spec_mock): salt.utils.vmware.get_content( self.si_mock, self.obj_type_mock, container_ref=container_ref_mock, local_properties=True) self.assertEqual(self.traversal_spec_mock.call_count, 0) self.obj_spec_mock.assert_called_once_with( obj=container_ref_mock, skip=False, selectSet=None)
def test_mod_list(self): ''' Tests return a list of the loaded module names ''' with patch('salt.modules.kmod._get_modules_conf', MagicMock(return_value='/etc/modules')): with patch('salt.modules.kmod._strip_module_name', MagicMock(return_value='lp')): self.assertListEqual(['lp'], kmod.mod_list(True)) mock_ret = [{'size': 100, 'module': None, 'depcount': 10, 'deps': None}] with patch('salt.modules.kmod.lsmod', MagicMock(return_value=mock_ret)): self.assertListEqual([None], kmod.mod_list(False))
def test_summary(self):
    '''
    Test to show a summary of the last puppet agent run
    '''
    mock_lst = MagicMock(return_value=[])
    with patch.dict(puppet.__salt__, {'cmd.run': mock_lst}):
        with patch('salt.utils.fopen',
                   mock_open(read_data="resources: 1")):
            self.assertDictEqual(puppet.summary(), {'resources': 1})

        with patch('salt.utils.fopen', mock_open()) as m_open:
            # Have fopen blow up so the error path is actually exercised
            m_open.side_effect = IOError(13, 'Permission denied:', '/file')
            self.assertRaises(CommandExecutionError, puppet.summary)
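# Reference sketch (illustrative): assertion helpers have to be invoked on the
# TestCase, never on a mock. Attribute access on a MagicMock silently creates
# a child mock, so something like m.write.assertRaises(...) merely records a
# call and can never fail; that is why the IOError branch above drives the
# failure through side_effect and checks it with self.assertRaises.
from unittest.mock import MagicMock

m = MagicMock()
m.write.assertRaises(ValueError, int, 'not-a-number')  # records a call, checks nothing
assert m.write.assertRaises.called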
def test_install_already_there(self): # Given source = "/foo/bar/fubar.pkg" package_id = "com.foo.fubar.pkg" # When with patch("salt.modules.mac_pkgutil.is_installed", return_value=True): with patch("salt.modules.mac_pkgutil._install_from_path", return_value=True) as _install_from_path: mac_pkgutil.install(source, package_id) # Then self.assertEqual(_install_from_path.called, 0)
def test_install(self): # Given source = "/foo/bar/fubar.pkg" package_id = "com.foo.fubar.pkg" # When with patch("salt.modules.mac_pkgutil.is_installed", return_value=False): with patch("salt.modules.mac_pkgutil._install_from_path", return_value=True) as _install_from_path: mac_pkgutil.install(source, package_id) # Then _install_from_path.assert_called_with(source)
def test_pub(self):
    # Make sure we raise a clear error if the publisher isn't running
    with patch('os.path.exists', return_value=False):
        self.assertRaises(SaltClientError,
                          lambda: self.client.pub('*', 'test.ping'))

    # Check nodegroups behavior
    with patch('os.path.exists', return_value=True):
        with patch.dict(self.client.opts,
                        {'nodegroups':
                            {'group1': '[email protected],bar.domain.com,baz.domain.com or bl*.domain.com'}}):
            # Do we raise an exception if the nodegroup can't be matched?
            self.assertRaises(SaltInvocationError,
                              self.client.pub,
                              'non_existent_group', 'test.ping',
                              expr_form='nodegroup')
def test_cache_local_file_already_cached(self):
    '''
    Test if cache_local_file handles an already cached file.
    '''
    path = 'salt://saltines'
    dest_file = '/srv/salt/cheese/saltines'
    mock_hash = {'hsum': 'deadbeef'}
    ret = dest_file

    with patch('salt.modules.cp.hash_file', MagicMock(return_value=mock_hash)):
        with patch('salt.modules.cp.is_cached', MagicMock(return_value=dest_file)):
            self.assertEqual(cp.cache_local_file(path), ret)
def test_get_managed_object_name(self): mock_get_managed_object_name = MagicMock() with patch('salt.utils.vmware.get_managed_object_name', mock_get_managed_object_name): vmware.update_cluster(self.mock_cluster, self.mock_cluster_spec) mock_get_managed_object_name.assert_called_once_with(self.mock_cluster)
def test_init(self): with patch('salt.utils.compat.pack_dunder', return_value=False) as dunder: libcloud_dns.__init__(None) dunder.assert_called_with('salt.modules.libcloud_dns')
def test_get_all_host_flag_not_set_and_no_host_names(self): with patch('salt.utils.vmware.get_mors_with_properties', MagicMock(return_value=self.mock_prop_hosts)): res = salt.utils.vmware.get_hosts(self.mock_si) self.assertEqual(res, [])
def test_filter_hostname(self): with patch('salt.utils.vmware.get_mors_with_properties', MagicMock(return_value=self.mock_prop_hosts)): res = salt.utils.vmware.get_hosts( self.mock_si, host_names=['fake_hostname1', 'fake_hostname2']) self.assertEqual(res, [self.mock_host1, self.mock_host2])
def test_check_or_die(self): self.assertRaises(CommandNotFoundError, utils.check_or_die, None) with patch('salt.utils.which', return_value=False): self.assertRaises(CommandNotFoundError, utils.check_or_die, 'FAKE COMMAND')
def test_nested_object_is_decrypted(self): encrypted_o = self.make_nested_object(ENCRYPTED_STRING) decrypted_o = self.make_nested_object(DECRYPTED_STRING) with patch('gnupg.GPG.decrypt', return_value=self.make_decryption_mock()): new_o = self.render_sls(encrypted_o) self.assertEqual(new_o, decrypted_o)
def test__need_changes_old(self): ''' old behavior; ID has no special action - If an id is found, it will be added as a new crontab even if there is a cmd that looks like this one - no comment, delete the cmd and readd it - comment: idem ''' with patch( 'salt.modules.cron.raw_cron', new=MagicMock(side_effect=get_crontab) ): set_crontab(L + '* * * * * ls\n\n') cron.set_job( user='******', minute='*', hour='*', daymonth='*', month='*', dayweek='*', cmd='ls', comment=None, identifier=cron.SALT_CRON_NO_IDENTIFIER, ) c1 = get_crontab() set_crontab(L + '* * * * * ls\n') self.assertEqual( c1, '# Lines below here are managed by Salt, do not edit\n' '* * * * * ls\n' '\n' ) cron.set_job( user='******', minute='*', hour='*', daymonth='*', month='*', dayweek='*', cmd='ls', comment='foo', identifier=cron.SALT_CRON_NO_IDENTIFIER, ) c2 = get_crontab() self.assertEqual( c2, '# Lines below here are managed by Salt, do not edit\n' '# foo\n* * * * * ls' ) set_crontab(L + '* * * * * ls\n') cron.set_job( user='******', minute='*', hour='*', daymonth='*', month='*', dayweek='*', cmd='lsa', comment='foo', identifier='bar', ) c3 = get_crontab() self.assertEqual( c3, '# Lines below here are managed by Salt, do not edit\n' '* * * * * ls\n' '# foo SALT_CRON_IDENTIFIER:bar\n' '* * * * * lsa' ) set_crontab(L + '* * * * * ls\n') cron.set_job( user='******', minute='*', hour='*', daymonth='*', month='*', dayweek='*', cmd='foo', comment='foo', identifier='bar', ) c4 = get_crontab() self.assertEqual( c4, '# Lines below here are managed by Salt, do not edit\n' '* * * * * ls\n' '# foo SALT_CRON_IDENTIFIER:bar\n' '* * * * * foo' ) set_crontab(L + '* * * * * ls\n') cron.set_job( user='******', minute='*', hour='*', daymonth='*', month='*', dayweek='*', cmd='ls', comment='foo', identifier='bbar', ) c4 = get_crontab() self.assertEqual( c4, '# Lines below here are managed by Salt, do not edit\n' '# foo SALT_CRON_IDENTIFIER:bbar\n' '* * * * * ls' )
def test_extracted_tar(self): ''' archive.extracted tar options ''' source = 'file.tar.gz' tmp_dir = os.path.join(tempfile.gettempdir(), 'test_archive') test_tar_opts = [ '--no-anchored foo', 'v -p --opt', '-v -p', '--long-opt -z', 'z -v -weird-long-opt arg', ] ret_tar_opts = [ ['tar', 'x', '--no-anchored', 'foo', '-f'], ['tar', 'xv', '-p', '--opt', '-f'], ['tar', 'x', '-v', '-p', '-f'], ['tar', 'x', '--long-opt', '-z', '-f'], ['tar', 'xz', '-v', '-weird-long-opt', 'arg', '-f'], ] mock_true = MagicMock(return_value=True) mock_false = MagicMock(return_value=False) ret = { 'stdout': ['saltines', 'cheese'], 'stderr': 'biscuits', 'retcode': '31337', 'pid': '1337' } mock_run = MagicMock(return_value=ret) with patch('os.path.exists', mock_true): with patch.dict(archive.__opts__, { 'test': False, 'cachedir': tmp_dir }): with patch.dict( archive.__salt__, { 'file.directory_exists': mock_false, 'file.file_exists': mock_false, 'file.makedirs': mock_true, 'cmd.run_all': mock_run }): if HAS_PWD: running_as = pwd.getpwuid(os.getuid()).pw_name else: running_as = 'root' filename = os.path.join( tmp_dir, 'files/test/_tmp{0}_test_archive.tar'.format( '' if running_as == 'root' else '_{0}'.format(running_as))) for test_opts, ret_opts in zip(test_tar_opts, ret_tar_opts): ret = archive.extracted(tmp_dir, source, 'tar', tar_options=test_opts) ret_opts.append(filename) mock_run.assert_called_with(ret_opts, cwd=tmp_dir, python_shell=False)
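# Reference sketch (illustrative): assert_called_with only inspects the most
# recent call, which is why the loop above can reuse a single cmd.run_all mock
# and re-assert after every archive.extracted() invocation.
from unittest.mock import MagicMock

run = MagicMock(return_value={'retcode': 0})
run(['tar', 'xv', '-f', 'a.tar'], cwd='/tmp', python_shell=False)
run(['tar', 'xz', '-f', 'b.tar'], cwd='/tmp', python_shell=False)
run.assert_called_with(['tar', 'xz', '-f', 'b.tar'], cwd='/tmp', python_shell=False)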
def test_encrypted_string_is_decrypted(self): with patch('gnupg.GPG.decrypt', return_value=self.make_decryption_mock()): new_s = self.render_sls(ENCRYPTED_STRING) self.assertEqual(new_s, DECRYPTED_STRING)
def test_error_logged_if_process_get_owner_fails(self): with patch('salt.modules.win_status.log') as log: self.call_procs() log.warning.assert_called_once_with(ANY) self.assertIn(str(self.expected_error_code), log.warning.call_args[0][0])
def test_get_managed_object_name_call(self): mock_get_managed_object_name = MagicMock() with patch('salt.utils.vmware.get_managed_object_name', mock_get_managed_object_name): vmware.get_cluster(self.mock_dc, 'fake_cluster') mock_get_managed_object_name.assert_called_once_with(self.mock_dc)
def test_one_host_returned(self): with patch('salt.utils.vmware.get_mors_with_properties', MagicMock(return_value=[self.mock_prop_host1])): res = salt.utils.vmware.get_hosts(self.mock_si, get_all_hosts=True) self.assertEqual(res, [self.mock_host1])
def test_upgrade_success(self): ''' Test system upgrade and dist-upgrade success. :return: ''' with patch('salt.modules.zypper.__zypper__.noraise.call', MagicMock()) as zypper_mock: with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[{ "vim": "1.1" }, { "vim": "1.2" }])): ret = zypper.upgrade() self.assertDictEqual(ret, {"vim": { "old": "1.1", "new": "1.2" }}) zypper_mock.assert_any_call('update', '--auto-agree-with-licenses') with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[{ "vim": "1.1" }, { "vim": "1.2" }])): ret = zypper.upgrade(dist_upgrade=True) self.assertDictEqual(ret, {"vim": { "old": "1.1", "new": "1.2" }}) zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses') with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[{ "vim": "1.1" }, { "vim": "1.1" }])): ret = zypper.upgrade(dist_upgrade=True, dryrun=True) zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses', '--dry-run') zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses', '--dry-run', '--debug-solver') with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[{ "vim": "1.1" }, { "vim": "1.1" }])): ret = zypper.upgrade(dist_upgrade=True, dryrun=True, fromrepo=["Dummy", "Dummy2"], novendorchange=True) zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses', '--dry-run', '--from', "Dummy", '--from', 'Dummy2', '--no-allow-vendor-change') zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses', '--dry-run', '--from', "Dummy", '--from', 'Dummy2', '--no-allow-vendor-change', '--debug-solver') with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[{ "vim": "1.1" }, { "vim": "1.2" }])): ret = zypper.upgrade(dist_upgrade=True, fromrepo=["Dummy", "Dummy2"], novendorchange=True) self.assertDictEqual(ret, {"vim": { "old": "1.1", "new": "1.2" }}) zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses', '--from', "Dummy", '--from', 'Dummy2', '--no-allow-vendor-change')
def test_encrypted_string_is_unchanged_when_gpg_fails(self): d_mock = self.make_decryption_mock() d_mock.ok = False with patch('gnupg.GPG.decrypt', return_value=d_mock): new_s = self.render_sls(ENCRYPTED_STRING) self.assertEqual(new_s, ENCRYPTED_STRING)
def test_present_when_function_exists_and_permissions(self): self.conn.list_functions.return_value = {'Functions': [function_ret]} self.conn.update_function_code.return_value = function_ret self.conn.get_policy.return_value = { "Policy": json.dumps({ "Version": "2012-10-17", "Statement": [{ "Condition": { "ArnLike": { "AWS:SourceArn": "arn:aws:events:us-east-1:9999999999:rule/fooo" } }, "Action": "lambda:InvokeFunction", "Resource": "arn:aws:lambda:us-east-1:999999999999:function:testfunction", "Effect": "Allow", "Principal": { "Service": "events.amazonaws.com" }, "Sid": "AWSEvents_foo-bar999999999999" }], "Id": "default" }) } with patch.dict( funcs, {'boto_iam.get_account_id': MagicMock(return_value='1234')}): with TempZipFile() as zipfile: with patch('hashlib.sha256') as sha256: with patch('os.path.getsize', return_value=199): sha = sha256() digest = sha.digest() encoded = sha.encode() encoded.strip.return_value = function_ret['CodeSha256'] result = salt_states['boto_lambda.function_present']( 'function present', FunctionName=function_ret['FunctionName'], Runtime=function_ret['Runtime'], Role=function_ret['Role'], Handler=function_ret['Handler'], ZipFile=zipfile, Description=function_ret['Description'], Timeout=function_ret['Timeout']) self.assertTrue(result['result']) self.assertEqual( result['changes'], { 'old': { 'Permissions': { 'AWSEvents_foo-bar999999999999': { 'Action': 'lambda:InvokeFunction', 'Principal': 'events.amazonaws.com', 'SourceArn': 'arn:aws:events:us-east-1:9999999999:rule/fooo' } } }, 'new': { 'Permissions': { 'AWSEvents_foo-bar999999999999': {} } } })
def test_get_zone(self): ''' Test to get current timezone (i.e. America/Denver) ''' zone = 'MST' with patch.object(salt.utils, 'which', return_value=True): mock_cmd = MagicMock(return_value={ 'stderr': 'error', 'retcode': 1 }) with patch.dict(timezone.__salt__, {'cmd.run_all': mock_cmd}): self.assertRaises(CommandExecutionError, timezone.get_zone) mock_cmd = MagicMock(return_value={ 'stdout': 'Timezone: {0}'.format(zone), 'retcode': 0 }) with patch.dict(timezone.__salt__, {'cmd.run_all': mock_cmd}): self.assertEqual(timezone.get_zone(), zone) mock_cmd = MagicMock(return_value={ 'stdout': 'ZoneCTL: {0}'.format(zone), 'retcode': 0 }) with patch.dict(timezone.__salt__, {'cmd.run_all': mock_cmd}): self.assertRaises(CommandExecutionError, timezone.get_zone) with patch.object(salt.utils, 'which', return_value=False): file_data = '\n'.join(['#', 'A']) with patch('salt.utils.fopen', mock_open(read_data=file_data), create=True) as mfile: mfile.return_value.__iter__.return_value = file_data.splitlines( ) with patch.dict(timezone.__grains__, { 'os_family': 'Debian', 'os': 'Debian' }): self.assertEqual(timezone.get_zone(), '#\nA') with patch.dict(timezone.__grains__, { 'os_family': 'Gentoo', 'os': 'Gentoo' }): self.assertEqual(timezone.get_zone(), '') with patch.dict(timezone.__grains__, { 'os_family': 'FreeBSD', 'os': 'FreeBSD' }): zone = 'America/Denver' linkpath = '/usr/share/zoneinfo/' + zone with patch.object(os, 'readlink', return_value=linkpath): self.assertEqual(timezone.get_zone(), zone) with patch.dict(timezone.__grains__, { 'os_family': 'Solaris', 'os': 'Solaris' }): fl_data = 'TZ=Foo\n' with patch('salt.utils.fopen', mock_open(read_data=fl_data)) as mfile: mfile.return_value.__iter__.return_value = [fl_data] self.assertEqual(timezone.get_zone(), 'Foo')
def test__issue10959(self): ''' handle multi old style crontabs https://github.com/saltstack/salt/issues/10959 ''' with patch( 'salt.modules.cron.raw_cron', new=MagicMock(side_effect=get_crontab) ): set_crontab( '# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' # as managed per salt, the last lines will be merged together ! '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * samecmd\n' '* * * * * otheridcmd\n' '* * * * * otheridcmd\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n0 * * * * samecmd1\n' '1 * * * * samecmd1\n' '0 * * * * otheridcmd1\n' '1 * * * * otheridcmd1\n' # special case here, none id managed line with same command # as a later id managed line will become managed '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1\n' '# SALT_CRON_IDENTIFIER:2\n0 * * * * otheridcmd1\n' ) crons1 = cron.list_tab('root') # the filtering is done on save, we reflect in listing # the same that we have in a file, no matter what we # have self.assertEqual(crons1, { 'crons': [ {'cmd': 'ls', 'comment': 'uoo', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': 'NO ID SET', 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'too', 'comment': 'uuoo', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': 'NO ID SET', 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'zoo', 'comment': 'uuuoo', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': 'NO ID SET', 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'yoo', 'comment': '', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': 'NO ID SET', 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'xoo', 'comment': '', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': 'NO ID SET', 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'samecmd', 'comment': '', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': 'NO ID SET', 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'samecmd', 'comment': None, 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': None, 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'otheridcmd', 'comment': None, 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': None, 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'otheridcmd', 'comment': None, 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': None, 'minute': '*', 'month': '*', 'commented': False}, {'cmd': 'samecmd1', 'comment': '', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': 'NO ID SET', 'minute': '0', 'month': '*', 'commented': False}, {'cmd': 'samecmd1', 'comment': None, 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': None, 'minute': '1', 'month': '*', 'commented': False}, {'cmd': 'otheridcmd1', 'comment': None, 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': None, 'minute': '0', 'month': '*', 'commented': False}, {'cmd': 'otheridcmd1', 'comment': None, 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': None, 'minute': '1', 'month': '*', 'commented': False}, {'cmd': 'otheridcmd1', 'comment': '', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': '1', 'minute': '0', 'month': '*', 'commented': False}, {'cmd': 'otheridcmd1', 'comment': '', 'daymonth': '*', 'dayweek': '*', 'hour': '*', 'identifier': '2', 'minute': '0', 
'month': '*', 'commented': False} ], 'env': [], 'pre': [], 'special': []}) # so yood so far, no problem for now, trying to save the # multilines without id crons now inc_tests = [ ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * otheridcmd'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * otheridcmd'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * otheridcmd\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n' '0 * * * * samecmd1'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * 
too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * otheridcmd\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * otheridcmd\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n' '0 * * * * otheridcmd1'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * otheridcmd\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n' '1 * * * * otheridcmd1'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * otheridcmd\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n' '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1'), # ('# Lines below here are managed by Salt, do not edit\n' '# uoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * ls\n' '# uuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * too\n' '# uuuoo SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * zoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * yoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * xoo\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n* * * * * samecmd\n' '* * * * * otheridcmd\n' '# SALT_CRON_IDENTIFIER:NO ID SET\n1 * * * * samecmd1\n' '# SALT_CRON_IDENTIFIER:1\n0 * * * * otheridcmd1\n' '# SALT_CRON_IDENTIFIER:2\n0 * * * * otheridcmd1') ] set_crontab('') for idx, cr in enumerate(crons1['crons']): cron.set_job('root', **cr) self.assertEqual( get_crontab(), inc_tests[idx], ( "idx {0}\n'{1}'\n != \n'{2}'\n\n\n" "\'{1}\' != \'{2}\'" ).format( idx, get_crontab(), inc_tests[idx]))
def test_get_conn_with_no_auth_params_raises_invocation_error(self): with patch('boto.{0}.connect_to_region'.format(service), side_effect=boto.exception.NoAuthHandlerFound()): with self.assertRaises(SaltInvocationError): salt.utils.boto.get_connection(service)
def test_gen_jid(self): now = datetime.datetime(2002, 12, 25, 12, 00, 00, 00) with patch('datetime.datetime'): datetime.datetime.now.return_value = now ret = utils.jid.gen_jid() self.assertEqual(ret, '20021225120000000000')
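# Reference sketch (illustrative): patching 'datetime.datetime' swaps the class
# for a MagicMock on the datetime module, so now() can be pinned to a fixed
# instant and jid generation becomes deterministic, as in test_gen_jid above.
import datetime
from unittest.mock import patch

fixed = datetime.datetime(2002, 12, 25, 12, 0, 0, 0)
with patch('datetime.datetime') as mock_dt:
    mock_dt.now.return_value = fixed
    assert datetime.datetime.now() == fixed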
def test_present_with_failure(self): with patch('moto.ec2.models.VPCBackend.create_vpc', side_effect=BotoServerError(400, 'Mocked error')): vpc_present_result = salt_states['boto_vpc.present']('test', cidr_block) self.assertFalse(vpc_present_result['result']) self.assertTrue('Mocked error' in vpc_present_result['comment'])
def test_parse_zone(self):
    with patch('salt.utils.fopen', mock_open(read_data=mock_soa_zone)):
        # Only exercises the parser; the parsed zone is printed rather than
        # asserted against a fixture.
        print(mock_soa_zone)
        print(dnsutil.parse_zone('/var/lib/named/example.com.zone'))
def test_absent_with_failure(self): vpc = self._create_vpc(name='test') with patch('moto.ec2.models.VPCBackend.delete_vpc', side_effect=BotoServerError(400, 'Mocked error')): vpc_absent_result = salt_states['boto_vpc.absent']('test') self.assertFalse(vpc_absent_result['result']) self.assertTrue('Mocked error' in vpc_absent_result['comment'])
def test_get_hwclock(self): ''' Test to get current hardware clock setting (UTC or localtime) ''' mock_t = MagicMock(return_value=True) mock_f = MagicMock(return_value=False) with patch.object(salt.utils, 'which', return_value=True): with patch.object( timezone, '_timedatectl', MagicMock( return_value={'stdout': 'rtc in local tz:yes\n'})): self.assertEqual(timezone.get_hwclock(), 'localtime') with patch.object( timezone, '_timedatectl', MagicMock( return_value={'stdout': 'rtc in local tz:No\n'})): self.assertEqual(timezone.get_hwclock(), 'UTC') with patch.object(timezone, '_timedatectl', MagicMock(return_value={'stdout': 'rtc'})): self.assertRaises(CommandExecutionError, timezone.get_hwclock) with patch.object(salt.utils, 'which', return_value=False): with patch.dict(timezone.__grains__, {'os_family': 'RedHat'}): with patch.dict(timezone.__salt__, {'cmd.run': MagicMock(return_value='A')}): self.assertEqual(timezone.get_hwclock(), 'A') with patch.dict(timezone.__grains__, {'os_family': 'Suse'}): with patch.dict(timezone.__salt__, {'cmd.run': MagicMock(return_value='A')}): self.assertEqual(timezone.get_hwclock(), 'A') with patch.dict(timezone.__grains__, {'os_family': 'Debian'}): fl_data = 'UTC=yes\n' with patch('salt.utils.fopen', mock_open(read_data=fl_data)) as mfile: mfile.return_value.__iter__.return_value = [fl_data] self.assertEqual(timezone.get_hwclock(), 'UTC') fl_data = 'UTC=no\n' with patch('salt.utils.fopen', mock_open(read_data=fl_data)) as mfile: mfile.return_value.__iter__.return_value = [fl_data] self.assertEqual(timezone.get_hwclock(), 'localtime') with patch.dict(timezone.__grains__, {'os_family': 'Gentoo'}): fl_data = 'clock=UTC\n' with patch('salt.utils.fopen', mock_open(read_data=fl_data)) as mfile: mfile.return_value.__iter__.return_value = [fl_data] self.assertEqual(timezone.get_hwclock(), 'UTC') with patch.object(os.path, 'isfile', mock_t): fl_data = 'zone_info=GMT' with patch('salt.utils.fopen', mock_open(read_data=fl_data), create=True) as mfile: mfile.return_value.__iter__.return_value = fl_data.splitlines() with patch.object(salt.utils, 'which', return_value=False): with patch.dict(timezone.__grains__, {'os_family': 'Solaris'}): self.assertEqual(timezone.get_hwclock(), 'UTC') with patch.object(os.path, 'isfile', mock_t): fl_data = 'A=GMT' with patch('salt.utils.fopen', mock_open(read_data=fl_data), create=True) as mfile: mfile.return_value.__iter__.return_value = fl_data.splitlines() with patch.object(salt.utils, 'which', return_value=False): with patch.dict(timezone.__grains__, {'os_family': 'Solaris'}): self.assertEqual(timezone.get_hwclock(), 'localtime') with patch.object(salt.utils, 'which', return_value=False): with patch.dict(timezone.__grains__, {'os_family': 'Solaris'}): with patch.object(os.path, 'isfile', mock_f): self.assertEqual(timezone.get_hwclock(), 'UTC')
def test_gnu_slash_linux_in_os_name(self): ''' Test to return a list of all enabled services ''' _path_exists_map = { '/proc/1/cmdline': False } _path_isfile_map = {} _cmd_run_map = { 'dpkg --print-architecture': 'amd64' } path_exists_mock = MagicMock(side_effect=lambda x: _path_exists_map[x]) path_isfile_mock = MagicMock( side_effect=lambda x: _path_isfile_map.get(x, False) ) cmd_run_mock = MagicMock( side_effect=lambda x: _cmd_run_map[x] ) empty_mock = MagicMock(return_value={}) orig_import = __import__ def _import_mock(name, *args): if name == 'lsb_release': raise ImportError('No module named lsb_release') return orig_import(name, *args) # Skip the first if statement with patch.object(salt.utils, 'is_proxy', MagicMock(return_value=False)): # Skip the selinux/systemd stuff (not pertinent) with patch.object(core, '_linux_bin_exists', MagicMock(return_value=False)): # Skip the init grain compilation (not pertinent) with patch.object(os.path, 'exists', path_exists_mock): # Ensure that lsb_release fails to import with patch('__builtin__.__import__', side_effect=_import_mock): # Skip all the /etc/*-release stuff (not pertinent) with patch.object(os.path, 'isfile', path_isfile_mock): # Mock platform.linux_distribution to give us the # OS name that we want. distro_mock = MagicMock( return_value=('Debian GNU/Linux', '8.3', '') ) with patch.object( platform, 'linux_distribution', distro_mock): # Make a bunch of functions return empty dicts, # we don't care about these grains for the # purposes of this test. with patch.object( core, '_linux_cpudata', empty_mock): with patch.object( core, '_linux_gpu_data', empty_mock): with patch.object( core, '_memdata', empty_mock): with patch.object( core, '_hw_data', empty_mock): with patch.object( core, '_virtual', empty_mock): with patch.object( core, '_ps', empty_mock): # Mock the osarch with patch.dict( core.__salt__, {'cmd.run': cmd_run_mock}): os_grains = core.os_data() self.assertEqual(os_grains.get('os_family'), 'Debian')
def test_replace_auth_key(self): ''' Test the _replace_auth_key with some different authorized_keys examples ''' # First test a known working example, gathered from the authorized_keys file # in the integration test files. enc = 'ssh-rsa' key = 'AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+' \ 'PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNl' \ 'GEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWp' \ 'XLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal' \ '72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi' \ '/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==' options = 'command="/usr/local/lib/ssh-helper"' email = 'github.com' # Write out the authorized key to a temporary file temp_file = tempfile.NamedTemporaryFile(delete=False, mode='w+') temp_file.write('{0} {1} {2} {3}'.format(options, enc, key, email)) temp_file.close() with patch.dict(ssh.__salt__, {'user.info': MagicMock(return_value={})}): with patch('salt.modules.ssh._get_config_file', MagicMock(return_value=temp_file.name)): ssh._replace_auth_key('foo', key, config=temp_file.name) # The previous authorized key should have been replaced by the simpler one with salt.utils.fopen(temp_file.name) as _fh: file_txt = _fh.read() self.assertIn(enc, file_txt) self.assertIn(key, file_txt) self.assertNotIn(options, file_txt) self.assertNotIn(email, file_txt) # Now test a very simple key using ecdsa instead of ssh-rsa and with multiple options enc = 'ecdsa-sha2-nistp256' key = 'abcxyz' with salt.utils.fopen(temp_file.name, 'a') as _fh: _fh.write('{0} {1}'.format(enc, key)) # Replace the simple key from before with the more complicated options + new email # Option example is taken from Pull Request #39855 options = [ 'no-port-forwarding', 'no-agent-forwarding', 'no-X11-forwarding', 'command="echo \'Please login as the user \"ubuntu\" rather than the user \"root\".\'' ] email = '*****@*****.**' with patch.dict(ssh.__salt__, {'user.info': MagicMock(return_value={})}): with patch('salt.modules.ssh._get_config_file', MagicMock(return_value=temp_file.name)): ssh._replace_auth_key('foo', key, enc=enc, comment=email, options=options, config=temp_file.name) # Assert that the new line was added as-is to the file with salt.utils.fopen(temp_file.name) as _fh: file_txt = _fh.read() self.assertIn(enc, file_txt) self.assertIn(key, file_txt) self.assertIn('{0} '.format(','.join(options)), file_txt) self.assertIn(email, file_txt)
def setUp(self): with patch('salt.utils.schedule.clean_proc_dir', MagicMock(return_value=None)): self.schedule = Schedule(copy.deepcopy(DEFAULT_CONFIG), {}, returners={})
def test_suse_os_from_cpe_data(self): ''' Test if 'os' grain is parsed from CPE_NAME of /etc/os-release ''' _path_exists_map = { '/proc/1/cmdline': False } _path_isfile_map = { '/etc/os-release': True, } _os_release_map = { 'NAME': 'SLES', 'VERSION': '12-SP1', 'VERSION_ID': '12.1', 'PRETTY_NAME': 'SUSE Linux Enterprise Server 12 SP1', 'ID': 'sles', 'ANSI_COLOR': '0;32', 'CPE_NAME': 'cpe:/o:suse:sles:12:sp1' } path_exists_mock = MagicMock(side_effect=lambda x: _path_exists_map[x]) path_isfile_mock = MagicMock( side_effect=lambda x: _path_isfile_map.get(x, False) ) empty_mock = MagicMock(return_value={}) osarch_mock = MagicMock(return_value="amd64") os_release_mock = MagicMock(return_value=_os_release_map) orig_import = __import__ def _import_mock(name, *args): if name == 'lsb_release': raise ImportError('No module named lsb_release') return orig_import(name, *args) # Skip the first if statement with patch.object(salt.utils, 'is_proxy', MagicMock(return_value=False)): # Skip the selinux/systemd stuff (not pertinent) with patch.object(core, '_linux_bin_exists', MagicMock(return_value=False)): # Skip the init grain compilation (not pertinent) with patch.object(os.path, 'exists', path_exists_mock): # Ensure that lsb_release fails to import with patch('__builtin__.__import__', side_effect=_import_mock): # Skip all the /etc/*-release stuff (not pertinent) with patch.object(os.path, 'isfile', path_isfile_mock): with patch.object(core, '_parse_os_release', os_release_mock): # Mock platform.linux_distribution to give us the # OS name that we want. distro_mock = MagicMock( return_value=('SUSE Linux Enterprise Server ', '12', 'x86_64') ) with patch.object(platform, 'linux_distribution', distro_mock): with patch.object(core, '_linux_gpu_data', empty_mock): with patch.object(core, '_linux_cpudata', empty_mock): with patch.object(core, '_virtual', empty_mock): # Mock the osarch with patch.dict(core.__salt__, {'cmd.run': osarch_mock}): os_grains = core.os_data() self.assertEqual(os_grains.get('os_family'), 'Suse') self.assertEqual(os_grains.get('os'), 'SUSE')
def test_interfaces_ifconfig_solaris(self): with patch('salt.utils.is_sunos', lambda: True): interfaces = network._interfaces_ifconfig(SOLARIS) expected_interfaces = { 'ilbint0': { 'inet6': [], 'inet': [{ 'broadcast': '10.6.0.255', 'netmask': '255.255.255.0', 'address': '10.6.0.11' }], 'up': True }, 'lo0': { 'inet6': [{ 'prefixlen': '128', 'address': '::1' }], 'inet': [{ 'netmask': '255.0.0.0', 'address': '127.0.0.1' }], 'up': True }, 'ilbext0': { 'inet6': [], 'inet': [{ 'broadcast': '10.10.11.31', 'netmask': '255.255.255.224', 'address': '10.10.11.11' }, { 'broadcast': '10.10.11.31', 'netmask': '255.255.255.224', 'address': '10.10.11.12' }], 'up': True }, 'vpn0': { 'inet6': [], 'inet': [{ 'netmask': '255.0.0.0', 'address': '10.6.0.14' }], 'up': True }, 'net0': { 'inet6': [{ 'prefixlen': '10', 'address': 'fe80::221:9bff:fefd:2a22' }], 'inet': [{ 'broadcast': '10.10.10.63', 'netmask': '255.255.255.224', 'address': '10.10.10.38' }], 'up': True } } self.assertEqual(interfaces, expected_interfaces)
def test_present(self): with patch('requests.get', mock_json_response([])): with patch('requests.post') as rpost: ret = grafana_datasource.present('test', 'type', 'url', profile=profile) rpost.assert_called_once_with( 'http://grafana/api/datasources', grafana_datasource._get_json_data('test', 'type', 'url'), headers={ 'Authorization': 'Bearer token', 'Accept': 'application/json' }, timeout=3) self.assertTrue(ret['result']) self.assertEqual(ret['comment'], 'New data source test added') data = grafana_datasource._get_json_data('test', 'type', 'url') data.update({'id': 1, 'orgId': 1}) with patch('requests.get', mock_json_response([data])): with patch('requests.put') as rput: ret = grafana_datasource.present('test', 'type', 'url', profile=profile) rput.assert_called_once_with( 'http://grafana/api/datasources/1', grafana_datasource._get_json_data('test', 'type', 'url'), headers={ 'Authorization': 'Bearer token', 'Accept': 'application/json' }, timeout=3) self.assertTrue(ret['result']) self.assertEqual(ret['comment'], 'Data source test already up-to-date') self.assertEqual(ret['changes'], None) with patch('requests.put') as rput: ret = grafana_datasource.present('test', 'type', 'newurl', profile=profile) rput.assert_called_once_with( 'http://grafana/api/datasources/1', grafana_datasource._get_json_data('test', 'type', 'newurl'), headers={ 'Authorization': 'Bearer token', 'Accept': 'application/json' }, timeout=3) self.assertTrue(ret['result']) self.assertEqual(ret['comment'], 'Data source test updated') self.assertEqual(ret['changes'], { 'old': { 'url': 'url' }, 'new': { 'url': 'newurl' } })
def test_check_ipc_length(self): ''' Ensure we throw an exception if we have a too-long IPC URI ''' with patch('zmq.IPC_PATH_MAX_LEN', 1): self.assertRaises(SaltSystemExit, utils.zeromq.check_ipc_path_max_len, '1' * 1024)