def test_create_repo_ubuntu_doesnt_repo_exist(self, file_mock, execute_mock, tempfile_mock):
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"

    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(DEBIAN_DEFAUTL_TEMPLATE)):
            with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
                Repository('HDP',
                           base_url='http://download.base_url.org/rpm/',
                           repo_file_name='HDP',
                           repo_template="dummy.j2",
                           components=['a', 'b', 'c'])

    call_content = file_mock.call_args_list[0]
    template_name = call_content[0][0]
    template_content = call_content[1]['content']

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')

    self.assertEqual(file_mock.call_count, 1)
    self.assertEqual(execute_mock.call_count, 0)
def test_run_in_bash(self):

    class MockConanfile(object):
        def __init__(self):
            self.output = namedtuple("output", "info")(lambda x: None)  # @UnusedVariable
            self.env = {"PATH": "/path/to/somewhere"}

            class MyRun(object):
                def __call__(self, command, output, log_filepath=None, cwd=None,
                             subprocess=False):  # @UnusedVariable
                    self.command = command

            self._conan_runner = MyRun()

    conanfile = MockConanfile()
    with patch.object(OSInfo, "bash_path", return_value='bash'):
        tools.run_in_windows_bash(conanfile, "a_command.bat", subsystem="cygwin")
    self.assertIn("bash", conanfile._conan_runner.command)
    self.assertIn("--login -c", conanfile._conan_runner.command)
    self.assertIn("^&^& a_command.bat ^", conanfile._conan_runner.command)

    with tools.environment_append({"CONAN_BASH_PATH": "path\\to\\mybash.exe"}):
        tools.run_in_windows_bash(conanfile, "a_command.bat", subsystem="cygwin")
        self.assertIn('path\\to\\mybash.exe --login -c', conanfile._conan_runner.command)

    with tools.environment_append({"CONAN_BASH_PATH": "path with spaces\\to\\mybash.exe"}):
        tools.run_in_windows_bash(conanfile, "a_command.bat", subsystem="cygwin",
                                  with_login=False)
        self.assertIn('"path with spaces\\to\\mybash.exe" -c', conanfile._conan_runner.command)

    # try to append more env vars
    conanfile = MockConanfile()
    with patch.object(OSInfo, "bash_path", return_value='bash'):
        tools.run_in_windows_bash(conanfile, "a_command.bat", subsystem="cygwin",
                                  env={"PATH": "/other/path", "MYVAR": "34"})
    self.assertIn('^&^& PATH=\\^"/cygdrive/other/path:/cygdrive/path/to/somewhere:$PATH\\^" '
                  '^&^& MYVAR=34 ^&^& a_command.bat ^', conanfile._conan_runner.command)
def setUp(self):
    return_value = [{'Revision': '2019-02-13 08:48:55'}]
    models = [
        Parameter,
        DataCenter,
        HorizontalDataResolution,
        Instrument,
        ISOTopicCategory,
        Location,
        Platform,
        Project,
        ScienceKeyword,
        TemporalDataResolution,
        VerticalDataResolution,
    ]
    # mock get_list in all managers
    self.get_list_mocks = [
        patch.object(model.objects, 'get_list', return_value=return_value).start()
        for model in models
    ]
    # mock update in all managers
    self.update_mocks = [
        patch.object(model.objects, 'update', return_value=return_value).start()
        for model in models
    ]
def executeScript(self, path, classname=None, command=None, config_file=None,
                  # common mocks for all the scripts
                  config_overrides=None,
                  shell_mock_value=(0, "OK."),
                  os_type=('Suse', '11', 'Final'),
                  kinit_path_local="/usr/bin/kinit"):
    norm_path = os.path.normpath(path)
    src_dir = RMFTestCase._getSrcFolder()
    stack_version = norm_path.split(os.sep)[0]
    stacks_path = os.path.join(src_dir, PATH_TO_STACKS)
    configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
    script_path = os.path.join(stacks_path, norm_path)
    config_file_path = os.path.join(configs_path, config_file)

    try:
        with open(config_file_path, "r") as f:
            self.config_dict = json.load(f)
    except IOError:
        raise RuntimeError("Can not read config file: " + config_file_path)

    if config_overrides:
        for key, value in config_overrides.iteritems():
            self.config_dict[key] = value

    self.config_dict = ConfigDictionary(self.config_dict)

    # append basedir to PYTHONPATH
    scriptsdir = os.path.dirname(script_path)
    basedir = os.path.dirname(scriptsdir)
    sys.path.append(scriptsdir)

    # get method to execute
    try:
        with patch.object(platform, 'linux_distribution', return_value=os_type):
            script_module = imp.load_source(classname, script_path)
    except IOError:
        raise RuntimeError("Cannot load class %s from %s" % (classname, norm_path))

    script_class_inst = RMFTestCase._get_attr(script_module, classname)()
    method = RMFTestCase._get_attr(script_class_inst, command)

    # Reload params import, otherwise it won't change properties during next import
    if 'params' in sys.modules:
        del(sys.modules["params"])

    # run
    with Environment(basedir, test_mode=True) as RMFTestCase.env:
        with patch('resource_management.core.shell.checked_call', return_value=shell_mock_value):  # we must always mock any shell calls
            with patch.object(Script, 'get_config', return_value=self.config_dict):  # mocking configurations
                with patch.object(Script, 'get_tmp_dir', return_value="/tmp"):
                    with patch.object(Script, 'install_packages'):
                        with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local):
                            with patch.object(platform, 'linux_distribution', return_value=os_type):
                                method(RMFTestCase.env)
    sys.path.remove(scriptsdir)
def executeScript(self, path, classname=None, command=None, config_file=None,
                  # common mocks for all the scripts
                  config_overrides=None,
                  shell_mock_value=(0, "OK."),
                  os_type=('Suse', '11', 'Final'),
                  kinit_path_local="/usr/bin/kinit"):
    norm_path = os.path.normpath(path)
    src_dir = RMFTestCase._getSrcFolder()
    stack_version = norm_path.split(os.sep)[0]
    stacks_path = os.path.join(src_dir, PATH_TO_STACKS)
    configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
    script_path = os.path.join(stacks_path, norm_path)
    config_file_path = os.path.join(configs_path, config_file)

    try:
        with open(config_file_path, "r") as f:
            self.config_dict = json.load(f)
    except IOError:
        raise RuntimeError("Can not read config file: " + config_file_path)

    if config_overrides:
        for key, value in config_overrides.iteritems():
            self.config_dict[key] = value

    self.config_dict = ConfigDictionary(self.config_dict)

    # append basedir to PYTHONPATH
    scriptsdir = os.path.dirname(script_path)
    basedir = os.path.dirname(scriptsdir)
    sys.path.append(scriptsdir)

    # get method to execute
    try:
        with patch.object(platform, 'linux_distribution', return_value=os_type):
            script_module = imp.load_source(classname, script_path)
    except IOError:
        raise RuntimeError("Cannot load class %s from %s" % (classname, norm_path))

    script_class_inst = RMFTestCase._get_attr(script_module, classname)()
    method = RMFTestCase._get_attr(script_class_inst, command)

    # Reload params import, otherwise it won't change properties during next import
    if 'params' in sys.modules:
        del(sys.modules["params"])

    # run
    with Environment(basedir, test_mode=True) as RMFTestCase.env:
        with patch('resource_management.core.shell.checked_call', return_value=shell_mock_value):  # we must always mock any shell calls
            with patch.object(Script, 'get_config', return_value=self.config_dict):  # mocking configurations
                with patch.object(Script, 'install_packages'):
                    with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local):
                        with patch.object(platform, 'linux_distribution', return_value=os_type):
                            method(RMFTestCase.env)
    sys.path.remove(scriptsdir)
def test_install_hbase_master_with_version(self, get_provider):
    from ambari_commons.os_check import OSConst
    from ambari_commons.repo_manager import ManagerFactory

    pkg_manager = ManagerFactory.get_new_instance(OSConst.REDHAT_FAMILY)

    with patch.object(pkg_manager, "all_packages") as all_packages, \
         patch.object(pkg_manager, "available_packages") as available_packages, \
         patch.object(pkg_manager, "installed_packages") as installed_packages:

        all_packages.return_value = [["hbase_2_3_0_1_1234", "1.0", "testrepo"]]
        available_packages.return_value = [["hbase_2_3_0_1_1234", "1.0", "testrepo"]]
        installed_packages.return_value = [["hbase_2_3_0_1_1234", "1.0", "testrepo"]]
        get_provider.return_value = pkg_manager

        config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/hbase_with_phx.json"
        with open(config_file, "r") as f:
            json_content = json.load(f)

        version = '2.3.0.1-1234'
        # the json file is not a "well formed" install command
        json_content['roleCommand'] = 'INSTALL'
        json_content['commandParams']['version'] = version
        json_content['commandParams']['package_list'] = "[{\"name\":\"hbase_${stack_version}\",\"condition\":\"\",\"skipUpgrade\":false}]"

        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
                           classname="HbaseMaster",
                           command="install",
                           config_dict=json_content,
                           stack_version=self.STACK_VERSION,
                           target=RMFTestCase.TARGET_COMMON_SERVICES,
                           try_install=True,
                           os_type=('Redhat', '6.4', 'Final'),
                           checked_call_mocks=[(0, "OK.", "")],
                           available_packages_in_repos=['hbase_2_3_0_1_1234'])

        # only assert that the correct package is trying to be installed
        self.assertResourceCalled('Package', 'hbase_2_3_0_1_1234',
                                  retry_count=5,
                                  retry_on_repo_unavailability=False)


def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
                       classname="HbaseMaster",
                       command="configure",
                       config_file="default.json",
                       stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    self.assert_configure_default()
    self.assertNoMoreResources()
def test_create_repo_redhat(self, file_mock, is_redhat_family, is_ubuntu_family, is_suse_family):
    is_redhat_family.return_value = True
    is_ubuntu_family.return_value = False
    is_suse_family.return_value = False
    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(RHEL_SUSE_DEFAULT_TEMPLATE)):
            with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
                Repository('hadoop',
                           base_url='http://download.base_url.org/rpm/',
                           mirror_list='https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                           repo_file_name='Repository',
                           repo_template='dummy.j2')

                self.assertTrue('hadoop' in env.resources['Repository'])
                defined_arguments = env.resources['Repository']['hadoop'].arguments
                expected_arguments = {'repo_template': 'dummy.j2',
                                      'base_url': 'http://download.base_url.org/rpm/',
                                      'mirror_list': 'https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                                      'repo_file_name': 'Repository'}
                expected_template_arguments = {'base_url': 'http://download.base_url.org/rpm/',
                                               'mirror_list': 'https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                                               'repo_file_name': 'Repository'}

                self.assertEqual(defined_arguments, expected_arguments)
                self.assertEqual(file_mock.call_args[0][0], '/etc/yum.repos.d/Repository.repo')

                template_item = file_mock.call_args[1]['content']
                template = str(template_item.name)
                expected_template_arguments.update({'repo_id': 'hadoop'})

                self.assertEqual(expected_template_arguments, template_item.context._dict)
                self.assertEqual('/ambari/test/repo/dummy/path/../data/dummy.j2', template)
def test_create_repo_ubuntu_repo_exists(self, file_mock, execute_mock, tempfile_mock,
                                        checked_call_mock, is_redhat_family,
                                        is_ubuntu_family, is_suse_family):
    is_redhat_family.return_value = False
    is_ubuntu_family.return_value = True
    is_suse_family.return_value = False
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"
    checked_call_mock.return_value = 0, "The following signatures couldn't be verified because the public key is not available: NO_PUBKEY 123ABCD"

    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(DEBIAN_DEFAUTL_TEMPLATE)):
            with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
                Repository('HDP',
                           base_url='http://download.base_url.org/rpm/',
                           repo_file_name='HDP',
                           repo_template="dummy.j2",
                           components=['a', 'b', 'c'])

    call_content = file_mock.call_args_list[0]
    template_name = call_content[0][0]
    template_content = call_content[1]['content']

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')

    copy_item = str(file_mock.call_args_list[1])
    self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
    #'apt-get update -qq -o Dir::Etc::sourcelist="sources.list.d/HDP.list" -o APT::Get::List-Cleanup="0"')
    execute_command_item = execute_mock.call_args_list[0][0][0]

    self.assertEqual(checked_call_mock.call_args_list[0][0][0],
                     ['apt-get', 'update', '-qq', '-o',
                      'Dir::Etc::sourcelist=sources.list.d/HDP.list', '-o',
                      'Dir::Etc::sourceparts=-', '-o', 'APT::Get::List-Cleanup=0'])
    self.assertEqual(execute_command_item,
                     'apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 123ABCD')
def test_create_repo_ubuntu_gpg_key_wrong_output(self, file_mock, execute_mock,
                                                 tempfile_mock, checked_call_mock):
    """
    Checks that the GPG key is extracted from the output without the trailing \r character
    """
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"
    checked_call_mock.return_value = 0, "The following signatures couldn't be verified because the public key is not available: NO_PUBKEY 123ABCD\r\n"

    with Environment('/') as env:
        with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
            Repository('HDP',
                       base_url='http://download.base_url.org/rpm/',
                       repo_file_name='HDP',
                       repo_template=DEBIAN_DEFAUTL_TEMPLATE,
                       components=['a', 'b', 'c'])

    call_content = file_mock.call_args_list[0]
    template_name = call_content[0][0]
    template_content = call_content[1]['content']

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')

    copy_item = str(file_mock.call_args_list[1])
    self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")

    execute_command_item = execute_mock.call_args_list[0][0][0]

    self.assertEqual(checked_call_mock.call_args_list[0][0][0],
                     ['apt-get', 'update', '-qq', '-o',
                      'Dir::Etc::sourcelist=sources.list.d/HDP.list', '-o',
                      'Dir::Etc::sourceparts=-', '-o', 'APT::Get::List-Cleanup=0'])
    self.assertEqual(execute_command_item,
                     'apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 123ABCD')
def assertResourceCalledIgnoreEarlier(self, resource_type, name, **kwargs):
    """
    Fast forwards past earlier resources called, popping them off the list until the specified
    resource is hit. If it's not found, then an assertion is thrown that there are no more
    resources.
    """
    with patch.object(UnknownConfiguration, '__getattr__', return_value=lambda: "UnknownConfiguration()"):
        while len(RMFTestCase.env.resource_list) >= 0:
            # no more items means exit the loop
            self.assertNotEqual(len(RMFTestCase.env.resource_list), 0,
                                "The specified resource was not found in the call stack.")

            # take the next resource and try it out
            resource = RMFTestCase.env.resource_list.pop(0)
            try:
                self.assertEquals(resource_type, resource.__class__.__name__)
                self.assertEquals(name, resource.name)
                self.assertEquals(kwargs, resource.arguments)
                break
            except AssertionError:
                pass
def test_create_repo_suse(self, file_mock):
    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(RHEL_SUSE_DEFAULT_TEMPLATE)):
            Repository('hadoop',
                       base_url='http://download.base_url.org/rpm/',
                       mirror_list='https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                       repo_template="dummy.j2",
                       repo_file_name='Repository')

            self.assertTrue('hadoop' in env.resources['Repository'])
            defined_arguments = env.resources['Repository']['hadoop'].arguments
            expected_arguments = {'repo_template': 'dummy.j2',
                                  'mirror_list': 'https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                                  'base_url': 'http://download.base_url.org/rpm/',
                                  'repo_file_name': 'Repository'}
            expected_template_arguments = {'mirror_list': 'https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                                           'base_url': 'http://download.base_url.org/rpm/',
                                           'repo_file_name': 'Repository'}

            self.assertEqual(defined_arguments, expected_arguments)
            self.assertEqual(file_mock.call_args[0][0], '/etc/zypp/repos.d/Repository.repo')

            template_item = file_mock.call_args[1]['content']
            template = str(template_item.name)
            expected_template_arguments.update({'repo_id': 'hadoop'})

            self.assertEqual(expected_template_arguments, template_item.context._dict)
            self.assertEqual('dummy.j2', template)
def test_create_repo_redhat(self, file_mock, is_redhat_family, is_ubuntu_family, is_suse_family):
    is_redhat_family.return_value = True
    is_ubuntu_family.return_value = False
    is_suse_family.return_value = False
    with Environment('/') as env:
        with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
            Repository('hadoop',
                       base_url='http://download.base_url.org/rpm/',
                       mirror_list='https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                       repo_file_name='Repository',
                       repo_template=RHEL_SUSE_DEFAULT_TEMPLATE)

            self.assertTrue('hadoop' in env.resources['Repository'])
            defined_arguments = env.resources['Repository']['hadoop'].arguments
            expected_arguments = {'repo_template': RHEL_SUSE_DEFAULT_TEMPLATE,
                                  'base_url': 'http://download.base_url.org/rpm/',
                                  'mirror_list': 'https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                                  'repo_file_name': 'Repository'}
            expected_template_arguments = {'base_url': 'http://download.base_url.org/rpm/',
                                           'mirror_list': 'https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                                           'repo_file_name': 'Repository'}

            self.assertEqual(defined_arguments, expected_arguments)
            self.assertEqual(file_mock.call_args[0][0], '/etc/yum.repos.d/Repository.repo')

            template_item = file_mock.call_args[1]['content']
            template = str(template_item.name)
            expected_template_arguments.update({'repo_id': 'hadoop'})

            self.assertEqual(expected_template_arguments, template_item.context._dict)
            self.assertEqual(RHEL_SUSE_DEFAULT_TEMPLATE, template)
def test_execute_retryable_command_fail_and_succeed(self, CustomServiceOrchestrator_mock,
                                                    sleep_mock):
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)

    execution_result_fail_dict = {
        'exitcode': 1,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'FAILED'
    }
    execution_result_succ_dict = {
        'exitcode': 0,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'COMPLETED'
    }

    command = copy.deepcopy(self.retryable_command)
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
        runCommand_mock.side_effect = [execution_result_fail_dict, execution_result_succ_dict]
        actionQueue.execute_command(command)

    # assert that the python executor was started
    self.assertTrue(runCommand_mock.called)
    self.assertEqual(2, runCommand_mock.call_count)
    self.assertEqual(1, sleep_mock.call_count)
    sleep_mock.assert_any_call(2)
def test_create_repo_debian_repo_exists(self, file_mock, execute_mock, tempfile_mock):
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"

    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(DEBIAN_DEFAUTL_TEMPLATE)):
            Repository('HDP',
                       base_url='http://download.base_url.org/rpm/',
                       repo_file_name='HDP',
                       repo_template="dummy.j2",
                       components=['a', 'b', 'c'])

    template_item = file_mock.call_args_list[0]
    template_name = template_item[0][0]
    template_content = template_item[1]['content'].get_content()

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')

    copy_item = str(file_mock.call_args_list[1])
    self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")

    execute_command_item = execute_mock.call_args_list[0][0][0]
    self.assertEqual(execute_command_item,
                     'apt-get update -o Dir::Etc::sourcelist="sources.list.d/HDP.list" -o APT::Get::List-Cleanup="0"')
def test_create_repo_ubuntu_repo_exists(self, file_mock, execute_mock, tempfile_mock, checked_call_mock):
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"
    checked_call_mock.return_value = 0, "The following signatures couldn't be verified because the public key is not available: NO_PUBKEY 123ABCD"

    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(DEBIAN_DEFAUTL_TEMPLATE)):
            Repository('HDP',
                       base_url='http://download.base_url.org/rpm/',
                       repo_file_name='HDP',
                       repo_template="dummy.j2",
                       components=['a', 'b', 'c'])

    call_content = file_mock.call_args_list[0]
    template_name = call_content[0][0]
    template_content = call_content[1]['content']

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')

    copy_item = str(file_mock.call_args_list[1])
    self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
    #'apt-get update -qq -o Dir::Etc::sourcelist="sources.list.d/HDP.list" -o APT::Get::List-Cleanup="0"')
    execute_command_item = execute_mock.call_args_list[0][0][0]

    self.assertEqual(checked_call_mock.call_args_list[0][0][0],
                     ['apt-get', 'update', '-qq', '-o',
                      'Dir::Etc::sourcelist=sources.list.d/HDP.list', '-o',
                      'APT::Get::List-Cleanup=0'])
    self.assertEqual(execute_command_item,
                     'apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 123ABCD')
def test_create_repo_ubuntu_gpg_key_wrong_output(self, file_mock, execute_mock,
                                                 tempfile_mock, checked_call_mock):
    """
    Checks that the GPG key is extracted from the output without the trailing \r character
    """
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"
    checked_call_mock.return_value = 0, "The following signatures couldn't be verified because the public key is not available: NO_PUBKEY 123ABCD\r\n"

    with Environment('/') as env:
        with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
            Repository('HDP',
                       base_url='http://download.base_url.org/rpm/',
                       repo_file_name='HDP',
                       repo_template=DEBIAN_DEFAUTL_TEMPLATE,
                       components=['a', 'b', 'c'])

    call_content = file_mock.call_args_list[0]
    template_name = call_content[0][0]
    template_content = call_content[1]['content']

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')

    copy_item0 = str(file_mock.call_args_list[1])
    copy_item1 = str(file_mock.call_args_list[2])
    self.assertEqual(copy_item0, "call('/tmp/1.txt', content=StaticFile('/etc/apt/sources.list.d/HDP.list'))")
    self.assertEqual(copy_item1, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")

    execute_command_item = execute_mock.call_args_list[0][0][0]

    self.assertEqual(checked_call_mock.call_args_list[0][0][0],
                     ['apt-get', 'update', '-qq', '-o',
                      'Dir::Etc::sourcelist=sources.list.d/HDP.list', '-o',
                      'Dir::Etc::sourceparts=-', '-o', 'APT::Get::List-Cleanup=0'])
    self.assertEqual(execute_command_item,
                     ('apt-key', 'adv', '--recv-keys', '--keyserver', 'keyserver.ubuntu.com', '123ABCD'))
def test_multiple_packages(mocked_glob):
    """See NXP-26137."""
    # Fake release
    release = ReleaseMP("test", None, marketplace_conf="")

    def get_packages_list(self):
        """
        Return the list of packages to work on.
        For this test, we only need one marketplace, let's pick AI.
        """
        return ["nuxeo-ai"]

    with patch.object(ReleaseMP, "get_packages_list", get_packages_list):
        # Ensure we will only work with the AI marketplace
        assert release.get_packages_list() == ["nuxeo-ai"]

        # Use fake values to prevent bad substitution and ease testing
        release.mp_config.set("DEFAULT", "branch", "master")
        release.mp_config.set("DEFAULT", "is_final", "True")
        release.mp_config.set("DEFAULT", "other_versions", "")

        # Set several packages to upload
        packages = ["*/target/nuxeo-ai*.zip", "*/*/target/nuxeo-*.zip"]
        release.mp_config.set("nuxeo-ai", "mp_to_upload", ", ".join(packages))

        # Test the release, it will enter into .upload() and test multi packages
        release.test()

    # And now we check glob.glob() has been called 2 times
    for package in packages:
        mocked_glob.assert_any_call(package)
def test_execute_retryable_command_with_time_lapse(self, CustomServiceOrchestrator_mock,
                                                   read_stack_version_mock, sleep_mock,
                                                   time_mock):
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
    python_execution_result_dict = {
        'exitcode': 1,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'FAILED'
    }
    time_mock.side_effect = [4, 8, 10, 14, 18, 22]

    def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
        return python_execution_result_dict

    command = copy.deepcopy(self.retryable_command)
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
        runCommand_mock.side_effect = side_effect
        actionQueue.execute_command(command)

    # assert that the python executor was started
    self.assertTrue(runCommand_mock.called)
    self.assertEqual(2, runCommand_mock.call_count)
    self.assertEqual(1, sleep_mock.call_count)
    sleep_mock.assert_has_calls([call(2)], False)
    runCommand_mock.assert_has_calls([
        call(command, '/tmp/ambari-agent/output-19.txt', '/tmp/ambari-agent/errors-19.txt',
             override_output_files=True, retry=False),
        call(command, '/tmp/ambari-agent/output-19.txt', '/tmp/ambari-agent/errors-19.txt',
             override_output_files=False, retry=True)])
def test_clean_large_registry(self):
    """
    clean_registry() splits invalid_keys into multiple lists for set removal to avoid
    sending more than redis can receive
    """
    MAX_WORKERS = 41
    MAX_KEYS = 37
    # srem is called twice per invalid key batch: once for WORKERS_BY_QUEUE_KEY; once for REDIS_WORKER_KEYS
    SREM_CALL_COUNT = 2

    queue = Queue(name='foo')
    for i in range(MAX_WORKERS):
        worker = Worker([queue])
        register(worker)

    with patch('rq.worker_registration.MAX_KEYS', MAX_KEYS), \
         patch.object(queue.connection, 'pipeline', wraps=queue.connection.pipeline) as pipeline_mock:
        # clean_worker_registry creates a pipeline with a context manager. Configure the mock
        # using the context manager entry method __enter__
        pipeline_mock.return_value.__enter__.return_value.srem.return_value = None
        pipeline_mock.return_value.__enter__.return_value.execute.return_value = [0] * MAX_WORKERS

        clean_worker_registry(queue)

        expected_call_count = (ceildiv(MAX_WORKERS, MAX_KEYS)) * SREM_CALL_COUNT
        self.assertEqual(pipeline_mock.return_value.__enter__.return_value.srem.call_count,
                         expected_call_count)
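# Note: the ceildiv() helper used in the expected_call_count computation above is not defined
# in this snippet. Assuming it is the usual integer ceiling-division idiom, a minimal sketch
# could look like the following (a hypothetical reconstruction, not necessarily the test
# suite's own definition):
def ceildiv(a, b):
    """Return the ceiling of a / b using integer arithmetic only."""
    # e.g. ceildiv(41, 37) == 2, matching the two srem batches expected above
    return -(-a // b)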
def setUp(self):
    return_value = [{'Revision': '2019-02-13 08:48:55'}]
    models = [
        Parameter,
        DataCenter,
        HorizontalDataResolution,
        Instrument,
        ISOTopicCategory,
        Location,
        Platform,
        Project,
        ScienceKeyword,
        TemporalDataResolution,
        VerticalDataResolution,
    ]
    self.get_list_mocks = []
    self.update_mocks = []
    for model in models:
        mocked_vocabulary_methods = {}
        for vocabulary_name in model.objects.vocabularies:
            get_list_mock = MagicMock(return_value=return_value)
            update_mock = MagicMock(return_value=None)
            mocked_vocabulary_methods[vocabulary_name] = {
                'get_list': get_list_mock,
                'update': update_mock
            }
            self.get_list_mocks.append(get_list_mock)
            self.update_mocks.append(update_mock)
        patcher = patch.object(model.objects, 'vocabularies', mocked_vocabulary_methods)
        self.vocabulary_mocks = patcher.start()
        self.addCleanup(patcher.stop)
def test_execute_retryable_command_fail_and_succeed(self, CustomServiceOrchestrator_mock,
                                                    read_stack_version_mock, sleep_mock):
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)

    execution_result_fail_dict = {
        'exitcode': 1,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'FAILED'
    }
    execution_result_succ_dict = {
        'exitcode': 0,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'COMPLETED'
    }

    command = copy.deepcopy(self.retryable_command)
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
        runCommand_mock.side_effect = [execution_result_fail_dict, execution_result_succ_dict]
        actionQueue.execute_command(command)

    # assert that the python executor was started
    self.assertTrue(runCommand_mock.called)
    self.assertEqual(2, runCommand_mock.call_count)
    self.assertEqual(1, sleep_mock.call_count)
    sleep_mock.assert_any_call(2)
def test_execute_retryable_command(self, CustomServiceOrchestrator_mock, sleep_mock):
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
    python_execution_result_dict = {
        'exitcode': 1,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'FAILED'
    }

    def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
        return python_execution_result_dict

    command = copy.deepcopy(self.retryable_command)
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
        runCommand_mock.side_effect = side_effect
        actionQueue.execute_command(command)

    # assert that the python executor was started
    self.assertTrue(runCommand_mock.called)
    self.assertEqual(3, runCommand_mock.call_count)
    self.assertEqual(2, sleep_mock.call_count)
    sleep_mock.assert_has_calls([call(2), call(3)], False)
    runCommand_mock.assert_has_calls([
        call(command,
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt',
             override_output_files=True, retry=False),
        call(command,
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt',
             override_output_files=False, retry=True),
        call(command,
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt',
             override_output_files=False, retry=True)])
def test_create_repo_ubuntu_repo_exists(self, file_mock, execute_mock, tempfile_mock, checked_call_mock):
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"
    checked_call_mock.return_value = (
        0,
        "The following signatures couldn't be verified because the public key is not available: NO_PUBKEY 123ABCD",
    )

    with Environment("/") as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(DEBIAN_DEFAUTL_TEMPLATE)):
            Repository(
                "HDP",
                base_url="http://download.base_url.org/rpm/",
                repo_file_name="HDP",
                repo_template="dummy.j2",
                components=["a", "b", "c"],
            )

    template_item = file_mock.call_args_list[0]
    template_name = template_item[0][0]
    template_content = template_item[1]["content"].get_content()

    self.assertEquals(template_name, "/tmp/1.txt")
    self.assertEquals(template_content, "deb http://download.base_url.org/rpm/ a b c\n")

    copy_item = str(file_mock.call_args_list[1])
    self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
    #'apt-get update -qq -o Dir::Etc::sourcelist="sources.list.d/HDP.list" -o APT::Get::List-Cleanup="0"')
    execute_command_item = execute_mock.call_args_list[0][0][0]

    self.assertEqual(
        checked_call_mock.call_args_list[0][0][0],
        'apt-get update -qq -o Dir::Etc::sourcelist="sources.list.d/HDP.list" -o APT::Get::List-Cleanup="0"',
    )
    self.assertEqual(execute_command_item, "apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 123ABCD")
def test_create_repo_debian_repo_exists(self, file_mock, execute_mock, tempfile_mock):
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"

    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(DEBIAN_DEFAUTL_TEMPLATE)):
            Repository('HDP',
                       base_url='http://download.base_url.org/rpm/',
                       repo_file_name='HDP',
                       repo_template="dummy.j2",
                       components=['a', 'b', 'c'])

    template_item = file_mock.call_args_list[0]
    template_name = template_item[0][0]
    template_content = template_item[1]['content'].get_content()

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')

    copy_item = str(file_mock.call_args_list[1])
    self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")

    execute_command_item = execute_mock.call_args_list[0][0][0]
    self.assertEqual(execute_command_item,
                     'apt-get update -o Dir::Etc::sourcelist="sources.list.d/HDP.list" -o APT::Get::List-Cleanup="0"')
def test_create_repo_suse(self, file_mock): with Environment("/") as env: with patch.object(repository, "Template", new=DummyTemplate.create(RHEL_SUSE_DEFAULT_TEMPLATE)): Repository( "hadoop", base_url="http://download.base_url.org/rpm/", mirror_list="https://mirrors.base_url.org/?repo=Repository&arch=$basearch", repo_template="dummy.j2", repo_file_name="Repository", ) self.assertTrue("hadoop" in env.resources["Repository"]) defined_arguments = env.resources["Repository"]["hadoop"].arguments expected_arguments = { "repo_template": "dummy.j2", "mirror_list": "https://mirrors.base_url.org/?repo=Repository&arch=$basearch", "base_url": "http://download.base_url.org/rpm/", "repo_file_name": "Repository", } expected_template_arguments = { "mirror_list": "https://mirrors.base_url.org/?repo=Repository&arch=$basearch", "base_url": "http://download.base_url.org/rpm/", "repo_file_name": "Repository", } self.assertEqual(defined_arguments, expected_arguments) self.assertEqual(file_mock.call_args[0][0], "/etc/zypp/repos.d/Repository.repo") template_item = file_mock.call_args[1]["content"] template = str(template_item.name) expected_template_arguments.update({"repo_id": "hadoop"}) self.assertEqual(expected_template_arguments, template_item.context._dict) self.assertEqual("dummy.j2", template)
def executeScript(self, path, classname=None, command=None, config_file=None, config_dict=None,
                  # common mocks for all the scripts
                  config_overrides=None,
                  hdp_stack_version=None,
                  checked_call_mocks=itertools.cycle([(0, "OK.")]),
                  call_mocks=itertools.cycle([(0, "OK.")]),
                  os_type=('Suse', '11', 'Final'),
                  kinit_path_local="/usr/bin/kinit",
                  os_env={'PATH': '/bin'},
                  target=TARGET_STACKS,
                  mocks_dict={},
                  try_install=False,
                  command_args=[]):
    norm_path = os.path.normpath(path)
    src_dir = RMFTestCase.get_src_folder()

    if target == self.TARGET_STACKS:
        stack_version = norm_path.split(os.sep)[0]
        base_path = os.path.join(src_dir, PATH_TO_STACKS)
        configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
    elif target == self.TARGET_CUSTOM_ACTIONS:
        base_path = os.path.join(src_dir, PATH_TO_CUSTOM_ACTIONS)
        configs_path = os.path.join(src_dir, PATH_TO_CUSTOM_ACTION_TESTS, "configs")
    elif target == self.TARGET_COMMON_SERVICES:
        base_path = os.path.join(src_dir, PATH_TO_COMMON_SERVICES)
        configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, hdp_stack_version, "configs")
    else:
        raise RuntimeError("Wrong target value %s" % target)

    script_path = os.path.join(base_path, norm_path)

    if config_file is not None and config_dict is None:
        config_file_path = os.path.join(configs_path, config_file)
        try:
            with open(config_file_path, "r") as f:
                self.config_dict = json.load(f)
        except IOError:
            raise RuntimeError("Can not read config file: " + config_file_path)
    elif config_dict is not None and config_file is None:
        self.config_dict = config_dict
    else:
        raise RuntimeError("Please specify either the config_file or the config_dict parameter")

    if config_overrides:
        for key, value in config_overrides.iteritems():
            self.config_dict[key] = value

    self.config_dict = ConfigDictionary(self.config_dict)

    # append basedir to PYTHONPATH
    scriptsdir = os.path.dirname(script_path)
    basedir = os.path.dirname(scriptsdir)
    sys.path.append(scriptsdir)

    # get method to execute
    try:
        with patch.object(platform, 'linux_distribution', return_value=os_type):
            script_module = imp.load_source(classname, script_path)
            script_class_inst = RMFTestCase._get_attr(script_module, classname)()
            method = RMFTestCase._get_attr(script_class_inst, command)
    except IOError, err:
        raise RuntimeError("Cannot load class %s from %s: %s" % (classname, norm_path, err.message))
def assertResourceCalled(self, resource_type, name, **kwargs):
    with patch.object(UnknownConfiguration, '__getattr__', return_value=lambda: "UnknownConfiguration()"):
        self.assertNotEqual(len(RMFTestCase.env.resource_list), 0, "There were no more resources executed!")
        resource = RMFTestCase.env.resource_list.pop(0)

        self.assertEquals(resource_type, resource.__class__.__name__)
        self.assertEquals(name, resource.name)
        self.assertEquals(kwargs, resource.arguments)
def test_create_from_vocabularies(self):
    """Test that create_from_vocabularies() correctly merges the lists from pythesint"""
    manager = VocabularyManager()
    manager.vocabularies = {
        'voc1': {'get_list': MagicMock(), 'update': MagicMock()},
        'voc2': {'get_list': MagicMock(), 'update': MagicMock()}
    }
    with patch.object(manager, 'create_instances') as mock_create_instances, \
         patch.object(manager, 'update_and_get_list') as mock_update_and_get_list:
        mock_update_and_get_list.side_effect = [
            [{'standard_name': 'foo'}],
            [{'standard_name': 'foo'}, {'standard_name': 'bar'}]
        ]
        manager.create_from_vocabularies(force=True, versions={'voc1': '9.1.5', 'voc2': '10.3'})
        mock_update_and_get_list.assert_has_calls([
            call(manager.vocabularies['voc1']['get_list'],
                 manager.vocabularies['voc1']['update'],
                 True, version='9.1.5'),
            call(manager.vocabularies['voc2']['get_list'],
                 manager.vocabularies['voc2']['update'],
                 True, version='10.3')
        ])
        mock_create_instances.assert_called_with([{'standard_name': 'foo'}, {'standard_name': 'bar'}])
def setUp(self):
    # disable stdout
    out = StringIO.StringIO()
    sys.stdout = out

    # Create config
    self.config = AmbariConfig().getConfig()

    # Instantiate CachedHTTPSConnection (skip connect() call)
    with patch.object(security.VerifiedHTTPSConnection, "connect"):
        self.cachedHTTPSConnection = security.CachedHTTPSConnection(self.config)
def setUp(self):
    # disable stdout
    out = StringIO.StringIO()
    sys.stdout = out

    # Create config
    self.config = AmbariConfig()

    # Instantiate CachedHTTPSConnection (skip connect() call)
    with patch.object(security.VerifiedHTTPSConnection, "connect"):
        self.cachedHTTPSConnection = security.CachedHTTPSConnection(self.config)
def setUp(self):
    # disable stdout
    out = StringIO.StringIO()
    sys.stdout = out

    # Create config
    self.config = AmbariConfig()
    self.config.set('security', 'ssl_verify_cert', '0')

    # Instantiate CachedHTTPSConnection (skip connect() call)
    with patch.object(security.VerifiedHTTPSConnection, "connect"):
        self.cachedHTTPSConnection = security.CachedHTTPSConnection(self.config, "example.com")
def test_has_bluestore_label(self):
    # patch device.Device __init__ function to do nothing since we want to only test the
    # low-level behavior of has_bluestore_label
    with patch.object(device.Device, "__init__", lambda self, path, with_lsm=False: None):
        disk = device.Device("/dev/sda")
        disk.abspath = "/dev/sda"
        with patch('builtins.open', mock_open(read_data=b'bluestore block device\n')):
            assert disk.has_bluestore_label
        with patch('builtins.open', mock_open(read_data=b'not a bluestore block device\n')):
            assert not disk.has_bluestore_label
def assertResourceCalledRegexp(self, resource_type, name, **kwargs):
    with patch.object(UnknownConfiguration, '__getattr__', return_value=lambda: "UnknownConfiguration()"):
        self.assertNotEqual(len(RMFTestCase.env.resource_list), 0, "There were no more resources executed!")
        resource = RMFTestCase.env.resource_list.pop(0)

        self.assertRegexpMatches(resource.__class__.__name__, resource_type)
        self.assertRegexpMatches(resource.name, name)
        for key in set(resource.arguments.keys()) | set(kwargs.keys()):
            resource_value = resource.arguments.get(key, '')
            actual_value = kwargs.get(key, '')
            self.assertRegexpMatches(resource_value, actual_value,
                                     msg="Key " + key + " doesn't match")
def setUp(self):
    self.patcher = patch('geospaas.vocabularies.managers.print')
    self.mock_print = self.patcher.start()

    mocked_methods = {}
    for i, vocabulary_name in enumerate(self.model.objects.vocabularies):
        mocked_methods[vocabulary_name] = {
            'get_list': MagicMock(return_value=self.model_lists[i]),
            'update': MagicMock(return_value=None)
        }
    methods_patcher = patch.object(self.model.objects, 'vocabularies', mocked_methods)
    methods_patcher.start()
    self.addCleanup(methods_patcher.stop)
def main():
    if disable_python_and_puppet:
        with patch.object(PuppetExecutor.PuppetExecutor, 'run_manifest') as run_manifest_method:
            run_manifest_method.side_effect = \
                lambda command, file, tmpout, tmperr: {
                    'exitcode': 0,
                    'stdout': "Simulated run of pp %s" % file,
                    'stderr': 'None'
                }
            with patch.object(PythonExecutor.PythonExecutor, 'run_file') as run_file_py_method:
                run_file_py_method.side_effect = \
                    lambda command, file, tmpoutfile, tmperrfile: {
                        'exitcode': 0,
                        'stdout': "Simulated run of py %s" % file,
                        'stderr': 'None'
                    }
                run_simulation()
    else:
        run_simulation()
def testRestoreNothing(self):
    resp = self.new_mocked_response('get-drive-fm-good-response.json')
    self.start_mocked_http_server([
        responses.Response(
            method=constants.PATCH,
            url=('https://server1.ibmc.com/redfish/v1/Chassis/1/Drives'
                 '/HDDPlaneDisk0'),
            json=resp.json())
    ])
    with ibmc_client.connect(**self.server) as client:
        drive = Drive(resp, ibmc_client=client)
        with patch.object(drive, 'set') as patched_set:
            drive.restore()
            self.assertFalse(patched_set.called)
def testRestoreJbodDrive(self):
    resp = self.new_mocked_response('get-drive-fm-jbod-response.json')
    self.start_mocked_http_server([
        responses.Response(
            method=constants.PATCH,
            url=('https://server1.ibmc.com/redfish/v1/Chassis/1/Drives'
                 '/HDDPlaneDisk0'),
            json=resp.json())
    ])
    with ibmc_client.connect(**self.server) as client:
        drive = Drive(resp, ibmc_client=client)
        with patch.object(drive, 'set') as patched_set:
            drive.restore()
            patched_set.assert_called_with(
                firmware_state=constants.DRIVE_FM_STATE_UNCONFIG_GOOD)
def assertResourceCalledRegexp(self, resource_type, name, **kwargs):
    with patch.object(UnknownConfiguration, '__getattr__', return_value=lambda: "UnknownConfiguration()"):
        self.assertNotEqual(len(RMFTestCase.env.resource_list), 0, "There were no more resources executed!")
        resource = RMFTestCase.env.resource_list.pop(0)

        self.assertRegexpMatches(resource.__class__.__name__, resource_type)
        self.assertRegexpMatches(resource.name, name)
        for key in set(resource.arguments.keys()) | set(kwargs.keys()):
            resource_value = resource.arguments.get(key, '')
            actual_value = kwargs.get(key, '')
            if self.isstring(resource_value):
                self.assertRegexpMatches(resource_value, actual_value,
                                         msg="Key '%s': '%s' does not match with '%s'" %
                                             (key, resource_value, actual_value))
            else:
                # check only the type of a custom object
                self.assertEquals(resource_value.__class__.__name__, actual_value.__class__.__name__)
def testRestoreHotSparedDrive(self):
    resp = self.new_mocked_response('get-drive-fm-spare-response.json')
    self.start_mocked_http_server([
        responses.Response(
            method=constants.PATCH,
            url=('https://server1.ibmc.com/redfish/v1/Chassis/1/Drives'
                 '/HDDPlaneDisk0'),
            json=resp.json())
    ])
    with ibmc_client.connect(**self.server) as client:
        drive = Drive(resp, ibmc_client=client)
        with patch.object(drive, 'set') as patched_set:
            drive.restore()
            patched_set.assert_called_with(
                hotspare_type=constants.HOT_SPARE_NONE)
def test_recreate_repo_suse(self, filecmp_mock, read_file_mock, checked_call_mock, file_mock,
                            is_redhat_family, is_ubuntu_family, is_suse_family):
    filecmp_mock.return_value = False
    is_redhat_family.return_value = False
    is_ubuntu_family.return_value = False
    is_suse_family.return_value = True
    read_file_mock.return_value = "Dummy repo file contents"
    checked_call_mock.return_value = 0, "Flushing zypper cache"
    with Environment('/') as env:
        with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
            # Check that zypper cache is flushed
            Repository('hadoop',
                       base_url='http://download.base_url.org/rpm/',
                       mirror_list='https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                       repo_template=RHEL_SUSE_DEFAULT_TEMPLATE,
                       repo_file_name='Repository')
            Repository(None, action="create")

            self.assertTrue(checked_call_mock.called)

            expected_repo_file_content = "[hadoop]\nname=hadoop\nmirrorlist=https://mirrors.base_url.org/?repo=Repository&arch=$basearch\n\npath=/\nenabled=1\ngpgcheck=0"
            template = file_mock.call_args_list[0][1]['content']
            self.assertEqual(expected_repo_file_content, template)

            # Check that if content is equal, zypper cache is not flushed
            checked_call_mock.reset_mock()
            filecmp_mock.return_value = True

            Repository('hadoop',
                       base_url='http://download.base_url.org/rpm/',
                       mirror_list='https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                       repo_template=RHEL_SUSE_DEFAULT_TEMPLATE,
                       repo_file_name='Repository')
            Repository(None, action="create")

            self.assertFalse(checked_call_mock.called)

            expected_repo_file_content = "[hadoop]\nname=hadoop\nmirrorlist=https://mirrors.base_url.org/?repo=Repository&arch=$basearch\n\npath=/\nenabled=1\ngpgcheck=0"
            template = file_mock.call_args_list[0][1]['content']
            self.assertEqual(expected_repo_file_content, template)
def test_get_list(self):
    # TODO - these should be generated from rc file!
    dicts = ['wkv_variable', 'gcmd_instrument', 'gcmd_science_keyword', 'gcmd_provider',
             'gcmd_platform', 'gcmd_location', 'gcmd_horizontalresolutionrange',
             'gcmd_verticalresolutionrange', 'gcmd_temporalresolutionrange', 'gcmd_project',
             'gcmd_rucontenttype', 'cf_standard_name', 'iso19115_topic_category']
    for name in dicts:
        if name != 'iso19115_topic_category':
            resource = resource_string(__name__, '../basedata/' + name)
            with patch.object(pti.json_vocabulary, 'openURL', return_value=resource):
                function = getattr(pti, 'get_' + name + '_list')
                self.assertIsInstance(function(), list)
        else:
            function = getattr(pti, 'get_' + name + '_list')
            self.assertIsInstance(function(), list)
def test_create_repo_suse(self, file_mock):
    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(RHEL_SUSE_DEFAULT_TEMPLATE)):
            Repository('hadoop',
                       base_url='http://download.base_url.org/rpm/',
                       mirror_list='https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                       repo_template="dummy.j2",
                       repo_file_name='Repository')

            self.assertTrue('hadoop' in env.resources['Repository'])
            defined_arguments = env.resources['Repository']['hadoop'].arguments
            expected_arguments = {'repo_template': 'dummy.j2',
                                  'mirror_list': 'https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                                  'base_url': 'http://download.base_url.org/rpm/',
                                  'repo_file_name': 'Repository'}
            expected_template_arguments = {'mirror_list': 'https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                                           'base_url': 'http://download.base_url.org/rpm/',
                                           'repo_file_name': 'Repository'}

            self.assertEqual(defined_arguments, expected_arguments)
            self.assertEqual(file_mock.call_args[0][0], '/etc/zypp/repos.d/Repository.repo')

            template_item = file_mock.call_args[1]['content']
            template = str(template_item.name)
            expected_template_arguments.update({'repo_id': 'hadoop'})

            self.assertEqual(expected_template_arguments, template_item.context._dict)
            self.assertEqual('dummy.j2', template)
def assertResourceCalledIgnoreEarlier(self, resource_type, name, **kwargs):
    """
    Fast forwards past earlier resources called, popping them off the list until the specified
    resource is hit. If it's not found, then an assertion is thrown that there are no more
    resources.
    """
    with patch.object(UnknownConfiguration, '__getattr__', return_value=lambda: "UnknownConfiguration()"):
        while len(RMFTestCase.env.resource_list) >= 0:
            # no more items means exit the loop
            self.assertNotEqual(len(RMFTestCase.env.resource_list), 0,
                                "The specified resource was not found in the call stack.")

            # take the next resource and try it out
            resource = RMFTestCase.env.resource_list.pop(0)
            try:
                self.assertEquals(resource_type, resource.__class__.__name__)
                self.assertEquals(name, resource.name)
                self.assertEquals(kwargs, resource.arguments)
                break
            except AssertionError:
                pass
def test_enqueue_jobs_dont_skip(self):
    broker = Standard(None)
    with patch.object(Standard, 'add_job') as mock_add_job:
        beat = Beat(broker, 10, False)
        beat.register_job(Adder)
        beat.register_job(Divider)

        expired_job = PeriodicJob.objects.get(pk=2)
        future_job = PeriodicJob.objects.get(pk=4)
        future_next_execution = future_job.next_execution
        self.assertGreater(future_next_execution, datetime.now(pytz.timezone('UTC')))

        beat.enqueue_next_jobs(beat.get_expired_jobs())

        expired_job = PeriodicJob.objects.get(pk=2)
        self.assertEqual(expired_job.next_execution.isoformat(), "2015-03-29T23:01:00+00:00")

        japanese_job = PeriodicJob.objects.get(pk=3)
        self.assertEqual(japanese_job.next_execution.isoformat(), "2015-07-26T16:10:00+00:00")

        future_job = PeriodicJob.objects.get(pk=4)
        self.assertEqual(future_job.next_execution, future_next_execution)
        self.assertGreater(future_job.next_execution, datetime.now(pytz.timezone('UTC')))

        assert mock_add_job.called_with(num1=1, num2=2, sqjobs_programmed_date='2015-07-26T17:01:00+02:00')
def test_create_repo_ubuntu_doesnt_repo_exist(self, file_mock, execute_mock, tempfile_mock):
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"

    with Environment('/') as env:
        with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
            Repository('HDP',
                       base_url='http://download.base_url.org/rpm/',
                       repo_file_name='HDP',
                       repo_template=DEBIAN_DEFAUTL_TEMPLATE,
                       components=['a', 'b', 'c'])

    call_content = file_mock.call_args_list[0]
    template_name = call_content[0][0]
    template_content = call_content[1]['content']

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')

    self.assertEqual(file_mock.call_count, 2)
    self.assertEqual(execute_mock.call_count, 0)
def test_create_repo_debian_doesnt_repo_exist(self, file_mock, execute_mock, tempfile_mock):
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"

    with Environment('/') as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(DEBIAN_DEFAUTL_TEMPLATE)):
            Repository('HDP',
                       base_url='http://download.base_url.org/rpm/',
                       repo_file_name='HDP',
                       repo_template="dummy.j2",
                       components=['a', 'b', 'c'])

    template_item = file_mock.call_args_list[0]
    template_name = template_item[0][0]
    template_content = template_item[1]['content'].get_content()

    self.assertEquals(template_name, '/tmp/1.txt')
    self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')

    self.assertEqual(file_mock.call_count, 1)
    self.assertEqual(execute_mock.call_count, 0)
def test_enqueue_jobs(self):
    broker = Standard(None)
    with patch.object(Standard, 'add_job') as mock_add_job:
        beat = Beat(broker, 10, True)
        beat.register_job(Adder)
        beat.register_job(Divider)

        expired_job = PeriodicJob.objects.get(pk=2)
        expired_next_execution = expired_job.next_execution
        expired_minute = expired_next_execution.minute
        expired_second = expired_next_execution.second

        future_job = PeriodicJob.objects.get(pk=4)
        future_next_execution = future_job.next_execution
        self.assertGreater(future_next_execution, datetime.now(pytz.timezone('UTC')))

        beat.enqueue_next_jobs(beat.get_expired_jobs())

        expired_job = PeriodicJob.objects.get(pk=2)
        self.assertEqual(expired_minute, expired_job.next_execution.minute)
        self.assertEqual(expired_second, expired_job.next_execution.second)
        self.assertGreater(expired_job.next_execution, expired_next_execution)
        self.assertGreater(expired_job.next_execution, datetime.now(pytz.timezone('UTC')))

        future_job = PeriodicJob.objects.get(pk=4)
        self.assertEqual(future_job.next_execution, future_next_execution)
        self.assertGreater(future_job.next_execution, datetime.now(pytz.timezone('UTC')))

        assert mock_add_job.called_with(num1=1, num2=2, sqjobs_programmed_date='2015-07-26T17:01:00+02:00')
def test_execute_retryable_command_with_time_lapse(self, CustomServiceOrchestrator_mock,
                                                   sleep_mock, time_mock):
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp())
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)
    python_execution_result_dict = {
        'exitcode': 1,
        'stdout': 'out',
        'stderr': 'stderr',
        'structuredOut': '',
        'status': 'FAILED'
    }

    times_arr = [8, 10, 14, 18, 22, 26, 30, 34]
    if self.logger.isEnabledFor(logging.INFO):
        times_arr.insert(0, 4)
    time_mock.side_effect = times_arr

    def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
        return python_execution_result_dict

    command = copy.deepcopy(self.retryable_command)
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
        runCommand_mock.side_effect = side_effect
        actionQueue.execute_command(command)

    # assert that the python executor was started
    self.assertTrue(runCommand_mock.called)
    self.assertEqual(2, runCommand_mock.call_count)
    self.assertEqual(1, sleep_mock.call_count)
    sleep_mock.assert_has_calls([call(1)], False)
    runCommand_mock.assert_has_calls([
        call(command,
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt',
             override_output_files=True, retry=False),
        call(command,
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'output-19.txt',
             os.sep + 'tmp' + os.sep + 'ambari-agent' + os.sep + 'errors-19.txt',
             override_output_files=False, retry=True)])
def test_create_repo_ubuntu_doesnt_repo_exist(self, file_mock, execute_mock, tempfile_mock):
    tempfile_mock.return_value = MagicMock(spec=file)
    tempfile_mock.return_value.__enter__.return_value.name = "/tmp/1.txt"

    with Environment("/") as env:
        with patch.object(repository, "Template", new=DummyTemplate.create(DEBIAN_DEFAUTL_TEMPLATE)):
            Repository(
                "HDP",
                base_url="http://download.base_url.org/rpm/",
                repo_file_name="HDP",
                repo_template="dummy.j2",
                components=["a", "b", "c"],
            )

    template_item = file_mock.call_args_list[0]
    template_name = template_item[0][0]
    template_content = template_item[1]["content"].get_content()

    self.assertEquals(template_name, "/tmp/1.txt")
    self.assertEquals(template_content, "deb http://download.base_url.org/rpm/ a b c\n")

    self.assertEqual(file_mock.call_count, 1)
    self.assertEqual(execute_mock.call_count, 0)
def test_recreate_repo_suse(self, read_file_mock, checked_call_mock, file_mock,
                            is_redhat_family, is_ubuntu_family, is_suse_family):
    is_redhat_family.return_value = False
    is_ubuntu_family.return_value = False
    is_suse_family.return_value = True
    read_file_mock.return_value = "Dummy repo file contents"
    checked_call_mock.return_value = 0, "Flushing zypper cache"
    with Environment('/') as env:
        with patch.object(repository, "__file__", new='/ambari/test/repo/dummy/path/file'):
            # Check that zypper cache is flushed
            Repository('hadoop',
                       base_url='http://download.base_url.org/rpm/',
                       mirror_list='https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                       repo_template=RHEL_SUSE_DEFAULT_TEMPLATE,
                       repo_file_name='Repository')
            self.assertTrue(checked_call_mock.called)

            expected_repo_file_content = "[hadoop]\nname=hadoop\nmirrorlist=https://mirrors.base_url.org/?repo=Repository&arch=$basearch\n\npath=/\nenabled=1\ngpgcheck=0"
            template = file_mock.call_args[1]['content']
            self.assertEqual(expected_repo_file_content, template)

            # Check that if content is equal, zypper cache is not flushed
            checked_call_mock.reset_mock()
            read_file_mock.return_value = expected_repo_file_content

            Repository('hadoop',
                       base_url='http://download.base_url.org/rpm/',
                       mirror_list='https://mirrors.base_url.org/?repo=Repository&arch=$basearch',
                       repo_template=RHEL_SUSE_DEFAULT_TEMPLATE,
                       repo_file_name='Repository')
            self.assertFalse(checked_call_mock.called)

            expected_repo_file_content = "[hadoop]\nname=hadoop\nmirrorlist=https://mirrors.base_url.org/?repo=Repository&arch=$basearch\n\npath=/\nenabled=1\ngpgcheck=0"
            template = file_mock.call_args[1]['content']
            self.assertEqual(expected_repo_file_content, template)
def patched_aq_execute_command(command):
    # We have to apply the patch for the separate thread from within this same thread
    with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
        runCommand_mock.side_effect = side_effect
        actionQueue.execute_command(command)
def test_post_upgrade_restart(self, time_mock):
    # load the NN and JN JMX files so that the urllib2.urlopen mock has data to return
    num_journalnodes = 3
    journalnode_jmx_file = os.path.join(RMFTestCase._getStackTestsFolder(),
                                        self.UPGRADE_STACK_VERSION, "configs",
                                        "journalnode-upgrade-jmx.json")
    namenode_jmx_file = os.path.join(RMFTestCase._getStackTestsFolder(),
                                     self.UPGRADE_STACK_VERSION, "configs",
                                     "journalnode-upgrade-namenode-jmx.json")
    namenode_status_active_file = os.path.join(RMFTestCase._getStackTestsFolder(),
                                               self.UPGRADE_STACK_VERSION, "configs",
                                               "journalnode-upgrade-namenode-status-active.json")
    namenode_status_standby_file = os.path.join(RMFTestCase._getStackTestsFolder(),
                                                self.UPGRADE_STACK_VERSION, "configs",
                                                "journalnode-upgrade-namenode-status-standby.json")

    journalnode_jmx = open(journalnode_jmx_file, 'r').read()
    namenode_jmx = open(namenode_jmx_file, 'r').read()
    namenode_status_active = open(namenode_status_active_file, 'r').read()
    namenode_status_standby = open(namenode_status_standby_file, 'r').read()

    import utils
    import urllib2
    from namenode_ha_state import NamenodeHAState

    url_stream_mock = MagicMock()
    url_stream_mock.read.side_effect = (num_journalnodes * [namenode_jmx, journalnode_jmx])

    urlopen_mock = MagicMock(return_value=url_stream_mock)
    #urlopen_mock.return_value = url_stream_mock

    curl_krb_request_mock = MagicMock(side_effect=(num_journalnodes * [(namenode_jmx, "", 1),
                                                                       (journalnode_jmx, "", 1)]))
    get_address_mock = MagicMock(return_value="c6406.ambari.apache.org")

    with patch.object(utils, "curl_krb_request", curl_krb_request_mock):
        with patch.object(urllib2, "urlopen", urlopen_mock):
            with patch.object(NamenodeHAState, "get_address", get_address_mock):
                self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/journalnode.py",
                                   classname="JournalNode",
                                   command="post_upgrade_restart",
                                   config_file="journalnode-upgrade.json",
                                   checked_call_mocks=[(0, str(namenode_status_active)),
                                                       (0, str(namenode_status_standby))],
                                   hdp_stack_version=self.UPGRADE_STACK_VERSION,
                                   target=RMFTestCase.TARGET_COMMON_SERVICES)

    # ensure that the mock was called with the http-style version of the URL
    urlopen_mock.assert_called
    urlopen_mock.assert_called_with("http://c6407.ambari.apache.org:8480/jmx")

    url_stream_mock.reset_mock()
    curl_krb_request_mock.reset_mock()
    get_address_mock.reset_mock()

    # now try with HDFS on SSL
    with patch.object(utils, "curl_krb_request", curl_krb_request_mock):
        with patch.object(urllib2, "urlopen", urlopen_mock):
            with patch.object(NamenodeHAState, "get_address", get_address_mock):
                self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/journalnode.py",
                                   classname="JournalNode",
                                   command="post_upgrade_restart",
                                   config_file="journalnode-upgrade-hdfs-secure.json",
                                   checked_call_mocks=[(0, str(namenode_status_active)),
                                                       (0, str(namenode_status_standby))],
                                   hdp_stack_version=self.UPGRADE_STACK_VERSION,
                                   target=RMFTestCase.TARGET_COMMON_SERVICES)

    # ensure that the mock was called with the https-style version of the URL
    curl_krb_request_mock.assert_called
    curl_krb_request_mock.assert_called_with("/tmp",
                                             "/etc/security/keytabs/smokeuser.headless.keytab",
                                             "*****@*****.**",
                                             "https://c6407.ambari.apache.org:8481/jmx",
                                             "jn_upgrade", "/usr/bin/kinit", False, None, "ambari-qa")
import platform
import datetime
import os
import errno
import tempfile
import sys
from unittest import TestCase
from mock.mock import patch

from ambari_commons import OSCheck, OSConst
import os_check_type

utils = __import__("ambari_server.utils").utils
# We have to use this import HACK because the filename contains a dash
with patch("platform.linux_distribution", return_value=("Suse", "11", "Final")):
  with patch.object(utils, "get_postgre_hba_dir"):
    ambari_server = __import__("ambari-server")


class TestOSCheck(TestCase):

  @patch("platform.linux_distribution")
  @patch("os.path.exists")
  def test_get_os_type(self, mock_exists, mock_linux_distribution):
    # 1 - Any system
    mock_exists.return_value = False
    mock_linux_distribution.return_value = ("my_os", "", "")
    result = OSCheck.get_os_type()
    self.assertEquals(result, "my_os")

    # 2 - Negative case
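# The __import__('ambari-server') call above is the usual workaround for a module
# whose file name contains a dash: `import ambari-server` is a SyntaxError, while
# __import__ takes the module name as a plain string.  A self-contained illustration
# with an invented temporary module name:
import os
import sys
import tempfile

tmp_dir = tempfile.mkdtemp()
with open(os.path.join(tmp_dir, "dash-named.py"), "w") as module_file:
  module_file.write("ANSWER = 42\n")

sys.path.insert(0, tmp_dir)
try:
  dash_named = __import__("dash-named")   # the path finder simply looks for "dash-named.py"
  assert dash_named.ANSWER == 42
finally:
  sys.path.remove(tmp_dir)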
import socket
import platform
from unittest import TestCase
from mock.mock import patch
from mock.mock import MagicMock
from mock.mock import create_autospec
import ambari_commons
from ambari_commons import OSCheck
import os

from only_for_platform import not_for_platform, get_platform, PLATFORM_WINDOWS, PLATFORM_LINUX

if get_platform() != PLATFORM_WINDOWS:
  os_distro_value = ('Suse', '11', 'Final')
else:
  os_distro_value = ('win2012serverr2', '6.3', 'WindowsServer')

with patch.object(OSCheck, "os_distribution", new=MagicMock(return_value=os_distro_value)):
  from ambari_commons.firewall import Firewall
  from ambari_commons.os_check import OSCheck, OSConst
  from ambari_agent.HostCheckReportFileHandler import HostCheckReportFileHandler
  from ambari_agent.HostInfo import HostInfo, HostInfoLinux
  from ambari_agent.Hardware import Hardware
  from ambari_agent.AmbariConfig import AmbariConfig
  from resource_management.core.system import System
  from resource_management.libraries.functions import packages_analyzer


@patch.object(OSCheck, "os_distribution", new=MagicMock(return_value=os_distro_value))
class TestHostInfo(TestCase):

  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, 'get_os_family')
  @patch('resource_management.libraries.functions.packages_analyzer.subprocessWithTimeout')
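# A reminder of what the class-level decorator above does: applying @patch.object(...)
# to a TestCase subclass wraps every method whose name starts with "test", so the patch
# is active in each of them without repeating the decorator.  Because `new=` is given,
# no extra mock argument is injected into the test methods.  Minimal sketch with
# invented Clock/now names, assuming the same `mock` package:
from unittest import TestCase
from mock.mock import MagicMock, patch


class Clock(object):
  @staticmethod
  def now():
    return "real time"


@patch.object(Clock, "now", new=MagicMock(return_value="frozen time"))
class TestClock(TestCase):
  def test_first(self):
    self.assertEqual(Clock.now(), "frozen time")

  def test_second(self):
    # the same patch is applied here too, courtesy of the class decorator
    self.assertEqual(Clock.now(), "frozen time")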
import datetime
import os
import errno
import tempfile
import sys
from unittest import TestCase
from mock.mock import patch
from mock.mock import MagicMock

from ambari_commons import OSCheck
import os_check_type

utils = __import__('ambari_server.utils').utils
# We have to use this import HACK because the filename contains a dash
with patch("platform.linux_distribution", return_value=('Suse', '11', 'Final')):
  with patch.object(OSCheck, "os_distribution", return_value=('Suse', '11', 'Final')):
    with patch.object(utils, "get_postgre_hba_dir"):
      ambari_server = __import__('ambari-server')
      from ambari_server.serverConfiguration import update_ambari_properties, configDefaults


class TestOSCheck(TestCase):

  @patch.object(OSCheck, "os_distribution")
  @patch("ambari_commons.os_check._is_oracle_linux")
  def test_get_os_type(self, mock_is_oracle_linux, mock_linux_distribution):
    # 1 - Any system
    mock_is_oracle_linux.return_value = False
    mock_linux_distribution.return_value = ('my_os', '', '')
    result = OSCheck.get_os_type()
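# The parameter order in test_get_os_type above follows the standard mock rule:
# stacked @patch decorators are applied bottom-up, so the decorator closest to the
# function supplies the first mock argument.  A tiny self-contained demonstration
# (Target and its methods are invented for the example):
from unittest import TestCase
from mock.mock import patch


class Target(object):
  @staticmethod
  def lower():
    return "lower"

  @staticmethod
  def upper():
    return "upper"


class TestDecoratorOrder(TestCase):

  @patch.object(Target, "upper")   # applied last  -> second mock argument
  @patch.object(Target, "lower")   # applied first -> first mock argument
  def test_order(self, lower_mock, upper_mock):
    lower_mock.return_value = "mocked lower"
    upper_mock.return_value = "mocked upper"
    self.assertEqual(Target.lower(), "mocked lower")
    self.assertEqual(Target.upper(), "mocked upper")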
with description('when given ambiguous input text'):
  with it('finds multiple words'):
    a = _acrostic_iter.AcrosticIter(list('superbowl'), tries.ambiguous())
    expect(list(a)).to(contain('super bowl', 'superb owl', 'superbowl'))

  with it('finds multiple words in really long string'):
    text = 'superbowlwarplanesnapshotscrapbookisnowhere'
    a = _acrostic_iter.AcrosticIter(list(text), tries.ambiguous())
    expect(''.join(next(iter(a)).split())).to(equal(text))

  with it('finds same answer quickly'):
    text = 'superbowlwarplanesnapshotscrapbookisnowhere'
    a = _acrostic_iter.AcrosticIter(list(text), tries.ambiguous())
    first = next(iter(a))
    # Result should be cached and '_walk' should never be needed.
    with patch.object(a, '_walk', side_effect=[]) as mock:
      second = next(iter(a))
      expect(first).to(equal(second))
      expect(mock.call_count).to(be_below(10))

with description('with seek sets'):
  with it('maintains old functionality'):
    seeking = seek_set.SeekSet(list('superbowl'))
    a = _acrostic_iter.AcrosticIter(seeking, tries.ambiguous())
    expect(list(a)).to(contain('super bowl', 'superb owl', 'superbowl'))

  with it('supports indexing'):
    seeking = seek_set.SeekSet(['bad', 'bag', 'ban'], indexes=[1, 2, 3])
    a = _acrostic_iter.AcrosticIter(seeking, trie=BA_PREFIX_TRIE)
    expect(list(a)).to(equal(['ban']))
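# The 'finds same answer quickly' spec above relies on a common trick: after the first
# lookup, patch the expensive helper and check that it is barely (or never) called
# again, which shows the result came from a cache.  A generic sketch of the same idea
# outside the mamba DSL, with invented Memoized/compute names:
from mock.mock import patch


class Memoized(object):
  def __init__(self):
    self._cache = {}

  def compute(self, key):
    return ('expensive', key)

  def get(self, key):
    if key not in self._cache:
      self._cache[key] = self.compute(key)
    return self._cache[key]


memo = Memoized()
first = memo.get('x')   # populates the cache through the real compute()
with patch.object(memo, 'compute', side_effect=AssertionError('should not be called')):
  second = memo.get('x')  # served from the cache; compute() is never invoked
assert first == second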
def patched_aq_execute_command(command):
  # The patching for the separate thread has to happen inside that thread itself,
  # so the patch stays active while the command runs.
  with patch.object(PythonExecutor, "run_file") as runCommand_mock:
    runCommand_mock.side_effect = side_effect
    actionQueue.execute_command(command)
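# In the helper above the mock delegates to a side_effect callable defined elsewhere in
# the test.  When side_effect is a function, the mock calls it with the same arguments
# it received, and (unless the function returns mock.DEFAULT) its return value becomes
# the mock's return value.  A small hedged sketch with invented Executor/run_file names:
from mock.mock import patch


class Executor(object):
  def run_file(self, script, args):
    return "real run"


calls = []

def fake_run_file(script, args):
  # receives exactly the arguments the caller passed to the mocked method
  calls.append((script, args))
  return 0

with patch.object(Executor, "run_file") as run_file_mock:
  run_file_mock.side_effect = fake_run_file
  result = Executor().run_file("script.py", ["--debug"])

assert result == 0
assert calls == [("script.py", ["--debug"])]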