def test_fetch(self):
    """Test for the "fetch" method.

    We verify that:
        * Input sanitation is performed properly
        * The request is made to the proper link
        * The file extension is appended if not in the link (jpg is hardcoded)
        * The request object returns a valid iterable
        * Weird image titles are handled properly
    """
    # NOTE(review): GalleryImage here is only a data holder; link is filled in
    # per sub-case below.
    imgobject = imgurpython.helpers.GalleryImage(link=None,
                                                 title="Neat mountains",
                                                 description="or not",
                                                 width=10000,
                                                 height=10000)

    # verify that we can only send imgur objects here
    with self.assertRaises(ValueError):
        self.fetcher.fetch(None, "filename")

    with self.assertRaises(ValueError):
        self.fetcher.fetch("badtype", "filename")

    # verify that we send a proper filename here
    with self.assertRaises(ValueError):
        self.fetcher.fetch(imgobject, None)

    with self.assertRaises(ValueError):
        self.fetcher.fetch(imgobject, 10)

    # check that the request is properly formatted
    with patch("bg_daemon.fetchers.imgurfetcher.requests.get") as \
            mock_method:
        mock_method.return_value = None
        imgobject.link = None

        # we check that the link is properly checked when building
        # the request
        with self.assertRaises(ValueError):
            self.fetcher.fetch(imgobject, "filename.jpg")

        # self.fake_response presumably comes from setUp — TODO confirm
        mock_method.return_value = self.fake_response

        open_mock = mock_open()
        with patch("bg_daemon.fetchers.imgurfetcher.open", open_mock,
                   create=True):
            # Assert that we actually try to write the file
            self.fetcher.fetch(imgobject, "filename.jpg")
            open_mock.assert_called_once_with("filename.jpg", "wb")

        open_mock = mock_open()
        with patch("bg_daemon.fetchers.imgurfetcher.open", open_mock,
                   create=True):
            # Assert that it tries to infer a different extension if
            # not provided
            imgobject.link = "filename.gif"
            self.fetcher.fetch(imgobject, "filename")
            open_mock.assert_called_once_with("filename.gif", "wb")
def eq_ne_test(self, _, tfile1, path1, tfile2, path2, eq_flag):
    """Check that two torrents compare as expected with __eq__/__ne__.

    Builds two LocalTorrent objects whose file contents are mocked with
    ``tfile1``/``tfile2`` and asserts that equality matches ``eq_flag``
    and inequality is its complement.

    The unused ``as m`` bindings from the original were dropped: the mock
    handles were never referenced.
    """
    # Each LocalTorrent reads its file at construction; feed it mocked data.
    with patch("__builtin__.open", mock_open(read_data=tfile1)):
        ltor1 = LocalTorrent(path1)
    with patch("__builtin__.open", mock_open(read_data=tfile2)):
        ltor2 = LocalTorrent(path2)
    # __eq__ and __ne__ are exercised explicitly (not via ==/!=) so that
    # both dunder implementations are covered.
    self.assertEqual(ltor1.__eq__(ltor2), eq_flag)
    self.assertEqual(ltor1.__ne__(ltor2), not eq_flag)
def open_se(path, mode='r', create=True):
    """Side-effect replacement for ``open`` that serves cgroup fixtures.

    Known cgroup paths get a mock_open wired to the matching fixture;
    anything else falls through to the real ``open`` (with the patch
    temporarily stopped so we don't recurse into ourselves).
    """
    # Suffix -> fixture name table replaces the original four identical
    # if/elif branches; order is preserved and the suffixes are disjoint.
    fixture_map = (
        ('cpuacct/mesos/%s/cpuacct.usage' % task_id, 'cpuacct.usage'),
        ('cpuacct/mesos/%s/cpuacct.stat' % task_id, 'cpuacct.stat'),
        ('cpu/mesos/%s/cpu.stat' % task_id, 'cpu.stat'),
        ('memory/mesos/%s/memory.stat' % task_id, 'memory.stat'),
    )
    for suffix, fixture_name in fixture_map:
        if path.endswith(suffix):
            fixture = self.getFixture(fixture_name)
            handle = mock_open(read_data=fixture.getvalue())
            handle.__enter__.return_value = fixture
            return handle

    # Unknown path: disable our own patch, call the real open, re-enable.
    # NOTE(review): the third positional arg of open() is `buffering`, so
    # `create` is being passed as a buffering flag here — kept as-is to
    # preserve behavior, but worth confirming upstream.
    patch_open.stop()
    real_file = open(path, mode, create)
    patch_open.start()
    return real_file
def testCalculateDeviceMd5Sums_singlePath_linkerWarning(self):
    """Linker warnings in the device md5sum output must be skipped."""
    # See crbug/479966
    test_path = '/storage/emulated/legacy/test/file.dat'
    device = mock.NonCallableMock()
    device.adb = mock.NonCallableMock()
    device.adb.Push = mock.Mock()
    # First element is noise the parser must ignore; second is the real sum.
    device_md5sum_output = [
        'WARNING: linker: /data/local/tmp/md5sum/md5sum_bin: '
        'unused DT entry: type 0x1d arg 0x15db',
        '0123456789abcdeffedcba9876543210 '
        '/storage/emulated/legacy/test/file.dat',
    ]
    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)

    mock_temp_file = mock.mock_open()
    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'

    mock_device_temp_file = mock.mock_open()
    mock_device_temp_file.return_value.name = (
        '/data/local/tmp/test/script/file.sh')

    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
                    new=mock_device_temp_file)):
        out = md5sum.CalculateDeviceMd5Sums(test_path, device)
        # Result is keyed by device path; the warning line contributes nothing.
        self.assertEquals(1, len(out))
        self.assertTrue('/storage/emulated/legacy/test/file.dat' in out)
        self.assertEquals('0123456789abcdeffedcba9876543210',
                          out['/storage/emulated/legacy/test/file.dat'])
        device.adb.Push.assert_called_once_with(
            '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
        device.RunShellCommand.assert_called_once_with(
            ['sh', '/data/local/tmp/test/script/file.sh'])
def test_tags_are_shown_in_post(self, _, __): """ Test that tags are actually get to the output. """ # since we're interested in rendered page, let's register # a fake converter for that purpose self.app.add_converter(FakeConverter()) data = textwrap.dedent('''\ --- tags: [tag1, tag2] --- some text''') open_fn = 'holocron.content.open' with mock.patch(open_fn, mock.mock_open(read_data=data), create=True): post = Post('2015/05/23/filename.fake', self.app) self._get_content([post]) with mock.patch(open_fn, mock.mock_open(), create=True) as mopen: post.build() content = mopen().write.call_args[0][0] err = 'Could not find link for #tag1.' self.assertIn('<a href="/mypath/tags/tag1/">#tag1</a>', content, err) err = 'Could not find link for #tag2.' self.assertIn('<a href="/mypath/tags/tag2/">#tag2</a>', content, err)
def test_load_groups_alias(self):
    """UserImporterLdap.load_groups_alias() tests
    """
    self.importer.groups_alias_file = 'test'

    # tests valid content: "long short" pairs, one per line
    aliases = "testAlong testA\n" \
              "testBlong testB\n"
    m_open = mock.mock_open(read_data=aliases)
    with mock.patch("%s.open" % (module), m_open, create=True):
        self.importer.load_groups_alias()
    self.assertEquals(self.importer.groups_alias,
                      {'testAlong': 'testA', 'testBlong': 'testB'})

    # tests various invalid content: missing alias, too many fields,
    # wrong separators — each must raise HPCStatsSourceError
    wrong_aliases = ["testAlong",
                     "testBlong testB fail\n",
                     "test:fail",
                     "test;epic"]
    for wrong_alias in wrong_aliases:
        m_open = mock.mock_open(read_data=wrong_alias)
        with mock.patch("%s.open" % (module), m_open, create=True):
            self.assertRaisesRegexp(
                HPCStatsSourceError,
                "Malformed line in alias file test",
                self.importer.load_groups_alias)
def test_load_task(self, mock_open):
    """Check template rendering of tasks with args from string and file.

    ``mock_open`` is presumably injected by a @mock.patch decorator on
    this method — TODO confirm from the class definition.
    """
    input_task = "{'ab': {{test}}}"
    input_args = "{'test': 2}"

    # NOTE(boris-42): Such order of files is because we are reading
    # file with args before file with template.
    mock_open.side_effect = [
        mock.mock_open(read_data="{'test': 1}").return_value,
        mock.mock_open(read_data=input_task).return_value
    ]
    # args file only -> template rendered with test=1
    result = self.task._load_task("in_task", task_args_file="in_args_path")
    self.assertEqual(result, {"ab": 1})

    mock_open.side_effect = [
        mock.mock_open(read_data=input_task).return_value
    ]
    # inline args only -> template rendered with test=2
    result = self.task._load_task("in_task", task_args=input_args)
    self.assertEqual(result, {"ab": 2})

    mock_open.side_effect = [
        mock.mock_open(read_data="{'test': 1}").return_value,
        mock.mock_open(read_data=input_task).return_value
    ]
    # both given -> inline args take precedence over the args file
    result = self.task._load_task("in_task", task_args=input_args,
                                  task_args_file="any_file")
    self.assertEqual(result, {"ab": 2})
def test_yaml_not_loaded(self, mock):
    '''YAML is not loaded'''
    # ``mock`` is presumably a patched open injected by a decorator —
    # TODO confirm; mock_open(mock, ...) configures it in place.
    import sys
    mock_open(mock, read_data='foo: status_code: 200')
    # Hide the yaml module so Validator.load hits the missing-name path.
    with patch.dict('sys.modules', yaml=None):
        del sys.modules['yaml']
        self.assertRaises(NameError, Validator.load, 'rtd.yaml')
def runErnwin(self, command):
    """Run ernwin's main() with stdout/stderr and file writes captured.

    Returns (open_main, open_stats, mock_stdout, mock_stderr) on success;
    on any exception, dumps captured output to the real stderr and
    re-raises so the failure is diagnosable.
    """
    self.exitCode = None
    open_stats = mock_open()
    open_main = mock_open()
    try:
        with patch('sys.stdout', new_callable=StringIO) as mock_stdout:
            with patch('sys.stderr', new_callable=StringIO) as mock_stderr:
                # Patch both modules' `open` so no files are written.
                with patch('fess.builder.samplingStatisticsNew2.open',
                           open_stats, create=True):
                    with patch('fess.scripts.ernwin_new.open',
                               open_main, create=True):
                        try:
                            # command.split()[2:] skips a 2-token prefix —
                            # presumably "python ernwin.py"; TODO confirm.
                            ernwin.main(self.parser.parse_args(
                                command.split()[2:]))
                        except SystemExit as e:
                            self.exitCode = e.code
                        if self.exitCode:
                            print("ERNWIN exited with non-zero exit code {}"
                                  .format(self.exitCode), file=sys.stderr)
                            print("STDERR WAS:", file=sys.stderr)
                            print(mock_stderr.getvalue(), file=sys.stderr)
                        return open_main, open_stats, mock_stdout, mock_stderr
    except BaseException as e:
        # Surface the captured streams before propagating the error.
        print('Exception {}:"{}" caught'.format(type(e), e), file=sys.stderr)
        print("STDERR WAS:", file=sys.stderr)
        print(mock_stderr.getvalue(), file=sys.stderr)
        print("STDOUT WAS:", file=sys.stderr)
        print(mock_stdout.getvalue(), file=sys.stderr)
        raise
def test_write_queued_seeds(self):
    """write_queued_seeds() writes the seed set to the queue file.

    Fix: the original assigned ``mock_file = mock_open()`` and immediately
    shadowed it with the ``with ... as mock_file`` binding — the first
    assignment was dead code and has been removed.
    """
    seeds = {'123_456', '234_567'}
    with patch.object(builtins, 'open', mock_open()) as mock_file:
        qsub_store_corsika_data.write_queued_seeds(seeds)
        mock_file.assert_called_once_with(
            qsub_store_corsika_data.QUEUED_SEEDS, 'w')
        # NOTE(review): set iteration order is process-stable, so joining
        # the same set here matches the join done inside the function.
        mock_file().write.assert_called_once_with('\n'.join(seeds))
def testCalculateDeviceMd5Sums_singlePath(self):
    """A single device path yields one (hash, path) result entry."""
    test_path = '/storage/emulated/legacy/test/file.dat'
    device = mock.NonCallableMock()
    device.adb = mock.NonCallableMock()
    device.adb.Push = mock.Mock()
    device_md5sum_output = [
        '0123456789abcdeffedcba9876543210 '
        '/storage/emulated/legacy/test/file.dat',
    ]
    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)

    mock_temp_file = mock.mock_open()
    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'

    mock_device_temp_file = mock.mock_open()
    mock_device_temp_file.return_value.name = (
        '/data/local/tmp/test/script/file.sh')

    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
                    new=mock_device_temp_file)):
        out = md5sum.CalculateDeviceMd5Sums(test_path, device)
        # NOTE: unlike the dict-style variant of this test, here the result
        # is a sequence of objects with .hash/.path attributes.
        self.assertEquals(1, len(out))
        self.assertEquals('0123456789abcdeffedcba9876543210', out[0].hash)
        self.assertEquals('/storage/emulated/legacy/test/file.dat',
                          out[0].path)
        device.adb.Push.assert_called_once_with(
            '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
        device.RunShellCommand.assert_called_once_with(
            ['sh', '/data/local/tmp/test/script/file.sh'])
def test_ignore_list():
    """Test that `ignore`d errors are not reported in the API."""
    # Deliberately bad docstring in the checked source to trigger a known
    # set of pydocstyle error codes.
    function_to_check = textwrap.dedent('''
        def function_with_bad_docstring(foo):
            """ does spacinwithout a period in the end
            no blank line after one-liner is bad. Also this - """
            return foo
    ''')
    expected_error_codes = {'D100', 'D400', 'D401', 'D205', 'D209', 'D210',
                            'D403'}
    mock_open = mock.mock_open(read_data=function_to_check)
    from pydocstyle import checker
    with mock.patch.object(
            checker.tk, 'open', mock_open, create=True):
        errors = tuple(checker.check(['filepath']))
        error_codes = {error.code for error in errors}
        assert error_codes == expected_error_codes

    # We need to recreate the mock, otherwise the read file is empty
    mock_open = mock.mock_open(read_data=function_to_check)
    with mock.patch.object(
            checker.tk, 'open', mock_open, create=True):
        # D202/D213 are in the ignore list but never triggered; D100 is
        # both triggered and ignored, so it must disappear from results.
        ignored = {'D100', 'D202', 'D213'}
        errors = tuple(checker.check(['filepath'], ignore=ignored))
        error_codes = {error.code for error in errors}
        assert error_codes == expected_error_codes - ignored
def test_directoryLayout(self):
    """destdir.write() must create the namespace __init__ files and MANIFEST.

    Fix: the original's leading ``m = mock_open()`` was dead code —
    immediately shadowed by the ``with patch(...) as m`` binding — and
    has been removed.
    """
    dl = self.zp.destdir
    with patch('__builtin__.open', mock_open(read_data='fake data'),
               create=True) as m:
        dl.write()

    # Write File Handle: collect everything written through the mock.
    wfh = m.return_value.__enter__.return_value
    self.results = wfh.write.call_args_list

    # Get the specific file calls that make up our writes.
    # repr(x)[6:] strips the "call('" prefix so the path can be matched.
    top = defaults.get('prefix', os.getcwd())
    name = DEFAULT_NAME
    file_calls = [x for x in m.mock_calls
                  if repr(x)[6:].startswith("%s/%s" % (top, name))]

    self.assertEqual(
        self.results,
        [call("__import__('pkg_resources').declare_namespace(__name__)\n"),
         call("__import__('pkg_resources').declare_namespace(__name__)\n"),
         call("__import__('pkg_resources').declare_namespace(__name__)\n"),
         call('graft ZenPacks\n')])

    self.assertEqual(
        file_calls,
        [call('%s/%s/a/__init__.py' % (top, name), 'w'),
         call('%s/%s/a/b/__init__.py' % (top, name), 'w'),
         call('%s/%s/a/b/c/__init__.py' % (top, name), 'w'),
         call('%s/%s/MANIFEST.in' % (top, name), 'w')])
def test_ignore_list():
    """Errors listed in `ignore` must not be reported by pydocstyle.check().

    Idiom cleanup: ``set((...))`` literals replaced with set literals and
    ``set(gen)`` with set comprehensions — same elements, clearer intent.
    """
    # Deliberately malformed docstring that triggers a known error set.
    function_to_check = textwrap.dedent('''
        def function_with_bad_docstring(foo):
            """ does spacinwithout a period in the end
            no blank line after one-liner is bad. Also this - """
            return foo
    ''')
    expected_error_codes = {'D100', 'D400', 'D401', 'D205', 'D209', 'D210',
                            'D403', 'D405', 'D406'}
    mock_open = mock.mock_open(read_data=function_to_check)
    from .. import pydocstyle
    with mock.patch.object(
            pydocstyle, 'tokenize_open', mock_open, create=True):
        errors = tuple(pydocstyle.check(['filepath']))
        error_codes = {error.code for error in errors}
        assert error_codes == expected_error_codes

    # We need to recreate the mock, otherwise the read file is empty
    mock_open = mock.mock_open(read_data=function_to_check)
    with mock.patch.object(
            pydocstyle, 'tokenize_open', mock_open, create=True):
        ignored = {'D100', 'D202', 'D213'}
        errors = tuple(pydocstyle.check(['filepath'], ignore=ignored))
        error_codes = {error.code for error in errors}
        assert error_codes == expected_error_codes - ignored
def test_platform_information_container(self, mock_linux_dist):
    """Detect docker from /proc/self/cgroup, IOError fallback, /.dockerenv."""
    import sys
    # open lives in a different module on py2 vs py3.
    if sys.version_info >= (3, 0):
        mocked_fn = 'builtins.open'
    else:
        mocked_fn = '__builtin__.open'
    # cgroup content mentioning docker => detected as docker.
    with mock.patch(mocked_fn,
                    mock.mock_open(read_data="""1:name=systemd:/system.slice \
/docker-39cc1fb.scope"""),
                    create=True) as m:
        self.assertEqual(('docker', 'docker', 'docker'),
                         ceph_detect_init.platform_information(),)
        m.assert_called_once_with('/proc/self/cgroup', 'r')

    # open() raising IOError must not propagate; falls back to distro info.
    with mock.patch(mocked_fn, mock.mock_open(), create=True) as m:
        m.side_effect = IOError()
        mock_linux_dist.return_value = ('Red Hat Enterprise Linux Server',
                                        '7.3', 'Maipo')
        # Just run the code to validate the code won't raise IOError
        ceph_detect_init.platform_information()

    # Presence of /.dockerenv alone is enough to detect docker.
    with mock.patch('os.path.isfile', mock.MagicMock()) as m:
        m.return_value = True
        self.assertEqual(('docker', 'docker', 'docker'),
                         ceph_detect_init.platform_information(),)
        m.assert_called_once_with('/.dockerenv')
def test_upload_certificate_md5(self, mock_makedir, mock_chmod,
                                mock_exists):
    """PUT certificate: bad extension is 400; .pem writes with 0600 perms."""
    # wrong file name
    rv = self.app.put('/' + api_server.VERSION +
                      '/listeners/123/certificates/test.bla',
                      data='TestTest')
    self.assertEqual(400, rv.status_code)

    # cert directory already exists -> no makedirs, file written + chmod'ed
    mock_exists.return_value = True
    m = mock.mock_open()
    with mock.patch.object(builtins, 'open', m):
        rv = self.app.put('/' + api_server.VERSION +
                          '/listeners/123/certificates/test.pem',
                          data='TestTest')
        self.assertEqual(200, rv.status_code)
        self.assertEqual(OK, json.loads(rv.data.decode('utf-8')))
        handle = m()
        handle.write.assert_called_once_with(six.b('TestTest'))
        mock_chmod.assert_called_once_with(handle.fileno(), 0o600)

    # cert directory missing -> handler must create it first
    mock_exists.return_value = False
    m = mock.mock_open()
    with mock.patch.object(builtins, 'open', m):
        rv = self.app.put('/' + api_server.VERSION +
                          '/listeners/123/certificates/test.pem',
                          data='TestTest')
        self.assertEqual(200, rv.status_code)
        self.assertEqual(OK, json.loads(rv.data.decode('utf-8')))
        handle = m()
        handle.write.assert_called_once_with(six.b('TestTest'))
        mock_makedir.assert_called_once_with('/var/lib/octavia/certs/123')
def test_upload_certificate_md5(self, mock_makedir, mock_chmod,
                                mock_exists):
    """PUT certificate: bad extension is 400; .pem writes with 0600 perms.

    Fix: the final check used ``mock_makedir.called_once_with(...)`` which
    is NOT a Mock assertion — it silently records an attribute call and
    always "passes". Replaced with ``assert_called_once_with``.
    """
    # wrong file name
    mock_exists.side_effect = [True]
    rv = self.app.put('/' + api_server.VERSION +
                      '/listeners/123/certificates/test.bla',
                      data='TestTest')
    self.assertEqual(400, rv.status_code)

    # cert directory already exists -> file written + chmod'ed to 0600
    mock_exists.side_effect = [True, True, True]
    m = mock.mock_open()
    with mock.patch('%s.open' % BUILTINS, m, create=True):
        rv = self.app.put('/' + api_server.VERSION +
                          '/listeners/123/certificates/test.pem',
                          data='TestTest')
        self.assertEqual(200, rv.status_code)
        self.assertEqual(OK, json.loads(rv.data.decode('utf-8')))
        handle = m()
        handle.write.assert_called_once_with(six.b('TestTest'))
        mock_chmod.assert_called_once_with(handle.fileno(), 0o600)

    # cert directory missing -> handler must create it first
    mock_exists.side_effect = [True, False]
    m = mock.mock_open()
    with mock.patch('%s.open' % BUILTINS, m, create=True):
        rv = self.app.put('/' + api_server.VERSION +
                          '/listeners/123/certificates/test.pem',
                          data='TestTest')
        self.assertEqual(200, rv.status_code)
        self.assertEqual(OK, json.loads(rv.data.decode('utf-8')))
        handle = m()
        # NOTE(review): argument kept as originally written
        # ('/var/lib/octavia/123'); the sibling test asserts
        # '/var/lib/octavia/certs/123' — verify against the handler.
        mock_makedir.assert_called_once_with('/var/lib/octavia/123')
def test_setup_challenge_cert(self):
    """_setup_challenge_cert writes cert via open() and key via safe_open."""
    # This is a helper function that can be used for handling
    # open context managers more elegantly. It avoids dealing with
    # __enter__ and __exit__ calls.
    # http://www.voidspace.org.uk/python/mock/helpers.html#mock.mock_open
    mock_open, mock_safe_open = mock.mock_open(), mock.mock_open()

    response = challenges.DVSNIResponse(validation=mock.Mock())
    achall = mock.MagicMock()
    achall.gen_cert_and_response.return_value = (response, "cert", "key")

    with mock.patch("letsencrypt.plugins.common.open",
                    mock_open, create=True):
        with mock.patch("letsencrypt.plugins.common.le_util.safe_open",
                        mock_safe_open):
            # pylint: disable=protected-access
            self.assertEqual(response, self.sni._setup_challenge_cert(
                achall, "randomS1"))

    # Cert goes through plain open; key through safe_open with 0400 perms.
    # pylint: disable=no-member
    mock_open.assert_called_once_with(self.sni.get_cert_path(achall), "wb")
    mock_open.return_value.write.assert_called_once_with("cert")
    mock_safe_open.assert_called_once_with(
        self.sni.get_key_path(achall), "wb", chmod=0o400)
    mock_safe_open.return_value.write.assert_called_once_with("key")
def test_upload_keepalived_config(self, mock_remove, mock_subprocess,
                                  mock_rename, mock_makedirs, mock_exists):
    """PUT /vrrp/upload: 200 whether dir exists or not; 500 on subprocess failure."""
    # config directory already exists
    mock_exists.return_value = True
    m = mock.mock_open()
    with mock.patch.object(builtins, 'open', m):
        rv = self.app.put('/' + api_server.VERSION + '/vrrp/upload',
                          data='test')
        self.assertEqual(200, rv.status_code)

    # config directory missing — handler creates it, still 200
    mock_exists.return_value = False
    m = mock.mock_open()
    with mock.patch.object(builtins, 'open', m):
        rv = self.app.put('/' + api_server.VERSION + '/vrrp/upload',
                          data='test')
        self.assertEqual(200, rv.status_code)

    # first subprocess call fails -> 500
    mock_subprocess.side_effect = subprocess.CalledProcessError(1, 'blah!')
    with mock.patch.object(builtins, 'open', m):
        rv = self.app.put('/' + api_server.VERSION + '/vrrp/upload',
                          data='test')
        self.assertEqual(500, rv.status_code)

    # first call succeeds, second fails -> still 500
    mock_subprocess.side_effect = [True,
                                   subprocess.CalledProcessError(1, 'blah!')]
    with mock.patch.object(builtins, 'open', m):
        rv = self.app.put('/' + api_server.VERSION + '/vrrp/upload',
                          data='test')
        self.assertEqual(500, rv.status_code)
def test_save_and_load_config(self):
    """Round-trip a Config through save() and load() via mocked files."""
    c = signet.Config(config_dir=fixtures.path('signet'), keyid=TEST_KEYID)
    c.init_defaults()
    c['secret_keyring'] = TEST_SECRET_KEYRING
    self.assertEqual(c['secret_keyring'], TEST_SECRET_KEYRING)
    c['test'] = True
    self.assertTrue(c['test'])

    # Save: capture everything written and reassemble the attestation JSON.
    with patch('os.path.exists', return_value=True):
        with patch('__builtin__.open', mock_open()) as open_write_mock:
            c.save()
            open_write_mock.assert_called_once_with(
                fixtures.path('signet/config'), 'w')
            attestation_text = ''.join(
                call[0][0]
                for call in open_write_mock.return_value.write.call_args_list)
    attestation = json.loads(attestation_text)
    self.assertEqual(attestation['data']['version'], signet.__version__)
    signet.verify_attestation(attestation, TEST_KEYRING)

    # Load: feed the captured text back and verify the configs match.
    c2 = signet.Config(config_dir=fixtures.path('signet'))
    with patch('os.path.exists', return_value=True):
        with patch('__builtin__.open',
                   mock_open(read_data=attestation_text)) as open_read_mock:
            c2.load()
            open_read_mock.assert_called_once_with(
                fixtures.path('signet/config'))
    self.assertEqual(c2.config, c.config)
def test_setup_challenge_cert(self):
    """_setup_challenge_cert writes PEM cert via open() and key via safe_open."""
    # This is a helper function that can be used for handling
    # open context managers more elegantly. It avoids dealing with
    # __enter__ and __exit__ calls.
    # http://www.voidspace.org.uk/python/mock/helpers.html#mock.mock_open
    mock_open, mock_safe_open = mock.mock_open(), mock.mock_open()

    response = challenges.TLSSNI01Response()
    achall = mock.MagicMock()
    key = test_util.load_pyopenssl_private_key("rsa512_key.pem")
    achall.response_and_validation.return_value = (
        response, (test_util.load_cert("cert.pem"), key))

    with mock.patch("certbot.plugins.common.open",
                    mock_open, create=True):
        with mock.patch("certbot.plugins.common.util.safe_open",
                        mock_safe_open):
            # pylint: disable=protected-access
            self.assertEqual(response, self.sni._setup_challenge_cert(
                achall, "randomS1"))

    # Cert written as the raw vector bytes; key dumped as PEM through
    # safe_open with 0400 permissions.
    # pylint: disable=no-member
    mock_open.assert_called_once_with(self.sni.get_cert_path(achall), "wb")
    mock_open.return_value.write.assert_called_once_with(
        test_util.load_vector("cert.pem"))
    mock_safe_open.assert_called_once_with(
        self.sni.get_key_path(achall), "wb", chmod=0o400)
    mock_safe_open.return_value.write.assert_called_once_with(
        OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
def test_upload_then_update(box_client, test_file_path, test_file_content,
                            update_file_content, file_name):
    """Upload a file to the root folder, then update its contents in place.

    All parameters are pytest fixtures — presumably defined in conftest;
    TODO confirm.
    """
    # Upload: patch folder-module open so the "file" content is mocked.
    with patch('boxsdk.object.folder.open',
               mock_open(read_data=test_file_content), create=True):
        file_object = box_client.folder('0').upload(test_file_path, file_name)
        assert file_object.name == file_name
        file_object_with_info = file_object.get()
        assert file_object_with_info.id == file_object.object_id
        assert file_object_with_info.name == file_name
        file_content = file_object.content()
        # Content comes back as bytes; encode the expectation if needed.
        expected_file_content = test_file_content.encode('utf-8') \
            if isinstance(test_file_content, six.text_type) \
            else test_file_content
        assert file_content == expected_file_content
        folder_items = box_client.folder('0').get_items(100)
        assert len(folder_items) == 1
        assert folder_items[0].object_id == file_object.object_id
        assert folder_items[0].name == file_object.name

    # Update: same file object, new mocked content via file-module open.
    with patch('boxsdk.object.file.open',
               mock_open(read_data=update_file_content), create=True):
        updated_file_object = file_object.update_contents(test_file_path)
        assert updated_file_object.name == file_name
        file_object_with_info = updated_file_object.get()
        assert file_object_with_info.id == updated_file_object.object_id
        assert file_object_with_info.name == file_name
        file_content = updated_file_object.content()
        # NOTE(review): compares against expected_file_content (the ORIGINAL
        # upload content), not update_file_content — looks intentional only
        # if the mock backend ignores updates; verify.
        assert file_content == expected_file_content
        folder_items = box_client.folder('0').get_items(100)
        assert len(folder_items) == 1
        assert folder_items[0].object_id == file_object.object_id
        assert folder_items[0].name == file_object.name
def test_methods(self, pv_mocks, pv_urls, method):
    """Exercise each HTTP verb helper of PicovicoRequest.

    ``method`` is parametrized — presumably over get/delete/post/put;
    TODO confirm from the fixture definition.
    """
    mocker = pv_mocks.OBJ
    # Patch the name-mangled private __respond so no real HTTP happens.
    respond_mock = mocker.patch.object(PicovicoRequest,
                                       '_PicovicoRequest__respond')
    pv_req = PicovicoRequest()
    method_func = getattr(pv_req, method)
    argument = {}
    data = None
    if method not in ('get', 'delete'):
        if method == 'post':
            # post with a non-dict payload must fail its assertion first.
            argument.update(post_data='hello')
            with pytest.raises(AssertionError):
                method_func(urls.ME, **argument)
            data = {'k': 'v'}
            argument.update(post_data=data)
        else:
            # put path: reads the payload from a (mocked) file.
            data = 'putdata'
            if six.PY2:
                mocker.patch('picovico.baserequest.open',
                             mock.mock_open(read_data=data))
            else:
                mocker.patch('builtins.open', mock.mock_open(read_data=data))
            argument.update(filename='helo')
    # get/delete take no kwargs; post/put pass their prepared arguments.
    method_func(urls.ME) if not argument else method_func(urls.ME, **argument)
    respond_mock.assert_called_with(urls.ME)
    assert pv_req.request_args.method == method
    assert pv_req.request_args.data == data
def test_should_upgrade_assessment_items(self):
    """should_upgrade_assessment_items(): missing/old version => True, same => False."""
    # if assessmentitems.version doesn't exist, then return
    # true
    with patch("os.path.exists") as exists_method:
        exists_method.return_value = False
        self.assertTrue(
            mod.should_upgrade_assessment_items(),
            "We told our user not to download assessment items even if "
            "they don't have it! Madness!"
        )

    # if the version in assessmentitems.version is less
    # than our current version, then we should upgrade (return True)
    assessment_items_mock_version = "0.9.0"
    with patch('%s.open' % mod.__name__,
               mock_open(read_data=assessment_items_mock_version),
               create=True) as mopen:
        self.assertTrue(
            mod.should_upgrade_assessment_items(),
            "We should've told our users to upgrade assessment items, "
            "as they have an old version!"
        )
        # we should've also opened the file at least
        mopen.assert_called_once_with(
            contentload_settings.KHAN_ASSESSMENT_ITEM_VERSION_PATH)

    # if the version in assessment items is equal to our current
    # version, then don't upgrade
    assessment_items_mock_version = version.SHORTVERSION
    with patch('%s.open' % mod.__name__,
               mock_open(read_data=assessment_items_mock_version),
               create=True) as mopen:
        self.assertFalse(
            mod.should_upgrade_assessment_items(),
            "We should not tell the user to upgrade when we have the "
            "same version as assessment items!"
        )
        # we should've also opened the file atleast
        mopen.assert_called_once_with(
            contentload_settings.KHAN_ASSESSMENT_ITEM_VERSION_PATH)
def write(self, obj):
    """Call obj.write() with open() mocked and record everything written.

    Stores the mock file handle's write call list on ``self.results`` for
    later assertions.

    Fix: the original's leading ``m = mock_open()`` was dead code —
    immediately shadowed by the ``with patch(...) as m`` binding — and
    has been removed.
    """
    with patch('__builtin__.open', mock_open(read_data='Dummy Data'),
               create=True) as m:
        obj.write()
    # Write File Handle
    wfh = m.return_value.__enter__.return_value
    self.results = wfh.write.call_args_list
def test_create_config_yaml(self, stream):
    """create_config() must emit YAML with the expected default settings.

    ``stream`` is the patched open; its writes are redirected into an
    in-memory buffer which is then parsed back as YAML.
    """
    # Configure the injected mock as a mock_open and capture its writes.
    mock.mock_open(stream)
    sink = StringIO.StringIO()
    stream().write.side_effect = sink.write

    config.create_config()

    # Parse what was "written to disk" and verify the defaults.
    cnf = yaml.load(sink.getvalue())
    assert cnf.history_file == os.path.join(config.config_folder(),
                                            config.HISTORY_FILE)
    assert cnf.rss_url == config.URL_NOT_SET
    assert cnf.download_dir == os.path.join(config.config_folder(),
                                            config.TORRENTS_DIR)
def test_annotation_load(annot, dset):
    """dset.load_annotation('0') parses the mocked annotation XML.

    ``annot`` and ``dset`` are pytest fixtures — presumably the XML text
    and the dataset under test; TODO confirm in conftest.

    Fix: the original's ``m = mock_open()`` was never used (the patch
    below builds its own mock) and has been removed.
    """
    with patch.object(__builtin__, 'open', mock_open(read_data=annot)):
        # NOTE(review): opening with 'w' and then calling fp.read() only
        # works because open is mocked here — kept as written.
        with open('annotated_fixture.xml', 'w') as fp:
            fp.read()
        assert dset.load_annotation('0')
        # keys {boxes, classes, overlaps, flipped}
        assert len(dset.load_annotation('0')) == 4
        assert dset.load_annotation('0')['boxes'].shape[1] == 4
def test_explicit_mock(self):
    """mock_open() can configure an explicitly supplied MagicMock in place."""
    explicit_mock = MagicMock()
    # Passing an existing mock makes mock_open configure it rather than
    # create a new one.
    mock_open(explicit_mock)
    with patch("%s.open" % __name__, explicit_mock, create=True) as patched:
        # patch() must hand back the very object we supplied.
        self.assertIs(patched, explicit_mock)
        open("foo")
        explicit_mock.assert_called_once_with("foo")
def test_is_mounted(self):
    """is_mounted(): ismount alone, then source matching against /proc/mounts."""
    mount_path = "/var/lib/nova/mnt"
    source = "192.168.0.1:/nova"
    # /proc/mounts fixture where the NFS source matches exactly.
    proc_with_mnt = """/dev/sda3 / xfs rw,seclabel,attr2,inode64 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
nfsd /proc/fs/nfsd nfsd rw,relatime 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
sunrpc /var/lib/nfs/rpc_pipefs rpc_pipefs rw,relatime 0 0
192.168.0.1:/nova /var/lib/nova/mnt nfs4 rw,relatime,vers=4.1
"""
    # Same mountpoint, but mounted from a different server (192.168.0.2).
    proc_wrong_mnt = """/dev/sda3 / xfs rw,seclabel,attr2,inode64 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
nfsd /proc/fs/nfsd nfsd rw,relatime 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
sunrpc /var/lib/nfs/rpc_pipefs rpc_pipefs rw,relatime 0 0
192.168.0.2:/nova /var/lib/nova/mnt nfs4 rw,relatime,vers=4.1
"""
    # Mountpoint entirely absent from /proc/mounts.
    proc_without_mnt = """/dev/sda3 / xfs rw,seclabel,,attr2,inode64 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
nfsd /proc/fs/nfsd nfsd rw,relatime 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
sunrpc /var/lib/nfs/rpc_pipefs rpc_pipefs rw,relatime 0 0
"""
    with mock.patch.object(os.path, 'ismount') as mock_ismount:
        # is_mounted(mount_path) with no source is equivalent to
        # os.path.ismount(mount_path)
        mock_ismount.return_value = False
        self.assertFalse(libvirt_utils.is_mounted(mount_path))

        mock_ismount.return_value = True
        self.assertTrue(libvirt_utils.is_mounted(mount_path))

        # Source is given, and matches source in /proc/mounts
        proc_mnt = mock.mock_open(read_data=proc_with_mnt)
        with mock.patch.object(six.moves.builtins, "open", proc_mnt):
            self.assertTrue(libvirt_utils.is_mounted(mount_path, source))

        # Source is given, and doesn't match source in /proc/mounts
        proc_mnt = mock.mock_open(read_data=proc_wrong_mnt)
        with mock.patch.object(six.moves.builtins, "open", proc_mnt):
            self.assertFalse(libvirt_utils.is_mounted(mount_path, source))

        # Source is given, and mountpoint isn't present in /proc/mounts
        # Note that this shouldn't occur, as os.path.ismount should have
        # previously returned False in this case.
        proc_umnt = mock.mock_open(read_data=proc_without_mnt)
        with mock.patch.object(six.moves.builtins, "open", proc_umnt):
            self.assertFalse(libvirt_utils.is_mounted(mount_path, source))
def test_setup_with_skip_and_load_lists(self, mock_generate_random_path):
    """TestrContext.setup(): load-list file contents for each list combo."""
    # with load_list, but without skip_list: the whole list is written
    load_list = ["tests.foo", "tests.bar"]
    cfg = {"verifier": self.verifier, "run_args": {"load_list": load_list}}
    ctx = testr.TestrContext(cfg)
    mock_open = mock.mock_open()
    with mock.patch("%s.open" % PATH, mock_open):
        ctx.setup()
    mock_open.assert_called_once_with(
        mock_generate_random_path.return_value, "w")
    handle = mock_open.return_value
    handle.write.assert_called_once_with("\n".join(load_list))
    self.assertEqualCmd(["--parallel", "--load-list",
                         mock_generate_random_path.return_value],
                        cfg["testr_cmd"])
    self.assertFalse(self.verifier.manager.list_tests.called)

    # with load_list and skip_list: skipped entries are filtered out
    load_list = ["tests.foo", "tests.bar"]
    skip_list = ["tests.foo"]
    cfg = {"verifier": self.verifier,
           "run_args": {"load_list": load_list, "skip_list": skip_list}}
    ctx = testr.TestrContext(cfg)
    mock_open = mock.mock_open()
    with mock.patch("%s.open" % PATH, mock_open):
        ctx.setup()
    mock_open.assert_called_once_with(
        mock_generate_random_path.return_value, "w")
    handle = mock_open.return_value
    handle.write.assert_called_once_with(load_list[1])
    self.assertEqualCmd(["--parallel", "--load-list",
                         mock_generate_random_path.return_value],
                        cfg["testr_cmd"])
    self.assertFalse(self.verifier.manager.list_tests.called)

    # with skip_list, but without load_list: full list comes from
    # manager.list_tests(), then the skip filter is applied
    load_list = ["tests.foo", "tests.bar"]
    self.verifier.manager.list_tests.return_value = load_list
    skip_list = ["tests.foo"]
    cfg = {"verifier": self.verifier, "run_args": {"skip_list": skip_list}}
    ctx = testr.TestrContext(cfg)
    mock_open = mock.mock_open()
    with mock.patch("%s.open" % PATH, mock_open):
        ctx.setup()
    mock_open.assert_called_once_with(
        mock_generate_random_path.return_value, "w")
    handle = mock_open.return_value
    handle.write.assert_called_once_with(load_list[1])
    self.assertEqualCmd(["--parallel", "--load-list",
                         mock_generate_random_path.return_value],
                        cfg["testr_cmd"])
    self.verifier.manager.list_tests.assert_called_once_with()
def test_main():
    """generate_deployments_for_service.main(): write, no-op, and IOError paths."""
    fake_soa_dir = '/etc/true/null'
    file_mock = mock.mock_open()
    # Everything main() touches is patched: argument parsing, path
    # handling, mapping computation, file I/O, and JSON (de)serialization.
    with mock.patch(
        'paasta_tools.generate_deployments_for_service.parse_args',
        return_value=mock.Mock(verbose=False, soa_dir=fake_soa_dir,
                               service='fake_service'),
        autospec=True,
    ) as parse_patch, mock.patch(
        'os.path.abspath', return_value='ABSOLUTE', autospec=True,
    ) as abspath_patch, mock.patch(
        'paasta_tools.generate_deployments_for_service.'
        'get_deploy_group_mappings',
        return_value=({
            'MAP': {
                'docker_image': 'PINGS',
                'desired_state': 'start'
            }
        }, mock.sentinel.v2_mappings),
        autospec=True,
    ) as mappings_patch, mock.patch(
        'os.path.join', return_value='JOIN', autospec=True,
    ) as join_patch, mock.patch(
        'builtins.open', file_mock, autospec=None,
    ) as open_patch, mock.patch(
        'json.dump', autospec=True,
    ) as json_dump_patch, mock.patch(
        'json.load', return_value={'OLD_MAP': 'PINGS'}, autospec=True,
    ) as json_load_patch, mock.patch(
        'paasta_tools.generate_deployments_for_service.atomic_file_write',
        autospec=True,
    ) as atomic_file_write_patch:
        # First run: existing file content differs -> new mappings written.
        generate_deployments_for_service.main()
        parse_patch.assert_called_once_with()
        abspath_patch.assert_called_once_with(fake_soa_dir)
        mappings_patch.assert_called_once_with(
            soa_dir='ABSOLUTE',
            service='fake_service',
        ),
        join_patch.assert_any_call(
            'ABSOLUTE', 'fake_service',
            generate_deployments_for_service.TARGET_FILE),
        assert join_patch.call_count == 2
        atomic_file_write_patch.assert_called_once_with('JOIN')
        open_patch.assert_called_once_with('JOIN', 'r')
        json_dump_patch.assert_called_once_with(
            {
                'v1': {
                    'MAP': {
                        'docker_image': 'PINGS',
                        'desired_state': 'start'
                    },
                },
                'v2': mock.sentinel.v2_mappings,
            },
            atomic_file_write_patch.return_value.__enter__.return_value,
        )
        json_load_patch.assert_called_once_with(
            file_mock.return_value.__enter__.return_value)

        # test no update to file if content unchanged
        json_load_patch.return_value = {
            'v1': {
                'MAP': {
                    'docker_image': 'PINGS',
                    'desired_state': 'start'
                },
            },
            'v2': mock.sentinel.v2_mappings,
        }
        json_dump_patch.reset_mock()
        generate_deployments_for_service.main()
        assert not json_dump_patch.called

        # test IOError path: unreadable existing file forces a rewrite
        open_patch.side_effect = IOError
        generate_deployments_for_service.main()
        assert json_dump_patch.called
class MetricsTest(unittest.TestCase):
    """Tests for teadmill.metrics."""

    @mock.patch('treadmill.metrics.cgrp_meminfo', mock.Mock(
        return_value={
            'memory.failcnt': 2,
            'memory.limit_in_bytes': 2,
            'memory.max_usage_in_bytes': 2,
            'memory.memsw.failcnt': 2,
            'memory.memsw.limit_in_bytes': 2,
        }))
    @mock.patch('treadmill.cgutils.pids_in_cgroup', mock.Mock(
        return_value=[]))
    @mock.patch('treadmill.cgroups.get_data', mock.Mock(
        return_value=_MEM_STATINFO))
    def test_read_memory_stats(self):
        """Tests updating memory stats from cgroups."""
        # Result merges the cgrp_meminfo limits with the parsed memory.stat
        # counters (all zeros in the _MEM_STATINFO fixture).
        self.assertEqual(
            metrics.read_memory_stats('treadmill/apps/appname'),
            {
                'memory.failcnt': 2,
                'memory.limit_in_bytes': 2,
                'memory.max_usage_in_bytes': 2,
                'memory.memsw.failcnt': 2,
                'memory.memsw.limit_in_bytes': 2,
                'memory.stat': {
                    'active_anon': 0,
                    'active_file': 0,
                    'cache': 0,
                    'hierarchical_memory_limit': 0,
                    'hierarchical_memsw_limit': 0,
                    'inactive_anon': 0,
                    'inactive_file': 0,
                    'mapped_file': 0,
                    'pgpgin': 0,
                    'pgpgout': 0,
                    'rss': 0,
                    'swap': 0,
                    'total_active_anon': 0,
                    'total_active_file': 0,
                    'total_cache': 0,
                    'total_inactive_anon': 0,
                    'total_inactive_file': 0,
                    'total_mapped_file': 0,
                    'total_pgpgin': 0,
                    'total_pgpgout': 0,
                    'total_rss': 0,
                    'total_swap': 0,
                    'total_unevictable': 0,
                    'unevictable': 0
                }
            })

    @mock.patch('treadmill.cgutils.cpu_usage', mock.Mock(
        return_value=100))
    @mock.patch('treadmill.cgutils.per_cpu_usage', mock.Mock(
        return_value=[50, 50]))
    @mock.patch(
        'treadmill.cgroups.get_data',
        # side_effect order must match the read order inside read_cpu_stats.
        mock.Mock(side_effect=[_CPUACCT_STATINFO, _CPU_STATINFO, _CPU_SHARE]))
    def test_read_cpu_metrics(self):
        """Tests updating cpu stats from cgroups."""
        cpumetrics = metrics.read_cpu_stats('treadmill/apps/appname')
        self.assertEqual(
            cpumetrics,
            {
                'cpu.stat': {
                    'nr_periods': 0,
                    'nr_throttled': 0,
                    'throttled_time': 0
                },
                'cpuacct.stat': {
                    'system': 309900720000000,
                    'user': 183352600000000
                },
                'cpuacct.usage': 100,
                'cpu.shares': 1024,
                'cpuacct.usage_percpu': [50, 50]
            })

    @mock.patch('io.open', mock.mock_open(
        read_data='1.0 2.0 2.5 12/123 12345\n'))
    @mock.patch('time.time', mock.Mock(return_value=10))
    def test_read_load(self):
        """Tests reading loadavg."""
        # read_load returns the 1-min and 5-min averages as strings.
        self.assertEqual(('1.0', '2.0'), metrics.read_load())

    @mock.patch('treadmill.cgroups.get_value', mock.Mock(return_value=2))
    def test_cgrp_meminfo(self):
        """Test the grabbing of cgrp limits"""
        rv = metrics.cgrp_meminfo('foo')
        # Every queried limit key comes back with the mocked value 2.
        self.assertEqual(
            rv,
            {
                'memory.failcnt': 2,
                'memory.limit_in_bytes': 2,
                'memory.max_usage_in_bytes': 2,
                'memory.memsw.failcnt': 2,
                'memory.memsw.limit_in_bytes': 2,
                'memory.memsw.max_usage_in_bytes': 2,
                'memory.memsw.usage_in_bytes': 2,
                'memory.soft_limit_in_bytes': 2,
                'memory.usage_in_bytes': 2
            })

    @mock.patch(
        'treadmill.fs.linux.blk_fs_info',
        mock.Mock(return_value={
            'block count': '2000',
            'free blocks': '1000',
            'block size': '1024'
        }))
    def test_get_fs_usage(self):
        """Test the fs usage compute logic."""
        # (2000 - 1000) blocks * 1024 bytes/block = 1024000 used bytes.
        self.assertEqual(
            metrics.get_fs_usage('/dev/treadmill/<uniq>'),
            {'fs.used_bytes': 1024000})

        # No block device -> nothing to report.
        self.assertEqual(metrics.get_fs_usage(None), {})

    def test_calc_fs_usage(self):
        """Test the fs usage compute logic."""
        # Empty fs info yields zero usage.
        self.assertEqual(metrics.calc_fs_usage({}), 0)
return ['containerid'] elif command == 'docker commit containerid': return ['sha256:blahsomerandomstringdata'] elif command == 'docker rm -f containerid': return [] raise PyinfraError('Invalid command: {0}'.format(command)) @patch('pyinfra.api.connectors.docker.local.shell', fake_docker_shell) @patch('pyinfra.api.connectors.docker.mkstemp', lambda: (None, '__tempfile__')) @patch('pyinfra.api.connectors.docker.os.remove', lambda f: None) @patch('pyinfra.api.connectors.docker.open', mock_open(read_data='test!'), create=True) @patch('pyinfra.api.util.open', mock_open(read_data='test!'), create=True) class TestDockerConnector(TestCase): def setUp(self): self.fake_popen_patch = patch('pyinfra.api.connectors.util.Popen') self.fake_popen_mock = self.fake_popen_patch.start() def tearDown(self): self.fake_popen_patch.stop() def test_missing_image(self): with self.assertRaises(InventoryError): make_inventory(hosts=('@docker', )) def test_user_provided_container_id(self):
def test_parse():
    """The pydoctor example HTML parses into EXAMPLE_PARSE_RESULT.

    The fixture file is read up front, then served to the parser through a
    mocked ``open`` so the parser never touches the real filesystem.
    """
    # Fixed: the original opened the fixture without closing it
    # (open(...).read()); use a context manager so the handle is released.
    with open(os.path.join(HERE, 'pydoctor_example.html')) as fixture:
        example = fixture.read()
    with patch('doc2dash.parsers.pydoctor.open',
               mock_open(read_data=example),
               create=True):
        assert list(PyDoctorParser('foo').parse()) == EXAMPLE_PARSE_RESULT
def mocked_extract(self, some_fun, args):
    """Invoke *some_fun* with ``open`` replaced by a throwaway mock.

    Used as a helper so extraction routines can run without touching
    the real filesystem.
    """
    fake_open = mock_open()
    with patch(MOCKING_FILE_NAME_OPEN, fake_open, create=True):
        some_fun(*args)
def test_have_effective_read_path(self):
    """A path openable for reading reports effective read access."""
    patched_open = mock.mock_open()
    with mock.patch('__builtin__.open', patched_open, create=True):
        self.assertTrue(
            file_utils.have_effective_read_access('fake_path'))
import json
import os

from unittest import TestCase, mock
from mock import mock_open

from substra.commands import Config, default_config

# Canned file contents used to stand in for the on-disk config file.
empty_file = mock_open(read_data='')
loaded_file = mock_open(
    read_data='{"default": {"url": "http://127.0.0.1:8000", "version": "0.0"}}'
)
# Fixed: the payload previously read '..."http://tutu:8000" "version"...' —
# implicit Python string-literal concatenation had swallowed the comma
# between the "url" and "version" members, making the JSON invalid even
# though this fixture (unlike corrupt_file) is meant to be well-formed.
custom_loaded_file = mock_open(
    read_data='{"default": {"url": "http://tutu:8000", "version": "1.0"}}')
# Deliberately not JSON, for the corrupt-config path.
corrupt_file = mock_open(read_data='tutu')


class TestConfig(TestCase):
    def test_init_config_empty(self):
        """Running Config against an empty file produces the default config."""
        with mock.patch('substra.commands.config.open', empty_file,
                        create=True) as mock_object:
            res = Config({
                '<url>': 'http://127.0.0.1:8000',
                '<version>': '0.0',
            }).run()
            self.assertTrue(res == default_config)
            # Two opens: one to read the (empty) config, one to write it back.
            self.assertEqual(len(mock_object.call_args_list), 2)
def test_get_major_version(self):
    """Version-file contents parse to a float; a read error yields 0.0."""
    readable = mock_open(read_data='9.4')
    with patch.object(builtins, 'open', readable):
        self.assertEqual(get_major_version("data"), 9.4)
    # An unreadable file must fall back to 0.0 rather than raise.
    with patch.object(builtins, 'open', side_effect=OSError):
        self.assertEqual(get_major_version("data"), 0.0)
def test_parse_haproxy_config(self):
    """Round-trip check: render listener configs, then parse them back.

    Each section renders a haproxy config via the jinja templates, feeds
    it to ``_parse_haproxy_file`` through a mocked ``open``, and asserts
    the parsed mode / stats socket / certificate path.
    """
    # template_tls: TERMINATED_HTTPS with SNI enabled.
    tls_tupe = sample_configs.sample_tls_container_tuple(
        certificate='imaCert1', private_key='imaPrivateKey1',
        primary_cn='FakeCN')
    rendered_obj = self.jinja_cfg.render_loadbalancer_obj(
        sample_configs.sample_listener_tuple(proto='TERMINATED_HTTPS',
                                             tls=True, sni=True),
        tls_tupe)
    m = mock.mock_open(read_data=rendered_obj)
    with mock.patch('%s.open' % BUILTINS, m, create=True):
        res = listener._parse_haproxy_file('123')
        self.assertEqual('TERMINATED_HTTPS', res['mode'])
        self.assertEqual('/var/lib/octavia/sample_listener_id_1.sock',
                         res['stats_socket'])
        self.assertEqual(
            '/var/lib/octavia/certs/sample_listener_id_1/FakeCN.pem',
            res['ssl_crt'])
    # render_template_tls_no_sni: same proto, SNI disabled.
    rendered_obj = self.jinja_cfg.render_loadbalancer_obj(
        sample_configs.sample_listener_tuple(proto='TERMINATED_HTTPS',
                                             tls=True),
        tls_cert=sample_configs.sample_tls_container_tuple(
            certificate='ImAalsdkfjCert',
            private_key='ImAsdlfksdjPrivateKey',
            primary_cn="FakeCN"))
    m = mock.mock_open(read_data=rendered_obj)
    with mock.patch('%s.open' % BUILTINS, m, create=True):
        res = listener._parse_haproxy_file('123')
        self.assertEqual('TERMINATED_HTTPS', res['mode'])
        self.assertEqual(BASE_AMP_PATH + '/sample_listener_id_1.sock',
                         res['stats_socket'])
        self.assertEqual(
            BASE_CRT_PATH + '/sample_listener_id_1/FakeCN.pem',
            res['ssl_crt'])
    # render_template_http: plain HTTP, no certificate expected.
    rendered_obj = self.jinja_cfg.render_loadbalancer_obj(
        sample_configs.sample_listener_tuple())
    m = mock.mock_open(read_data=rendered_obj)
    with mock.patch('%s.open' % BUILTINS, m, create=True):
        res = listener._parse_haproxy_file('123')
        self.assertEqual('HTTP', res['mode'])
        self.assertEqual(BASE_AMP_PATH + '/sample_listener_id_1.sock',
                         res['stats_socket'])
        self.assertIsNone(res['ssl_crt'])
    # template_https: HTTPS pass-through is rendered in TCP mode.
    rendered_obj = self.jinja_cfg.render_loadbalancer_obj(
        sample_configs.sample_listener_tuple(proto='HTTPS'))
    m = mock.mock_open(read_data=rendered_obj)
    with mock.patch('%s.open' % BUILTINS, m, create=True):
        res = listener._parse_haproxy_file('123')
        self.assertEqual('TCP', res['mode'])
        self.assertEqual(BASE_AMP_PATH + '/sample_listener_id_1.sock',
                         res['stats_socket'])
        self.assertIsNone(res['ssl_crt'])
    # Bogus format: unparseable content must raise ParsingError.
    m = mock.mock_open(read_data='Bogus')
    with mock.patch('%s.open' % BUILTINS, m, create=True):
        try:
            res = listener._parse_haproxy_file('123')
            self.fail("No Exception?")
        except listener.ParsingError:
            pass
@patch('sys.executable', None)
def test_install_no_python_executable():
    """install() fails loudly when sys.executable cannot be resolved."""
    with pytest.raises(RuntimeError) as e:
        _modules.install('git://aws/container-support')
    assert str(
        e.value
    ) == 'Failed to retrieve the real path for the Python executable binary'


@contextlib.contextmanager
def patch_tmpdir():
    # Stand-in for tempfile.TemporaryDirectory: always hands out /tmp.
    yield '/tmp'


@patch(builtins_open, mock_open())
@patch('os.path.exists', lambda x: False)
def test_prepare():
    """prepare() scaffolds setup.py / setup.cfg / MANIFEST.in for a module."""
    _modules.prepare('c:/path/to/', 'my-module')
    open.assert_any_call('c:/path/to/setup.py', 'w')
    open.assert_any_call('c:/path/to/setup.cfg', 'w')
    open.assert_any_call('c:/path/to/MANIFEST.in', 'w')
    # NOTE(review): `data` is built but never compared within this view —
    # presumably the test continues by asserting the written setup.py
    # contents; confirm against the full file.
    data = textwrap.dedent("""
    from setuptools import setup
    setup(packages=[''],
          name="my-module",
          version='1.0.0',
          include_package_data=True)
    """)
def test_get_major_version(self):
    """'9.4' in the version file maps to 90400; any failure maps to 0."""
    version_file = mock_open(read_data='9.4')
    with patch.object(builtins, 'open', version_file):
        self.assertEqual(self.p.get_major_version(), 90400)
    # Any exception while reading must degrade to version 0.
    failing_open = Mock(side_effect=Exception)
    with patch.object(builtins, 'open', failing_open):
        self.assertEqual(self.p.get_major_version(), 0)
def test_write_config(self):
    """write_config dumps the cleaned config as YAML to writepath."""
    config = Config(['aws_okta_keyman.py'])
    # The cleaned dict is what should actually reach the YAML dumper.
    config.clean_config_for_write = mock.MagicMock()
    config_clean = {
        'accounts': [{
            'name': 'Dev',
            'appid': 'A123/123'
        }],
        'org': 'example',
        'reup': None,
        'username': '******',
    }
    config.clean_config_for_write.return_value = config_clean
    config.writepath = './.config/aws_okta_keyman.yml'
    config.username = '******'
    config.read_yaml = mock.MagicMock()
    config.read_yaml.return_value = {
        'username': '******',
        'org': 'example',
        'appid': 'app/id',
    }
    m = mock.mock_open()
    with mock.patch('aws_okta_keyman.config.open', m):
        config.write_config()
    # The file is opened for writing at the configured path...
    m.assert_has_calls([
        mock.call(u'./.config/aws_okta_keyman.yml', 'w'),
    ])
    # ...and yaml.dump streams the document token-by-token, so the write
    # calls are asserted in full, in order.
    m.assert_has_calls([
        mock.call().write('accounts'),
        mock.call().write(':'),
        mock.call().write('\n'),
        mock.call().write('-'),
        mock.call().write(' '),
        mock.call().write('appid'),
        mock.call().write(':'),
        mock.call().write(' '),
        mock.call().write('A123/123'),
        mock.call().write('\n'),
        mock.call().write(' '),
        mock.call().write('name'),
        mock.call().write(':'),
        mock.call().write(' '),
        mock.call().write('Dev'),
        mock.call().write('\n'),
        mock.call().write('org'),
        mock.call().write(':'),
        mock.call().write(' '),
        mock.call().write('example'),
        mock.call().write('\n'),
        mock.call().write('reup'),
        mock.call().write(':'),
        mock.call().write(' '),
        mock.call().write('null'),
        mock.call().write('\n'),
        mock.call().write('username'),
        mock.call().write(':'),
        mock.call().write(' '),
        mock.call().write('*****@*****.**'),
        mock.call().write('\n'),
        mock.call().flush(),
        mock.call().flush(),
        mock.call().__exit__(None, None, None)
    ])
def test_create_file(self):
    """creat_file opens its target for writing and writes the payload."""
    with mock.patch('__builtin__.open', mock.mock_open(),
                    create=True) as mocked_open:
        service.creat_file("anyfile")
        mocked_open.assert_called_with("anyfile", 'w')
        handle = mocked_open()
        handle.write.assert_called_with("write file!!!")
def test_check_rc_missing_os_auth(self):
    # NOTE(review): this test passes vacuously. The assertions run *inside*
    # the assertRaises block; `msg in context` raises TypeError (the
    # assertRaises context object is not a container), which
    # assertRaises(Exception) then swallows as the "expected" exception —
    # and no code under test is ever invoked. The intent is presumably to
    # call the rc-file check inside the block and then assert
    # `self.assertIn(msg, str(context.exception))` after the `with` —
    # confirm against the implementation before fixing.
    with mock.patch('__builtin__.open', mock.mock_open(read_data='test')), \
            self.assertRaises(Exception) as context:
        msg = 'OS_AUTH_URL not defined in {}.'.format(self.rc_file)
        self.assertTrue(msg in context)
def test_update_iam_policies(self):
    """Success update documentation"""

    # Three throwaway rules: an error, an experimental rule (rendered in
    # its own section) and a warning, registered on a fresh collection.
    class TestRuleError(CloudFormationLintRule):
        """ Def Rule """
        id = 'E1000'
        shortdesc = 'Test Error'
        description = 'Test Description'
        source_url = 'https://github.com/aws-cloudformation/cfn-python-lint/'
        tags = ['resources']

    class TestRuleExpiremental(CloudFormationLintRule):
        """ Def Rule """
        id = 'E1001'
        shortdesc = 'Test Expiremental'
        description = 'Test Description'
        source_url = 'https://github.com/aws-cloudformation/cfn-python-lint/'
        tags = ['resources']
        experimental = True

    class TestRuleWarning(CloudFormationLintRule):
        """ Def Rule """
        id = 'W1001'
        shortdesc = 'Test Warning'
        description = 'Test Description'
        source_url = 'https://github.com/aws-cloudformation/cfn-python-lint/'
        tags = ['resources', 'iam']

    collection = RulesCollection(include_rules=['I'],
                                 include_experimental=True)
    collection.register(TestRuleError())
    collection.register(TestRuleWarning())
    collection.register(TestRuleExpiremental())

    # Patch target differs between py2 and py3.
    if sys.version_info.major == 3:
        builtin_module_name = 'builtins'
    else:
        builtin_module_name = '__builtin__'

    mo = mock_open(read_data=self.TEST_TEXT)
    # NOTE(review): the first __iter__ assignment is immediately
    # overwritten by the second and appears redundant.
    mo.return_value.__iter__ = lambda self: self
    # Make the mocked file iterable line-by-line for the doc updater.
    mo.return_value.__iter__ = lambda self: iter(self.readline, '')
    with patch('{}.open'.format(builtin_module_name),
               mo) as mock_builtin_open:
        cfnlint.maintenance.update_documentation(collection)
        # The rewritten doc: preserved preamble, generated rules table
        # (built-in E0000-E0002 plus the two registered non-experimental
        # rules), then the experimental section.
        expected_calls = [
            call('\n'),
            call('Regular Text\n'),
            call('## Rules\n'),
            call(
                '(_This documentation is generated by running `cfn-lint --update-documentation`, do not alter this manually_)\n\n'
            ),
            call(
                'The following **{}** rules are applied by this linter:\n\n'
                .format(len(collection) + 3)),
            call(
                '| Rule ID | Title | Description | Config<br />(Name:Type:Default) | Source | Tags |\n'
            ),
            call(
                '| -------- | ----- | ----------- | ---------- | ------ | ---- |\n'
            ),
            call(
                '| E0000<a name="E0000"></a> | Parsing error found when parsing the template | Checks for JSON/YAML formatting errors in your template | | [Source]() | `base` |\n'
            ),
            call(
                '| E0001<a name="E0001"></a> | Error found when transforming the template | Errors found when performing transformation on the template | | [Source]() | `base`,`transform` |\n'
            ),
            call(
                '| E0002<a name="E0002"></a> | Error processing rule on the template | Errors found when processing a rule on the template | | [Source]() | `base`,`rule` |\n'
            ),
            call(
                '| E1000<a name="E1000"></a> | Test Error | Test Description | | [Source](https://github.com/aws-cloudformation/cfn-python-lint/) | `resources` |\n'
            ),
            call(
                '| W1001<a name="W1001"></a> | Test Warning | Test Description | | [Source](https://github.com/aws-cloudformation/cfn-python-lint/) | `resources`,`iam` |\n'
            ),
            call('### Experimental rules\n'),
            call('| Rule ID | Title | Description | Source | Tags |\n'),
            call('| -------- | ----- | ----------- | ------ | ---- |\n'),
            call(
                '| E1001<a name="E1001"></a> | Test Expiremental | Test Description | | [Source](https://github.com/aws-cloudformation/cfn-python-lint/) | `resources` |\n'
            ),
        ]
        mock_builtin_open.return_value.write.assert_has_calls(
            expected_calls)
        # assert_has_calls allows extra writes; pin the exact count too.
        self.assertEqual(len(expected_calls),
                         mock_builtin_open.return_value.write.call_count)
def test_get_object_reader(self):
    """Reading an object opens the backing file path read-only."""
    fake_open = mock.mock_open()
    self.mock_object(__builtin__, 'open', fake_open)
    self.driver.get_object_reader(FAKE_CONTAINER, FAKE_OBJECT_NAME)
    __builtin__.open.assert_called_once_with(FAKE_OBJECT_PATH, 'r')
def open_mock():
    """Yield a mock standing in for ``open`` in the collections module."""
    mocked = mock_open()
    patcher = patch('yandex_tracker_client.collections.open', mocked,
                    create=True)
    with patcher:
        yield mocked
def _mock_open(self, monkeypatch, read_data=None):
    """Patch ``open`` on the xunit_directive module with canned contents."""
    fake_open = mock.mock_open(read_data=read_data)
    # raising=False: tolerate the attribute not pre-existing on the module.
    monkeypatch.setattr(xunit_directive, 'open', fake_open, raising=False)
class Testtcprecycle(unittest.TestCase):
    """Tests for the tcprecycle module: detection and remediation of
    aggressive TCP connection recycling (net.ipv4.tcp_tw_recycle)."""

    # Sysctl drop-in file the fix routine manages.
    config_file_path = "/etc/sysctl.d/55-tcp_rw_recycle.conf"

    def setUp(self):
        # Module output is printed; capture it for assertions.
        self.output = StringIO()

    def tearDown(self):
        self.output.close()

    @mock.patch("subprocess.check_output")
    def test_detect_noproblem(self, check_output_mock):
        # Recycling disabled -> detect() reports no problem.
        check_output_mock.return_value = "net.ipv4.tcp_tw_recycle = 0"
        self.assertFalse(moduletests.src.tcprecycle.detect())
        self.assertTrue(check_output_mock.called)

    @mock.patch("subprocess.check_output")
    def test_detect_problem(self, check_output_mock):
        # Recycling enabled -> detect() flags it.
        check_output_mock.return_value = "net.ipv4.tcp_tw_recycle = 1"
        self.assertTrue(moduletests.src.tcprecycle.detect())
        self.assertTrue(check_output_mock.called)

    @mock.patch("subprocess.check_output",
                side_effect=subprocess.CalledProcessError(
                    "1", "test",
                    "/etc/sysctl.d/55-tcp_rw_recycle.conf: no such file or directory"))
    def test_fix_cpe(self, check_output_mock):
        # A failing sysctl call propagates and prints [UNFIXED].
        with contextlib.redirect_stdout(self.output):
            self.assertRaises(subprocess.CalledProcessError,
                              moduletests.src.tcprecycle.fix,
                              self.config_file_path)
        self.assertTrue(self.output.getvalue().endswith(
            "[UNFIXED] sysctl -w net.ipv4.tcp_tw_recycle=0 failed for running system\n"))
        self.assertTrue(check_output_mock.called)

    # NOTE(review): mock.patch decorators inject bottom-up, and a patch
    # given a positional replacement (the mock_open instance) injects no
    # argument — so in the next three tests the first parameter actually
    # receives the os.path.exists mock and the second the
    # subprocess.check_output mock; the parameter names look swapped.
    # Harmless here (return_value is moot under side_effect and both are
    # only checked via .called), but worth confirming and renaming.
    @mock.patch("subprocess.check_output")
    @mock.patch("moduletests.src.tcprecycle.os.path.exists",
                side_effect=[False])
    @mock.patch("moduletests.src.tcprecycle.open",
                mock.mock_open(read_data="stuff"))
    def test_fix_exists_sudo_true(self, check_output_mock, exists_mock):
        check_output_mock.return_value = "True"
        with contextlib.redirect_stdout(self.output):
            self.assertTrue(
                moduletests.src.tcprecycle.fix(self.config_file_path))
        self.assertTrue(self.output.getvalue().endswith(
            "[FIXED] net.ipv4.tcp_tw_recycle=0 for running system\n"
            "[FIXED] net.ipv4.tcp_tw_recycle=0 in /etc/sysctl.d/55-tcp_rw_recycle.conf\n"))
        self.assertTrue(check_output_mock.called)
        self.assertTrue(exists_mock.called)

    @mock.patch("subprocess.check_output")
    @mock.patch("moduletests.src.tcprecycle.os.path.exists",
                side_effect=[True])
    @mock.patch("moduletests.src.tcprecycle.open",
                mock.mock_open(read_data="net.ipv4.tcp_tw_recycle = 0\n"
                                         "something else\n"))
    def test_fix_sudo_true(self, check_output_mock, exists_mock):
        # Existing config file containing the setting once.
        check_output_mock.return_value = "True"
        with contextlib.redirect_stdout(self.output):
            self.assertTrue(
                moduletests.src.tcprecycle.fix(self.config_file_path))
        self.assertTrue(self.output.getvalue().endswith(
            "[FIXED] net.ipv4.tcp_tw_recycle=0 for running system\n"
            "[FIXED] net.ipv4.tcp_tw_recycle=0 in /etc/sysctl.d/55-tcp_rw_recycle.conf\n"))
        self.assertTrue(check_output_mock.called)
        self.assertTrue(exists_mock.called)

    @mock.patch("subprocess.check_output")
    @mock.patch("moduletests.src.tcprecycle.os.path.exists",
                side_effect=[True])
    @mock.patch("moduletests.src.tcprecycle.open",
                mock.mock_open(read_data="net.ipv4.tcp_tw_recycle = 0\n"
                                         "net.ipv4.tcp_tw_recycle = 0\n"))
    def test_fix_sudo_true_found_twice(self, check_output_mock, exists_mock):
        # Duplicate entries in the config file must still fix cleanly.
        check_output_mock.return_value = "True"
        with contextlib.redirect_stdout(self.output):
            self.assertTrue(
                moduletests.src.tcprecycle.fix(self.config_file_path))
        self.assertTrue(self.output.getvalue().endswith(
            "[FIXED] net.ipv4.tcp_tw_recycle=0 for running system\n"
            "[FIXED] net.ipv4.tcp_tw_recycle=0 in /etc/sysctl.d/55-tcp_rw_recycle.conf\n"))
        self.assertTrue(check_output_mock.called)
        self.assertTrue(exists_mock.called)

    @mock.patch("subprocess.check_output")
    @mock.patch("moduletests.src.tcprecycle.os.path.exists",
                side_effect=[False])
    @mock.patch("moduletests.src.tcprecycle.open", side_effect=IOError)
    def test_fix_writefail(self, open_mock, exists_mock, check_output_mock):
        # All three patches inject here (bottom-up), so names are correct.
        check_output_mock.return_value = "True"
        with contextlib.redirect_stdout(self.output):
            self.assertRaises(IOError, moduletests.src.tcprecycle.fix,
                              self.config_file_path)
        self.assertTrue(self.output.getvalue().endswith(
            "[UNFIXED] Failed to write config to /etc/sysctl.d/55-tcp_rw_recycle.conf\n"))
        self.assertTrue(open_mock.called)
        self.assertTrue(exists_mock.called)
        self.assertTrue(check_output_mock.called)

    @mock.patch("moduletests.src.tcprecycle.get_config_dict")
    @mock.patch("moduletests.src.tcprecycle.detect", return_value=False)
    def test_run_success(self, detect_mock, get_config_dict_mock):
        # Nothing detected -> success message, no remediation.
        with contextlib.redirect_stdout(self.output):
            self.assertTrue(moduletests.src.tcprecycle.run())
        self.assertEqual(self.output.getvalue(),
                         "Determining if aggressive TCP recycling is enabled\n"
                         "[SUCCESS] Aggressive TCP recycling is disabled.\n")
        self.assertTrue(detect_mock.called)
        self.assertTrue(get_config_dict_mock.called)

    @mock.patch("moduletests.src.tcprecycle.get_config_dict")
    @mock.patch("moduletests.src.tcprecycle.detect", return_value=True)
    def test_run_no_remediate(self, detect_mock, get_config_dict_mock):
        # Problem detected but REMEDIATE is off -> explain and fail.
        get_config_dict_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
                                             "LOG_DIR": "/var/tmp/ec2rl",
                                             "BACKED_FILES": dict(),
                                             "REMEDIATE": False,
                                             "SUDO": True}
        with contextlib.redirect_stdout(self.output):
            self.assertFalse(moduletests.src.tcprecycle.run())
        self.assertTrue("[UNFIXED] Remediation impossible without sudo and --remediate.\n"
                        "-- Running as root/sudo: True\n"
                        "-- Required --remediate flag specified: False\n"
                        "[FAILURE] Aggressive TCP recycling is enabled."
                        in self.output.getvalue())
        self.assertTrue(detect_mock.called)
        self.assertTrue(get_config_dict_mock.called)

    @mock.patch("moduletests.src.tcprecycle.get_config_dict")
    @mock.patch("moduletests.src.tcprecycle.detect", return_value=True)
    @mock.patch("moduletests.src.tcprecycle.os.path.isfile",
                return_value=True)
    @mock.patch("moduletests.src.tcprecycle.backup", return_value=True)
    @mock.patch("moduletests.src.tcprecycle.fix", return_value=True)
    @mock.patch("moduletests.src.tcprecycle.restore", return_value=True)
    def test_run_failure_isfile(self, restore_mock, fix_mock, backup_mock,
                                isfile_mock, detect_mock,
                                get_config_dict_mock):
        # Fix "succeeds" but detect still reports a problem -> backup,
        # restore from the backed-up file, and fail overall.
        get_config_dict_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
                                             "LOG_DIR": "/var/tmp/ec2rl",
                                             "BACKED_FILES": {self.config_file_path: "/some/path"},
                                             "REMEDIATE": True,
                                             "SUDO": True}
        with contextlib.redirect_stdout(self.output):
            self.assertFalse(moduletests.src.tcprecycle.run())
        self.assertTrue("Determining if aggressive TCP recycling is enabled\n"
                        "[FAILURE] Aggressive TCP recycling is enabled."
                        in self.output.getvalue())
        self.assertTrue(restore_mock.called)
        self.assertTrue(fix_mock.called)
        self.assertTrue(backup_mock.called)
        self.assertTrue(isfile_mock.called)
        self.assertTrue(detect_mock.called)
        self.assertTrue(get_config_dict_mock.called)

    @mock.patch("moduletests.src.tcprecycle.get_config_dict")
    @mock.patch("moduletests.src.tcprecycle.detect", return_value=True)
    @mock.patch("moduletests.src.tcprecycle.os.path.isfile",
                return_value=False)
    @mock.patch("moduletests.src.tcprecycle.fix", return_value=True)
    def test_run_failure(self, fix_mock, isfile_mock, detect_mock,
                         config_mock):
        # Same failure path but with no backed-up file to restore.
        config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
                                    "LOG_DIR": "/var/tmp/ec2rl",
                                    "BACKED_FILES": dict(),
                                    "REMEDIATE": True,
                                    "SUDO": True}
        with contextlib.redirect_stdout(self.output):
            self.assertFalse(moduletests.src.tcprecycle.run())
        self.assertTrue("Determining if aggressive TCP recycling is enabled\n"
                        "[FAILURE] Aggressive TCP recycling is enabled."
                        in self.output.getvalue())
        self.assertTrue(fix_mock.called)
        self.assertTrue(isfile_mock.called)
        self.assertTrue(detect_mock.called)
        self.assertTrue(config_mock.called)

    @mock.patch("moduletests.src.tcprecycle.get_config_dict")
    @mock.patch("moduletests.src.tcprecycle.detect",
                side_effect=(True, False))
    @mock.patch("moduletests.src.tcprecycle.os.path.isfile",
                return_value=False)
    @mock.patch("moduletests.src.tcprecycle.fix", return_value=True)
    def test_run_fix(self, fix_mock, isfile_mock, detect_mock,
                     get_config_dict_mock):
        # detect() flags a problem first, then reports fixed after fix().
        get_config_dict_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
                                             "LOG_DIR": "/var/tmp/ec2rl",
                                             "BACKED_FILES": dict(),
                                             "REMEDIATE": True,
                                             "SUDO": True}
        with contextlib.redirect_stdout(self.output):
            self.assertTrue(moduletests.src.tcprecycle.run())
        self.assertEqual(self.output.getvalue(),
                         "Determining if aggressive TCP recycling is enabled\n"
                         "[SUCCESS] Aggressive TCP recycling is disabled after remediation. "
                         "Please see the logs for further details\n")
        self.assertTrue(fix_mock.called)
        self.assertTrue(isfile_mock.called)
        self.assertTrue(detect_mock.called)
        self.assertTrue(get_config_dict_mock.called)

    @mock.patch("moduletests.src.tcprecycle.get_config_dict")
    @mock.patch("moduletests.src.tcprecycle.detect", side_effect=Exception)
    @mock.patch("moduletests.src.tcprecycle.restore", return_value=True)
    def test_run_exception(self, restore_mock, detect_mock,
                           get_config_dict_mock):
        # An unexpected error triggers restore of backed-up files.
        get_config_dict_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
                                             "LOG_DIR": "/var/tmp/ec2rl",
                                             "BACKED_FILES": {self.config_file_path: "/some/path"},
                                             "REMEDIATE": True,
                                             "SUDO": True}
        with contextlib.redirect_stdout(self.output):
            self.assertFalse(moduletests.src.tcprecycle.run())
        self.assertTrue(restore_mock.called)
        self.assertTrue(detect_mock.called)
        self.assertTrue(get_config_dict_mock.called)

    @mock.patch("moduletests.src.tcprecycle.get_config_dict",
                side_effect=IOError)
    def test_run_failure_config_exception(self, get_config_dict_mock):
        # Failing to load the module config aborts with a log pointer.
        with contextlib.redirect_stdout(self.output):
            self.assertFalse(moduletests.src.tcprecycle.run())
        self.assertTrue(self.output.getvalue().endswith(
            "Review the logs to determine the cause of the issue.\n"))
        self.assertTrue(get_config_dict_mock.called)
def _get_config(*args):
    """Return a file-like mock whose contents are the YAML-serialized config."""
    serialized = yaml.dump(config)
    opener = mock.mock_open(read_data=serialized)
    # Call the factory so callers receive the file handle itself.
    return opener()
def test_write_task(self):
    """write_task opens tmp/in for writing and writes the task payload."""
    task = Task1(None)
    with mock.patch('builtins.open', mock.mock_open()) as mocked_open:
        task.write_task()
        mocked_open.assert_called_once_with('tmp/in', 'w')
        mocked_open().write.assert_called_once_with('task_data')
class TestGlanceImageService(base.TestCase): NOW_GLANCE_OLD_FORMAT = "2010-10-11T10:30:22" NOW_GLANCE_FORMAT = "2010-10-11T10:30:22.000000" NOW_DATETIME = datetime.datetime(2010, 10, 11, 10, 30, 22) def setUp(self): super(TestGlanceImageService, self).setUp() client = stubs.StubGlanceClient() self.context = context.RequestContext(auth_token=True) self.context.user_id = 'fake' self.context.project_id = 'fake' self.service = service.GlanceImageService(client, 1, self.context) self.config(glance_host='localhost', group='glance') try: self.config(auth_strategy='keystone', group='glance') except Exception: opts = [ cfg.StrOpt('auth_strategy', default='keystone'), ] CONF.register_opts(opts) return @staticmethod def _make_fixture(**kwargs): fixture = { 'name': None, 'properties': {}, 'status': None, 'is_public': None } fixture.update(kwargs) return fixture @property def endpoint(self): # For glanceclient versions >= 0.13, the endpoint is located # under http_client (blueprint common-client-library-2) # I5addc38eb2e2dd0be91b566fda7c0d81787ffa75 # Test both options to keep backward compatibility if getattr(self.service.client, 'endpoint', None): endpoint = self.service.client.endpoint else: endpoint = self.service.client.http_client.endpoint return endpoint def _make_datetime_fixture(self): return self._make_fixture(created_at=self.NOW_GLANCE_FORMAT, updated_at=self.NOW_GLANCE_FORMAT, deleted_at=self.NOW_GLANCE_FORMAT) def test_create_with_instance_id(self): # Ensure instance_id is persisted as an image-property. 
fixture = { 'name': 'test image', 'is_public': False, 'properties': { 'instance_id': '42', 'user_id': 'fake' } } image_id = self.service.create(fixture)['id'] image_meta = self.service.show(image_id) expected = { 'id': image_id, 'name': 'test image', 'is_public': False, 'size': None, 'min_disk': None, 'min_ram': None, 'disk_format': None, 'container_format': None, 'checksum': None, 'created_at': self.NOW_DATETIME, 'updated_at': self.NOW_DATETIME, 'deleted_at': None, 'deleted': None, 'status': None, 'properties': { 'instance_id': '42', 'user_id': 'fake' }, 'owner': None, } self.assertDictEqual(expected, image_meta) image_metas = self.service.detail() self.assertDictEqual(expected, image_metas[0]) def test_create_without_instance_id(self): """Test creating an image without an instance ID. Ensure we can create an image without having to specify an instance_id. Public images are an example of an image not tied to an instance. """ fixture = {'name': 'test image', 'is_public': False} image_id = self.service.create(fixture)['id'] expected = { 'id': image_id, 'name': 'test image', 'is_public': False, 'size': None, 'min_disk': None, 'min_ram': None, 'disk_format': None, 'container_format': None, 'checksum': None, 'created_at': self.NOW_DATETIME, 'updated_at': self.NOW_DATETIME, 'deleted_at': None, 'deleted': None, 'status': None, 'properties': {}, 'owner': None, } actual = self.service.show(image_id) self.assertDictEqual(expected, actual) def test_create(self): fixture = self._make_fixture(name='test image') num_images = len(self.service.detail()) image_id = self.service.create(fixture)['id'] self.assertIsNotNone(image_id) self.assertEqual(num_images + 1, len(self.service.detail())) def test_create_and_show_non_existing_image(self): fixture = self._make_fixture(name='test image') image_id = self.service.create(fixture)['id'] self.assertIsNotNone(image_id) self.assertRaises(exception.ImageNotFound, self.service.show, 'bad image id') def test_detail_private_image(self): 
fixture = self._make_fixture(name='test image') fixture['is_public'] = False properties = {'owner_id': 'proj1'} fixture['properties'] = properties self.service.create(fixture)['id'] proj = self.context.project_id self.context.project_id = 'proj1' image_metas = self.service.detail() self.context.project_id = proj self.assertEqual(1, len(image_metas)) self.assertEqual('test image', image_metas[0]['name']) self.assertFalse(image_metas[0]['is_public']) def test_detail_marker(self): fixtures = [] ids = [] for i in range(10): fixture = self._make_fixture(name='TestImage %d' % (i)) fixtures.append(fixture) ids.append(self.service.create(fixture)['id']) image_metas = self.service.detail(marker=ids[1]) self.assertEqual(8, len(image_metas)) i = 2 for meta in image_metas: expected = { 'id': ids[i], 'status': None, 'is_public': None, 'name': 'TestImage %d' % (i), 'properties': {}, 'size': None, 'min_disk': None, 'min_ram': None, 'disk_format': None, 'container_format': None, 'checksum': None, 'created_at': self.NOW_DATETIME, 'updated_at': self.NOW_DATETIME, 'deleted_at': None, 'deleted': None, 'owner': None, } self.assertDictEqual(expected, meta) i = i + 1 def test_detail_limit(self): fixtures = [] ids = [] for i in range(10): fixture = self._make_fixture(name='TestImage %d' % (i)) fixtures.append(fixture) ids.append(self.service.create(fixture)['id']) image_metas = self.service.detail(limit=5) self.assertEqual(5, len(image_metas)) def test_detail_default_limit(self): fixtures = [] ids = [] for i in range(10): fixture = self._make_fixture(name='TestImage %d' % (i)) fixtures.append(fixture) ids.append(self.service.create(fixture)['id']) image_metas = self.service.detail() for i, meta in enumerate(image_metas): self.assertEqual(meta['name'], 'TestImage %d' % (i)) def test_detail_marker_and_limit(self): fixtures = [] ids = [] for i in range(10): fixture = self._make_fixture(name='TestImage %d' % (i)) fixtures.append(fixture) ids.append(self.service.create(fixture)['id']) 
image_metas = self.service.detail(marker=ids[3], limit=5) self.assertEqual(5, len(image_metas)) i = 4 for meta in image_metas: expected = { 'id': ids[i], 'status': None, 'is_public': None, 'name': 'TestImage %d' % (i), 'properties': {}, 'size': None, 'min_disk': None, 'min_ram': None, 'disk_format': None, 'container_format': None, 'checksum': None, 'created_at': self.NOW_DATETIME, 'updated_at': self.NOW_DATETIME, 'deleted_at': None, 'deleted': None, 'owner': None, } self.assertDictEqual(expected, meta) i = i + 1 def test_detail_invalid_marker(self): fixtures = [] ids = [] for i in range(10): fixture = self._make_fixture(name='TestImage %d' % (i)) fixtures.append(fixture) ids.append(self.service.create(fixture)['id']) self.assertRaises(exception.Invalid, self.service.detail, marker='invalidmarker') def test_update(self): fixture = self._make_fixture(name='test image') image = self.service.create(fixture) image_id = image['id'] fixture['name'] = 'new image name' self.service.update(image_id, fixture) new_image_data = self.service.show(image_id) self.assertEqual('new image name', new_image_data['name']) def test_delete(self): fixture1 = self._make_fixture(name='test image 1') fixture2 = self._make_fixture(name='test image 2') fixtures = [fixture1, fixture2] num_images = len(self.service.detail()) self.assertEqual(0, num_images) ids = [] for fixture in fixtures: new_id = self.service.create(fixture)['id'] ids.append(new_id) num_images = len(self.service.detail()) self.assertEqual(2, num_images) self.service.delete(ids[0]) # When you delete an image from glance, it sets the status to DELETED # and doesn't actually remove the image. # Check the image is still there. num_images = len(self.service.detail()) self.assertEqual(2, num_images) # Check the image is marked as deleted. 
        # NOTE(review): tail of a test whose "def" line lies before this
        # chunk — counts the images not marked deleted and expects one left.
        num_images = len(
            [x for x in self.service.detail() if not x['deleted']])
        self.assertEqual(1, num_images)

    def test_show_passes_through_to_client(self):
        """show() returns the full, normalized metadata dict of an image."""
        fixture = self._make_fixture(name='image1', is_public=True)
        image_id = self.service.create(fixture)['id']

        image_meta = self.service.show(image_id)
        # Every metadata key is present, defaulted to None/{} when the
        # backend did not supply a value.
        expected = {
            'id': image_id,
            'name': 'image1',
            'is_public': True,
            'size': None,
            'min_disk': None,
            'min_ram': None,
            'disk_format': None,
            'container_format': None,
            'checksum': None,
            'created_at': self.NOW_DATETIME,
            'updated_at': self.NOW_DATETIME,
            'deleted_at': None,
            'deleted': None,
            'status': None,
            'properties': {},
            'owner': None,
        }
        self.assertEqual(expected, image_meta)

    def test_show_raises_when_no_authtoken_in_the_context(self):
        """show() raises ImageNotFound when the context lacks an auth token."""
        fixture = self._make_fixture(name='image1',
                                     is_public=False,
                                     properties={'one': 'two'})
        image_id = self.service.create(fixture)['id']
        # Drop the token; a non-public image must become invisible.
        self.context.auth_token = False

        self.assertRaises(exception.ImageNotFound,
                          self.service.show,
                          image_id)

    def test_detail_passes_through_to_client(self):
        """detail() returns a list of normalized metadata dicts."""
        fixture = self._make_fixture(name='image10', is_public=True)
        image_id = self.service.create(fixture)['id']
        image_metas = self.service.detail()
        expected = [
            {
                'id': image_id,
                'name': 'image10',
                'is_public': True,
                'size': None,
                'min_disk': None,
                'min_ram': None,
                'disk_format': None,
                'container_format': None,
                'checksum': None,
                'created_at': self.NOW_DATETIME,
                'updated_at': self.NOW_DATETIME,
                'deleted_at': None,
                'deleted': None,
                'status': None,
                'properties': {},
                'owner': None,
            },
        ]
        self.assertEqual(expected, image_metas)

    def test_show_makes_datetimes(self):
        """show() converts timestamp fields into datetime objects."""
        fixture = self._make_datetime_fixture()
        image_id = self.service.create(fixture)['id']
        image_meta = self.service.show(image_id)
        self.assertEqual(self.NOW_DATETIME, image_meta['created_at'])
        self.assertEqual(self.NOW_DATETIME, image_meta['updated_at'])

    def test_detail_makes_datetimes(self):
        """detail() converts timestamp fields into datetime objects."""
        fixture = self._make_datetime_fixture()
        self.service.create(fixture)
        image_meta = self.service.detail()[0]
        self.assertEqual(self.NOW_DATETIME, image_meta['created_at'])
        self.assertEqual(self.NOW_DATETIME, image_meta['updated_at'])

    @mock.patch.object(time, 'sleep', autospec=True)
    def test_download_with_retries(self, mock_sleep):
        """download() retries a ServiceUnavailable error when retries are on.

        With glance_num_retries=0 the first failure propagates as
        GlanceConnectionFailed; with 1 retry the second attempt succeeds
        and sleep() is called between attempts.
        """
        tries = [0]

        class MyGlanceStubClient(stubs.StubGlanceClient):
            """A client that fails the first time, then succeeds."""
            def get(self, image_id):
                if tries[0] == 0:
                    tries[0] = 1
                    raise glance_exc.ServiceUnavailable('')
                else:
                    return {}

        stub_client = MyGlanceStubClient()
        stub_context = context.RequestContext(auth_token=True)
        stub_context.user_id = 'fake'
        stub_context.project_id = 'fake'
        stub_service = service.GlanceImageService(stub_client, 1, stub_context)
        image_id = 1  # doesn't matter
        writer = NullWriter()

        # When retries are disabled, we should get an exception
        self.config(glance_num_retries=0, group='glance')
        self.assertRaises(exception.GlanceConnectionFailed,
                          stub_service.download,
                          image_id, writer)

        # Now lets enable retries. No exception should happen now.
        tries = [0]
        self.config(glance_num_retries=1, group='glance')
        stub_service.download(image_id, writer)
        self.assertTrue(mock_sleep.called)

    @mock.patch('sendfile.sendfile', autospec=True)
    @mock.patch('os.path.getsize', autospec=True)
    @mock.patch('%s.open' % __name__, new=mock.mock_open(), create=True)
    def test_download_file_url(self, mock_getsize, mock_sendfile):
        """A file:// direct_url download is served via sendfile, not read()."""
        # NOTE: only in v2 API
        class MyGlanceStubClient(stubs.StubGlanceClient):
            """A client that returns a file url."""

            s_tmpfname = '/whatever/source'

            def get(self, image_id):
                # NOTE(review): this leaves a literal '%s' in the URL
                # ('file://%s/whatever/source'); presumably the intent was
                # 'file://' + self.s_tmpfname — confirm against the
                # direct-url handling under test.
                return type('GlanceTestDirectUrlMeta', (object, ),
                            {'direct_url': 'file://%s' + self.s_tmpfname})

        stub_context = context.RequestContext(auth_token=True)
        stub_context.user_id = 'fake'
        stub_context.project_id = 'fake'
        stub_client = MyGlanceStubClient()
        stub_service = service.GlanceImageService(stub_client,
                                                  context=stub_context,
                                                  version=2)
        image_id = 1  # doesn't matter

        self.config(allowed_direct_url_schemes=['file'], group='glance')

        # patching open in base_image_service module namespace
        # to make call-spec assertions
        with mock.patch('ironic.common.glance_service.base_image_service.open',
                        new=mock.mock_open(),
                        create=True) as mock_ironic_open:
            with open('/whatever/target', 'w') as mock_target_fd:
                stub_service.download(image_id, mock_target_fd)

        # assert the image data was neither read nor written
        # but rather sendfiled
        mock_ironic_open.assert_called_once_with(
            MyGlanceStubClient.s_tmpfname, 'r')
        mock_source_fd = mock_ironic_open()
        self.assertFalse(mock_source_fd.read.called)
        self.assertFalse(mock_target_fd.write.called)
        mock_sendfile.assert_called_once_with(
            mock_target_fd.fileno(),
            mock_source_fd.fileno(),
            0,
            mock_getsize(MyGlanceStubClient.s_tmpfname))

    def test_client_forbidden_converts_to_imagenotauthed(self):
        """A glance Forbidden error surfaces as ImageNotAuthorized."""
        class MyGlanceStubClient(stubs.StubGlanceClient):
            """A client that raises a Forbidden exception."""
            def get(self, image_id):
                raise glance_exc.Forbidden(image_id)

        stub_client = MyGlanceStubClient()
        stub_context = context.RequestContext(auth_token=True)
        stub_context.user_id = 'fake'
        stub_context.project_id = 'fake'
        stub_service = service.GlanceImageService(stub_client, 1, stub_context)
        image_id = 1  # doesn't matter
        writer = NullWriter()
        self.assertRaises(exception.ImageNotAuthorized,
                          stub_service.download,
                          image_id, writer)

    def test_client_httpforbidden_converts_to_imagenotauthed(self):
        """A glance HTTPForbidden error surfaces as ImageNotAuthorized."""
        class MyGlanceStubClient(stubs.StubGlanceClient):
            """A client that raises a HTTPForbidden exception."""
            def get(self, image_id):
                raise glance_exc.HTTPForbidden(image_id)

        stub_client = MyGlanceStubClient()
        stub_context = context.RequestContext(auth_token=True)
        stub_context.user_id = 'fake'
        stub_context.project_id = 'fake'
        stub_service = service.GlanceImageService(stub_client, 1, stub_context)
        image_id = 1  # doesn't matter
        writer = NullWriter()
        self.assertRaises(exception.ImageNotAuthorized,
                          stub_service.download,
                          image_id, writer)

    def test_client_notfound_converts_to_imagenotfound(self):
        """A glance NotFound error surfaces as ImageNotFound."""
        class MyGlanceStubClient(stubs.StubGlanceClient):
            """A client that raises a NotFound exception."""
            def get(self, image_id):
                raise glance_exc.NotFound(image_id)

        stub_client = MyGlanceStubClient()
        stub_context = context.RequestContext(auth_token=True)
        stub_context.user_id = 'fake'
        stub_context.project_id = 'fake'
        stub_service = service.GlanceImageService(stub_client, 1, stub_context)
        image_id = 1  # doesn't matter
        writer = NullWriter()
        self.assertRaises(exception.ImageNotFound,
                          stub_service.download,
                          image_id, writer)

    def test_client_httpnotfound_converts_to_imagenotfound(self):
        """A glance HTTPNotFound error surfaces as ImageNotFound."""
        class MyGlanceStubClient(stubs.StubGlanceClient):
            """A client that raises a HTTPNotFound exception."""
            def get(self, image_id):
                raise glance_exc.HTTPNotFound(image_id)

        stub_client = MyGlanceStubClient()
        stub_context = context.RequestContext(auth_token=True)
        stub_context.user_id = 'fake'
        stub_context.project_id = 'fake'
        stub_service = service.GlanceImageService(stub_client, 1, stub_context)
        image_id = 1  # doesn't matter
        writer = NullWriter()
        self.assertRaises(exception.ImageNotFound,
                          stub_service.download,
                          image_id, writer)

    def test_check_image_service_client_set(self):
        """check_image_service calls through when a client already exists."""
        def func(self):
            return True

        self.service.client = True

        wrapped_func = base_image_service.check_image_service(func)
        self.assertTrue(wrapped_func(self.service))

    @mock.patch.object(glance_client, 'Client', autospec=True)
    def test_check_image_service__no_client_set_http(self, mock_gclient):
        """With no client set, one is built from the http image_href."""
        def func(service, *args, **kwargs):
            return (self.endpoint, args, kwargs)

        endpoint = 'http://123.123.123.123:9292'
        mock_gclient.return_value.endpoint = endpoint
        self.service.client = None

        params = {'image_href': '%s/image_uuid' % endpoint}
        self.config(auth_strategy='keystone', group='glance')
        wrapped_func = base_image_service.check_image_service(func)

        self.assertEqual((endpoint, (), params),
                         wrapped_func(self.service, **params))
        # Client built for API v1 against the parsed endpoint, passing
        # through the insecure flag and the context token.
        mock_gclient.assert_called_once_with(
            1, endpoint,
            **{'insecure': CONF.glance.glance_api_insecure,
               'token': self.context.auth_token})

    @mock.patch.object(glance_client, 'Client', autospec=True)
    def test_get_image_service__no_client_set_https_insecure(
            self, mock_gclient):
        """An https endpoint with glance_api_insecure=True skips the CA."""
        def func(service, *args, **kwargs):
            return (self.endpoint, args, kwargs)

        endpoint = 'https://123.123.123.123:9292'
        mock_gclient.return_value.endpoint = endpoint
        self.service.client = None

        params = {'image_href': '%s/image_uuid' % endpoint}
        self.config(auth_strategy='keystone', group='glance')
        self.config(glance_api_insecure=True, group='glance')
        wrapped_func = base_image_service.check_image_service(func)

        self.assertEqual((endpoint, (), params),
                         wrapped_func(self.service, **params))
        mock_gclient.assert_called_once_with(
            1, endpoint,
            **{'insecure': CONF.glance.glance_api_insecure,
               'token': self.context.auth_token})

    @mock.patch.object(glance_client, 'Client', autospec=True)
    def test_get_image_service__no_client_set_https_secure(self,
                                                           mock_gclient):
        """A secure https endpoint forwards the configured CA file."""
        def func(service, *args, **kwargs):
            return (self.endpoint, args, kwargs)

        endpoint = 'https://123.123.123.123:9292'
        mock_gclient.return_value.endpoint = endpoint
        self.service.client = None

        params = {'image_href': '%s/image_uuid' % endpoint}
        self.config(auth_strategy='keystone', group='glance')
        self.config(glance_api_insecure=False, group='glance')
        self.config(glance_cafile='/path/to/certfile', group='glance')
        wrapped_func = base_image_service.check_image_service(func)

        self.assertEqual((endpoint, (), params),
                         wrapped_func(self.service, **params))
        mock_gclient.assert_called_once_with(
            1, endpoint,
            **{'cacert': CONF.glance.glance_cafile,
               'insecure': CONF.glance.glance_api_insecure,
               'token': self.context.auth_token})
class UnpackAssessmentZipCommandTests(KALiteTestCase):
    """Tests for the ``unpack_assessment_zip`` management command."""

    def setUp(self):
        # Start each test from a clean content database.
        reset_content_db()
        # Create a dummy assessment item zip
        _, self.zipfile_path = tempfile.mkstemp()
        with open(self.zipfile_path, "w") as f:
            zf = zipfile.ZipFile(f, "w")
            zf.writestr("assessmentitems.version", version.SHORTVERSION)
            zf.close()

    def tearDown(self):
        # Remove the temp zip created by setUp.
        os.unlink(self.zipfile_path)

    @patch("%s.open" % mod.__name__, mock_open(), create=True)
    @patch.object(requests, "get")
    @patch.object(mod, "should_upgrade_assessment_items")
    def test_command_should_skip(self, upgrade_method, get_method):
        """When no upgrade is needed, neither a download nor a file open
        happens, for URL and local-path arguments alike."""
        upgrade_method.return_value = False

        # test that we don't update when given a url
        url = "http://fakeurl.com/test.zip"
        call_command("unpack_assessment_zip", url)
        self.assertEqual(
            get_method.call_count, 0,
            "requests.get was called even if we should've skipped!")

        filename = "/fake/file/somewhere.zip"
        call_command("unpack_assessment_zip", filename)
        self.assertEqual(
            mod.open.call_count, 0,
            "open was called even if we should've skipped!")

    @unittest.skipIf(os.environ.get('CIRCLECI', False),
                     "Skipping on Circle CI")
    @patch.object(requests, "get", autospec=True)
    def test_command_with_url(self, get_method):
        """Given a URL, the command downloads the zip (streaming) and
        extracts its members into CONTENT_ROOT."""
        # Skipped because of concurrency issues when running
        url = "http://fakeurl.com/test.zip"

        with open(self.zipfile_path) as f:
            zip_raw_data = f.read()
            zf = zipfile.ZipFile(StringIO.StringIO(zip_raw_data))

        # Serve the fixture zip's bytes as the fake HTTP response body.
        get_method.return_value.iter_content = MagicMock(
            return_value=zip_raw_data)

        call_command(
            "unpack_assessment_zip",
            url,
            force_download=True  # always force the download, so we can be sure the get method gets called
        )

        get_method.assert_called_once_with(url, stream=True)

        # verify that the other items are written to the content directory
        for filename in zf.namelist():
            # already verified above; no need to double-dip
            if "assessmentitems" in filename:
                continue
            else:
                filename_path = os.path.join(mod.CONTENT_ROOT, filename)
                self.assertTrue(
                    os.path.exists(filename_path),
                    "%s wasn't extracted to %s" % (filename,
                                                   mod.CONTENT_ROOT))

    def test_command_with_local_path(self):
        # TODO: local-path behavior is untested.
        pass
def open_mock(content, **kwargs): m = mock_open(read_data=content) with patch('__builtin__.open', m, create=True, **kwargs) as m: yield m
class FileItemResourceTest(ResourceTestCaseMixin, ExchangeTest):
    """Tests for the fileservice upload / download / view endpoints."""

    def setUp(self):
        super(FileItemResourceTest, self).setUp()
        # turn on streaming_support so that test_view can test the
        # view endpoint
        # without streaming_supported set to True, view endpoint
        # will behave exactly like download
        settings.FILESERVICE_CONFIG['streaming_supported'] = True
        # An empty in-memory "image" upload fixture.
        self.image_filename = 'image.jpg'
        self.image_file = SimpleUploadedFile(
            name=self.image_filename,
            content='',
            content_type='image/jpg',
        )
        self.upload_url = '/api/fileservice/'
        self.download_url_template = '/api/fileservice/download/{0}'
        self.view_url_template = '/api/fileservice/view/{0}'

    @mock.patch('exchange.fileservice.api.open', mock_open())
    def test_upload(self):
        """Uploading a file returns HTTP 201 Created."""
        self.login()
        resp = self.client.post(self.upload_url,
                                {'file': self.image_file},
                                follow=True)
        self.assertHttpCreated(resp)

    @mock.patch('exchange.fileservice.api.serve')
    @mock.patch('exchange.fileservice.api.os.path.isfile')
    def test_download(self, isfile_mock, serve_mock):
        """Downloading an existing file sets a Content-Disposition
        attachment header with the original filename."""
        isfile_mock.return_value = True
        serve_mock.return_value = HttpResponse('Empty Response')
        self.login()
        resp = self.client.get(self.download_url_template.format(
            self.image_filename), follow=True)
        self.assertEquals(
            resp.get('Content-Disposition'),
            'attachment; filename="{}"'.format(self.image_filename))

    @mock.patch('exchange.fileservice.api.os.path.isfile')
    def test_download_not_found(self, isfile_mock):
        """Downloading a missing file returns HTTP 404."""
        isfile_mock.return_value = False
        self.login()
        resp = self.client.get(self.download_url_template.format(
            self.image_filename), follow=True)
        self.assertHttpNotFound(resp)

    def test_view(self):
        """The view endpoint hands playback off to the front web server."""
        self.login()
        resp = self.client.get(self.view_url_template.format(
            self.image_filename), follow=True)
        '''
        the view end point is meant for playing back video with random access
        which means the progress indicator can be dragged around. FO the
        random access to work properly, instead of django serving up the
        video, nginx or apache have to serve it up and the fileservice adds
        the 'X-Sendfile' and the equivalent 'X-Accel-Redirect' so that they
        take it from there. Even if that happens, at least one of the
        headers should technically be present.
        '''
        self.assertTrue(resp.get('X-Sendfile') or
                        resp.get('X-Accel-Redirect'))

    def test_upload_whitelist(self):
        """Uploading a type outside types_allowed is a 400 Bad Request."""
        settings.FILESERVICE_CONFIG['types_allowed'] = ['.txt']
        self.login()
        resp = self.client.post(self.upload_url,
                                {'file': self.image_file},
                                follow=True)
        self.assertHttpBadRequest(resp)

    @mock.patch('exchange.fileservice.helpers.get_fileservice_files')
    def test_statics(self, get_fileservice_files_mock):
        """get_file_item_by_name finds an item among the listed files."""
        get_fileservice_files_mock.return_value = ['a.jpg', 'b.jpg']
        item = FileItemResource.get_file_item_by_name('a.jpg')
        self.assertTrue(item.name == 'a.jpg')
self.assertFalse(result) @mock.patch.object( results2.taskqueue, 'add', mock.MagicMock(side_effect=taskqueue.TaskAlreadyExistsError)) def testScheduleResults2Generation2_AlreadyRunning(self): job = _JobStub(_JOB_WITH_DIFFERENCES, '123') result = results2.ScheduleResults2Generation(job) self.assertTrue(result) @mock.patch.object(results2, 'open', mock.mock_open(read_data='fake_viewer'), create=True) class GenerateResults2Test(testing_common.TestCase): @mock.patch.object(results2, '_FetchHistogramsDataFromJobData', mock.MagicMock(return_value=['a', 'b'])) @mock.patch.object(results2, '_GcsFileStream', mock.MagicMock()) @mock.patch.object(results2.render_histograms_viewer, 'RenderHistogramsViewer') def testPost_Renders(self, mock_render): job = _JobStub(_JOB_NO_DIFFERENCES, '123') results2.GenerateResults2(job) mock_render.assert_called_with(['a', 'b'], mock.ANY, reset_results=True, vulcanized_html='fake_viewer')
class LoadTests(unittest.TestCase):
    """Unit tests for :func:`fedora_messaging.config.load`."""

    def test_deep_copy(self):
        """Assert nested dictionaries in DEFAULTS are not copied into the config instance."""
        config = msg_config.LazyConfig().load_config()
        # Mutating the loaded config must not leak back into DEFAULTS.
        config["queues"]["somequeue"] = {}
        self.assertNotIn("somequeue", msg_config.DEFAULTS["queues"])

    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=False)
    def test_missing_config_file(self, mock_exists, mock_log):
        """Assert loading the config with a missing file works."""
        config = msg_config.LazyConfig().load_config()
        self.assertEqual(msg_config.DEFAULTS, config)
        mock_exists.assert_called_once_with(
            "/etc/fedora-messaging/config.toml")
        mock_log.info.assert_called_once_with(
            "The configuration file, /etc/fedora-messaging/config.toml, does not exist."
        )

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data='bad_key = "val"'))
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_override_client_props(self, mock_exists):
        """Assert overriding reserved keys in client properties fails."""
        conf = '[client_properties]\n{} = "val"'
        # Each reserved key is tried in its own nested patch of open().
        for key in ("version", "information", "product"):
            with mock.patch(
                    "fedora_messaging.config.open",
                    mock.mock_open(read_data=conf.format(key)),
            ):
                config = msg_config.LazyConfig()
                self.assertRaises(ConfigurationException, config.load_config)

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data='bad_key = "val"'))
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_invalid_key(self, mock_exists):
        """Assert an unknown config key raises an exception."""
        config = msg_config.LazyConfig()
        self.assertRaises(ConfigurationException, config.load_config)

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data="Ni!"))
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_bad_config_file(self, mock_exists):
        """Assert an invalid TOML file raises a ConfigurationException."""
        with self.assertRaises(ConfigurationException) as cm:
            msg_config.LazyConfig().load_config()
        # The exception message pins the exact TOML parser diagnostics.
        error = (
            "Failed to parse /etc/fedora-messaging/config.toml: error at line 1, column 3: "
            "Found invalid character in key name: '!'. Try quoting the key name."
        )
        self.assertEqual(error, cm.exception.message)

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=partial_config))
    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_partial_config_file(self, mock_exists, mock_log):
        """Assert a config file that uses a subset of keys works as expected"""
        config = msg_config.LazyConfig().load_config()
        self.assertNotEqual("special_exchange",
                            msg_config.DEFAULTS["publish_exchange"])
        self.assertEqual("special_exchange", config["publish_exchange"])
        mock_exists.assert_called_once_with(
            "/etc/fedora-messaging/config.toml")
        mock_log.info.assert_called_once_with(
            "Loading configuration from /etc/fedora-messaging/config.toml")
        self.assertEqual(0, mock_log.warning.call_count)

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=full_config))
    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_full_config_file(self, mock_exists, mock_log):
        """Assert a config with the full set of configurations loads correctly."""
        # Mirrors the full_config fixture key-for-key.
        expected_config = dict(
            amqp_url="amqp://*****:*****@rabbit-server1:5672/%2F",
            client_properties={
                "app": "Example App",
                "product": "Fedora Messaging with Pika",
                "information": "https://fedora-messaging.readthedocs.io/en/stable/",
                "version": msg_config.DEFAULTS["client_properties"]["version"],
            },
            topic_prefix="",
            publish_exchange="special_exchange",
            passive_declares=False,
            exchanges={
                "custom_exchange": {
                    "type": "fanout",
                    "durable": False,
                    "auto_delete": False,
                    "arguments": {},
                }
            },
            queues={
                "my_queue": {
                    "durable": True,
                    "auto_delete": False,
                    "exclusive": False,
                    "arguments": {},
                }
            },
            bindings=[{
                "queue": "my_queue",
                "exchange": "amq.topic",
                "routing_keys": ["#"]
            }],
            qos={
                "prefetch_size": 25,
                "prefetch_count": 25
            },
            callback="fedora_messaging.examples:print_msg",
            consumer_config={"example_key": "for my consumer"},
            tls={
                "ca_cert": "/etc/pki/tls/certs/ca-bundle.crt",
                "keyfile": "/my/client/key.pem",
                "certfile": "/my/client/cert.pem",
            },
            log_config={
                "version": 1,
                "disable_existing_loggers": True,
                "formatters": {
                    "simple": {
                        "format": "[%(name)s %(levelname)s] %(message)s"
                    }
                },
                "handlers": {
                    "console": {
                        "class": "logging.StreamHandler",
                        "formatter": "simple",
                        "stream": "ext://sys.stderr",
                    }
                },
                "loggers": {
                    "fedora_messaging": {
                        "level": "INFO",
                        "propagate": False,
                        "handlers": ["console"],
                    }
                },
                "root": {
                    "level": "DEBUG",
                    "handlers": ["console"]
                },
            },
        )
        config = msg_config.LazyConfig().load_config()
        self.assertEqual(sorted(expected_config.keys()),
                         sorted(config.keys()))
        for key in expected_config:
            self.assertEqual(expected_config[key], config[key])
        mock_exists.assert_called_once_with(
            "/etc/fedora-messaging/config.toml")
        mock_log.info.assert_called_once_with(
            "Loading configuration from /etc/fedora-messaging/config.toml")
        self.assertEqual(0, mock_log.warning.call_count)

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=partial_config))
    @mock.patch.dict("fedora_messaging.config.os.environ",
                     {"FEDORA_MESSAGING_CONF": "/my/config"})
    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_custom_config_file(self, mock_exists, mock_log):
        """Assert using the environment variable to set the config path works."""
        config = msg_config.LazyConfig().load_config()
        self.assertNotEqual("special_exchange",
                            msg_config.DEFAULTS["publish_exchange"])
        self.assertEqual("special_exchange", config["publish_exchange"])
        mock_exists.assert_called_once_with("/my/config")
        mock_log.info.assert_called_once_with(
            "Loading configuration from /my/config")
        self.assertEqual(0, mock_log.warning.call_count)

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=empty_config))
    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_empty_config_file(self, mock_exists, mock_log):
        """Assert loading the config with an empty file that exists works."""
        config = msg_config.LazyConfig().load_config()
        self.assertEqual(msg_config.DEFAULTS, config)
        mock_exists.assert_called_once_with(
            "/etc/fedora-messaging/config.toml")
        mock_log.info.assert_called_once_with(
            "Loading configuration from /etc/fedora-messaging/config.toml")

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=empty_config))
    @mock.patch("fedora_messaging.config.logging.config.dictConfig",
                autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_setup_logging(self, mock_exists, mock_dictConfig):
        """Assert setup_logging passes the log_config key to dictConfig."""
        config = msg_config.LazyConfig().load_config()
        config.setup_logging()
        mock_dictConfig.assert_called_once_with(config["log_config"])

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=empty_config))
    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_load_on_copy(self, mock_exists, mock_log):
        """Assert the config is loaded when copy is called."""
        config = msg_config.LazyConfig()
        copy = config.copy()
        self.assertEqual(msg_config.DEFAULTS, copy)
        mock_exists.assert_called_once_with(
            "/etc/fedora-messaging/config.toml")
        mock_log.info.assert_called_once_with(
            "Loading configuration from /etc/fedora-messaging/config.toml")

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=empty_config))
    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_load_on_get(self, mock_exists, mock_log):
        """Assert the config is loaded when get is called."""
        config = msg_config.LazyConfig()
        self.assertEqual(msg_config.DEFAULTS["callback"],
                         config.get("callback"))
        self.assertEqual(msg_config.DEFAULTS["amqp_url"],
                         config.get("amqp_url"))
        mock_exists.assert_called_once_with(
            "/etc/fedora-messaging/config.toml")
        mock_log.info.assert_called_once_with(
            "Loading configuration from /etc/fedora-messaging/config.toml")

    def test_explode_on_pop(self):
        """Assert calling pop raises an exception."""
        config = msg_config.LazyConfig()
        self.assertRaises(ConfigurationException, config.pop)

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=empty_config))
    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_load_on_update(self, mock_exists, mock_log):
        """Assert the config is loaded when update is called."""
        config = msg_config.LazyConfig()
        config.update({})
        self.assertEqual(msg_config.DEFAULTS, config)
        mock_exists.assert_called_once_with(
            "/etc/fedora-messaging/config.toml")
        mock_log.info.assert_called_once_with(
            "Loading configuration from /etc/fedora-messaging/config.toml")

    @mock.patch("fedora_messaging.config.open",
                mock.mock_open(read_data=empty_config))
    @mock.patch("fedora_messaging.config._log", autospec=True)
    @mock.patch("fedora_messaging.config.os.path.exists", return_value=True)
    def test_load_on_setup_logging(self, mock_exists, mock_log):
        """Assert the config is loaded when setup_logging is called."""
        config = msg_config.LazyConfig()
        config.setup_logging()
        self.assertEqual(msg_config.DEFAULTS, config)
        mock_exists.assert_called_once_with(
            "/etc/fedora-messaging/config.toml")
        mock_log.info.assert_called_once_with(
            "Loading configuration from /etc/fedora-messaging/config.toml")

    def test_load_on_get_item(self):
        """Assert load_config is called when __getitem__ is invoked."""
        config = msg_config.LazyConfig()
        config.load_config = mock.Mock()
        # KeyError is fine — we only care that the lazy load was triggered.
        try:
            config["some_key"]
        except KeyError:
            pass
        config.load_config.assert_called_once_with()
def _call(cls, *args, **kwargs): # pylint: disable=unused-argument from certbot._internal.log import pre_arg_parse_setup with mock.patch('builtins.open', mock.mock_open()): return pre_arg_parse_setup()
def test_read_postmaster_pidfile(self): with patch.object(builtins, 'open', Mock(side_effect=IOError)): self.assertIsNone(PostmasterProcess.from_pidfile('')) with patch.object(builtins, 'open', mock_open(read_data='123\n')): self.assertIsNone(PostmasterProcess.from_pidfile(''))
class LinuxSysinfoTest(unittest.TestCase):
    """treadmill.sysinfo test."""

    # Canned /proc/meminfo content (NOTE(review): reconstructed from a
    # whitespace-mangled paste as one "<key>: <value>" entry per line —
    # the form the meminfo parser expects; confirm exact spacing).
    PROC_MEMINFO = """
MemTotal: 7992596 kB
MemFree: 3572940 kB
Buffers: 202564 kB
Cached: 2371108 kB
SwapCached: 0 kB
Active: 2959388 kB
Inactive: 868476 kB
HighTotal: 0 kB
HighFree: 0 kB
LowTotal: 7992596 kB
LowFree: 3572940 kB
SwapTotal: 4064436 kB
SwapFree: 4064436 kB
Dirty: 240 kB
Writeback: 0 kB
AnonPages: 1254148 kB
Mapped: 104244 kB
Slab: 500152 kB
PageTables: 17180 kB
NFS_Unstable: 0 kB
Bounce: 0 kB
CommitLimit: 11257772 kB
Committed_AS: 2268028 kB
VmallocTotal: 34359738367 kB
VmallocUsed: 335508 kB
VmallocChunk: 34359375019 kB
HugePages_Total: 0
HugePages_Free: 0
HugePages_Rsvd: 0
Hugepagesize: 2048 kB
"""

    # Canned /proc/cpuinfo with four logical CPUs (same reconstruction
    # caveat as PROC_MEMINFO above).
    CPUINFO = """
processor : 0
vendor_id : GenuineIntel
cpu family : 6
model : 58
model name : Intel(R) Core(TM) i5-3470 CPU @ 3.20GHz
stepping : 9
cpu MHz : 1600.000
cache size : 6144 KB
physical id : 0
siblings : 4
core id : 0
cpu cores : 4
apicid : 0
fpu : yes
fpu_exception : yes
cpuid level : 13
wp : yes
flags : fpu vme de pse
bogomips : 6385.66
clflush size : 64
cache_alignment : 64
address sizes : 36 bits physical, 48 bits virtual
power management: [8]

processor : 1
vendor_id : GenuineIntel
cpu family : 6
model : 58
model name : Intel(R) Core(TM) i5-3470 CPU @ 3.20GHz
stepping : 9
cpu MHz : 1600.000
cache size : 6144 KB
physical id : 0
siblings : 4
core id : 1
cpu cores : 4
apicid : 2
fpu : yes
fpu_exception : yes
cpuid level : 13
wp : yes
flags : fpu vme de pse
bogomips : 6384.64
clflush size : 64
cache_alignment : 64
address sizes : 36 bits physical, 48 bits virtual
power management: [8]

processor : 2
vendor_id : GenuineIntel
cpu family : 6
model : 58
model name : Intel(R) Core(TM) i5-3470 CPU @ 3.20GHz
stepping : 9
cpu MHz : 1600.000
cache size : 6144 KB
physical id : 0
siblings : 4
core id : 2
cpu cores : 4
apicid : 4
fpu : yes
fpu_exception : yes
cpuid level : 13
wp : yes
flags : fpu vme de pse
bogomips : 6385.26
clflush size : 64
cache_alignment : 64
address sizes : 36 bits physical, 48 bits virtual
power management: [8]

processor : 3
vendor_id : GenuineIntel
cpu family : 6
model : 58
model name : Intel(R) Core(TM) i5-3470 CPU @ 3.20GHz
stepping : 9
cpu MHz : 1600.000
cache size : 6144 KB
physical id : 0
siblings : 4
core id : 3
cpu cores : 4
apicid : 6
fpu : yes
fpu_exception : yes
cpuid level : 13
wp : yes
flags : fpu vme de pse
bogomips : 6384.10
clflush size : 64
cache_alignment : 64
address sizes : 36 bits physical, 48 bits virtual
power management: [8]
"""

    def test_proc_info(self):
        """Proc info test."""
        proc_info = sysinfo.proc_info(os.getpid())
        self.assertEqual(os.getppid(), proc_info.ppid)
        # We do not check the starttime, but just verify that calling
        # proc_info twice returns same starttime, which can be used as part
        # of process signature.
        self.assertEqual(
            proc_info.starttime,
            sysinfo.proc_info(os.getpid()).starttime)

    @mock.patch('io.open', mock.mock_open(read_data=PROC_MEMINFO.strip()))
    def test_mem_info(self):
        """Mock test for mem info."""
        meminfo = sysinfo.mem_info()
        # MemTotal from the canned /proc/meminfo above.
        self.assertEqual(7992596, meminfo.total)

    @mock.patch('os.statvfs', mock.Mock())
    def test_disk_usage(self):
        """Mock test for disk usage."""
        # NOTE(review): the field string has a stray comma after f_bavail;
        # namedtuple accepts comma or whitespace separators, so it's benign.
        os.statvfs.return_value = collections.namedtuple(
            'statvfs',
            'f_blocks f_bavail, f_frsize')(100, 20, 4)
        du = sysinfo.disk_usage('/var/tmp')
        os.statvfs.assert_called_with('/var/tmp')
        # total = f_blocks * f_frsize, free = f_bavail * f_frsize.
        self.assertEqual(400, du.total)
        self.assertEqual(80, du.free)

    @mock.patch('treadmill.cgutils.get_cpuset_cores',
                mock.Mock(return_value=six.moves.range(0, 4)))
    @mock.patch('io.open', mock.mock_open(read_data=CPUINFO.strip()))
    def test_bogomips(self):
        """Mock test for mem info."""
        bogomips = sysinfo.total_bogomips()
        # bogomips : 6385.66
        # bogomips : 6384.64
        # bogomips : 6385.26
        # bogomips : 6384.10
        # -------------------
        # total    : 25539.659999999996
        self.assertEqual(25539, bogomips)

    @mock.patch('time.time', mock.Mock(return_value=50))
    @mock.patch('treadmill.cgroups.get_value',
                mock.Mock(return_value=42 * 1024**2))
    @mock.patch('treadmill.cgutils.get_cpu_shares',
                mock.Mock(return_value=2))
    @mock.patch('treadmill.sysinfo.BMIPS_PER_CPU', 1)
    @mock.patch('psutil.boot_time', mock.Mock(return_value=8))
    def test_node_info(self):
        """Test node information report generation.
        """
        # Access protected members
        # pylint: disable=W0212
        mock_tm_env = mock.Mock(
            spec_set=treadmill.appenv.AppEnvironment,
            svc_cgroup=mock.Mock(
                spec_set=treadmill.services._base_service.ResourceService,
            ),
            svc_localdisk=mock.Mock(
                spec_set=treadmill.services._base_service.ResourceService,
            ),
            svc_network=mock.Mock(
                spec_set=treadmill.services._base_service.ResourceService,
            ),
        )
        mock_tm_env.svc_localdisk.status.return_value = {
            'size': 100 * 1024**2,
        }

        res = sysinfo.node_info(mock_tm_env, 'linux')

        mock_tm_env.svc_localdisk.status.assert_called_with(timeout=30)
        mock_tm_env.svc_cgroup.status.assert_called_with(timeout=30)
        self.assertEqual(
            res,
            {
                'cpu': '200%',    # 100% of 2 cores is available
                'memory': '42M',  # As read from cgroup
                'disk': '100M',   # As returned by localdisk service
                'up_since': 8,
                'network': mock_tm_env.svc_network.status.return_value,
                'localdisk': mock_tm_env.svc_localdisk.status.return_value,
            })