def test_certificate_custom_template_with_unicode_data(self, custom_certs_enabled):
    """ Tests custom template renders properly with unicode data. """
    mode = 'honor'
    self._add_course_certificates(count=1, signatory_count=2)
    self._create_custom_template(mode=mode)
    with patch.dict("django.conf.settings.FEATURES", {
        "CERTIFICATES_HTML_VIEW": True,
        "CUSTOM_CERTIFICATE_TEMPLATES_ENABLED": custom_certs_enabled
    }):
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        with patch.dict("django.conf.settings.SOCIAL_SHARING_SETTINGS", {
            "CERTIFICATE_TWITTER": True,
            # Non-ASCII share text exercises the unicode rendering path
            "CERTIFICATE_TWITTER_TEXT": u"nền tảng học tập"
        }):
            with patch('django.http.HttpRequest.build_absolute_uri') as mock_abs_uri:
                # Absolute URI containing a non-ASCII query value
                mock_abs_uri.return_value = '='.join(['http://localhost/?param', u'é'])
                with patch('certificates.api.get_course_organizations') as mock_get_orgs:
                    mock_get_orgs.return_value = []
                    response = self.client.get(test_url)
                    self.assertEqual(response.status_code, 200)
                    if custom_certs_enabled:
                        # Custom template should report the requested mode
                        self.assertContains(response, 'mode: {}'.format(mode))
                    else:
                        self.assertContains(response, "Tweet this Accomplishment")
                    # NOTE(review): runs for both branches — confirm the Twitter
                    # intent link is expected in the custom template as well.
                    self.assertContains(response, 'https://twitter.com/intent/tweet')
def test_prompt_argument(self):
    """Verify how --prompt values are quoted: plain, single-quoted, double-quoted."""
    cases = [
        ('PY Prompt', 'virtualenv --prompt=\'PY Prompt\' /tmp/foo'),
        # Now with some quotes on the mix
        ('\'PY\' Prompt', 'virtualenv --prompt="\'PY\' Prompt" /tmp/foo'),
        ('"PY" Prompt', 'virtualenv --prompt=\'"PY" Prompt\' /tmp/foo'),
    ]
    for prompt, expected_cmd in cases:
        run_mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': run_mock}):
            virtualenv_mod.create('/tmp/foo', prompt=prompt)
            run_mock.assert_called_once_with(expected_cmd, runas=None)
def test_unrar(self):
    """unrar builds the same command whether excludes are a comma string or a list."""
    for excludes in ('file_1,file_2', ['file_1', 'file_2']):
        run_mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': run_mock}):
            result = archive.unrar(
                '/tmp/rarfile.rar',
                '/home/strongbad/',
                excludes=excludes
            )
            self.assertEqual(['salt'], result)
            run_mock.assert_called_once_with(
                'unrar x -idp /tmp/rarfile.rar '
                '-x file_1 -x file_2 /home/strongbad/',
                template=None
            )
def test_rar(self):
    """rar builds the same command whether sources are a comma string or a list."""
    for sources in ('/tmp/sourcefile1,/tmp/sourcefile2',
                    ['/tmp/sourcefile1', '/tmp/sourcefile2']):
        run_mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': run_mock}):
            result = archive.rar('/tmp/rarfile.rar', sources)
            self.assertEqual(['salt'], result)
            run_mock.assert_called_once_with(
                'rar a -idp /tmp/rarfile.rar '
                '/tmp/sourcefile1 /tmp/sourcefile2',
                template=None
            )
def test_tar(self):
    """tar builds the same command whether sources are a list or a comma string."""
    for sources in (['/tmp/something-to-compress-1', '/tmp/something-to-compress-2'],
                    '/tmp/something-to-compress-1,/tmp/something-to-compress-2'):
        run_mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': run_mock}):
            result = archive.tar('zcvf', 'foo.tar', sources, cwd=None, template=None)
            self.assertEqual(['salt'], result)
            run_mock.assert_called_once_with(
                'tar -zcvf foo.tar /tmp/something-to-compress-1 '
                '/tmp/something-to-compress-2',
                cwd=None, template=None
            )
def test_unzip(self):
    """unzip builds the same command whether excludes are a comma string or a list."""
    for excludes in ('/tmp/tmpePe8yO,/tmp/tmpLeSw1A',
                     ['/tmp/tmpePe8yO', '/tmp/tmpLeSw1A']):
        run_mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': run_mock}):
            result = archive.unzip(
                '/tmp/salt.{{grains.id}}.zip',
                '/tmp/dest',
                excludes=excludes,
                template='jinja'
            )
            self.assertEqual(['salt'], result)
            run_mock.assert_called_once_with(
                'unzip /tmp/salt.{{grains.id}}.zip -d /tmp/dest '
                '-x /tmp/tmpePe8yO /tmp/tmpLeSw1A',
                template='jinja'
            )
def test_issue_6030_deprecated_never_download(self):
    """--never-download is passed to old virtualenv, but dropped (with an
    INFO deprecation log) when virtualenv >= 1.10 is detected."""
    run_mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
    with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': run_mock}):
        virtualenv_mod.create('/tmp/foo', never_download=True)
        run_mock.assert_called_once_with(
            'virtualenv --never-download /tmp/foo', runas=None
        )

    with TestsLoggingHandler() as handler:
        run_mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        # Fake a virtualenv release new enough to have deprecated the option
        fake_virtualenv = MagicMock()
        fake_virtualenv.__version__ = '1.10rc1'
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': run_mock}):
            with patch.dict('sys.modules', {'virtualenv': fake_virtualenv}):
                virtualenv_mod.create('/tmp/foo', never_download=True)
                run_mock.assert_called_once_with('virtualenv /tmp/foo', runas=None)
                # Are we logging the deprecation information?
                self.assertIn(
                    'INFO:The virtualenv \'--never-download\' option has been '
                    'deprecated in virtualenv(>=1.10), as such, the '
                    '\'never_download\' option to `virtualenv.create()` has '
                    'also been deprecated and it\'s not necessary anymore.',
                    handler.messages
                )
def test_direct_cfg(path1, path2, path3, path4):
    """DATALAD_REPO_DIRECT environment variable toggles direct mode for
    newly created annex repos; explicit `direct` parameter and existing
    repos take precedence over it."""
    with patch.dict('os.environ', {'DATALAD_REPO_DIRECT': 'True'}):
        # create annex repo in direct mode:
        with swallow_logs(new_level=logging.DEBUG) as cml:
            ar = AnnexRepo(path1, create=True)
            cml.assert_logged("Switching to direct mode", regex=False, level='DEBUG')
        ok_(ar.is_direct_mode())
        # but don't if repo version is 6 (actually, 6 or above):
        with swallow_logs(new_level=logging.WARNING) as cml:
            ar = AnnexRepo(path2, create=True, version=6)
            ok_(not ar.is_direct_mode())
            cml.assert_logged("direct mode not available", regex=False, level='WARNING')
        # explicit parameter `direct` has priority:
        ar = AnnexRepo(path3, create=True, direct=False)
        if not ar.is_crippled_fs():
            # otherwise forced direct mode
            ok_(not ar.is_direct_mode())
        # don't touch existing repo:
        ar = AnnexRepo(path2, create=True)
        if not ar.is_crippled_fs():
            # otherwise forced direct mode
            ok_(not ar.is_direct_mode())
    # make sure, value is relevant:
    with patch.dict('os.environ', {'DATALAD_REPO_DIRECT': '0'}):
        # don't use direct mode
        ar = AnnexRepo(path4, create=True)
        if not ar.is_crippled_fs():
            # otherwise forced direct mode
            ok_(not ar.is_direct_mode())
def test_freeze_command(self):
    """pip.freeze returns the stdout lines; a non-zero retcode raises."""
    eggs = [
        'M2Crypto==0.21.1',
        '-e [email protected]:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev',
        'bbfreeze==1.1.0',
        'bbfreeze-loader==1.1.0',
        'pycrypto==2.6'
    ]
    run_mock = MagicMock(return_value={'retcode': 0, 'stdout': '\n'.join(eggs)})
    with patch.dict(pip.__salt__, {'cmd.run_all': run_mock}):
        result = pip.freeze()
        run_mock.assert_called_once_with('pip freeze', runas=None, cwd=None)
        self.assertEqual(result, eggs)

    # Non zero returncode raises exception?
    run_mock = MagicMock(return_value={'retcode': 1, 'stderr': 'CABOOOOMMM!'})
    with patch.dict(pip.__salt__, {'cmd.run_all': run_mock}):
        self.assertRaises(CommandExecutionError, pip.freeze)
def test_device_serial(self):
    """ANDROID_SERIAL selects among connected devices; a serial not attached,
    or ambiguity with no serial set, raises EnvironmentError."""
    def make_adb(devices):
        # helper: Adb instance whose device listing is canned
        adb = Adb()
        adb.devices = MagicMock()
        adb.devices.return_value = devices
        return adb

    with patch.dict('os.environ', {'ANDROID_SERIAL': "ABCDEF123456"}):
        adb = make_adb({"ABCDEF123456": "device"})
        self.assertEqual(adb.device_serial(), "ABCDEF123456")

    with patch.dict('os.environ', {'ANDROID_SERIAL': "ABCDEF123456"}):
        adb = make_adb({"ABCDEF123456": "device", "123456ABCDEF": "device"})
        self.assertEqual(adb.device_serial(), "ABCDEF123456")

    with patch.dict('os.environ', {'ANDROID_SERIAL': "HIJKLMN098765"}):
        adb = make_adb({"ABCDEF123456": "device", "123456ABCDEF": "device"})
        with self.assertRaises(EnvironmentError):
            adb.device_serial()

    with patch.dict('os.environ', {}, clear=True):
        adb = make_adb({"ABCDEF123456": "device", "123456ABCDEF": "device"})
        with self.assertRaises(EnvironmentError):
            adb.device_serial()

    with patch.dict('os.environ', {}, clear=True):
        adb = make_adb({"ABCDEF123456": "device"})
        print(adb.devices())
        self.assertEqual(adb.device_serial(), "ABCDEF123456")

    with self.assertRaises(EnvironmentError):
        adb = make_adb({})
        adb.device_serial()
def test_install_pre_argument_in_resulting_command(self):
    # Lower than 1.4 versions don't end-up with `--pre` in the resulting
    # output
    cases = [
        ('pip 1.2.0 /path/to/site-packages/pip', 'pip install pep8'),
        ('pip 1.4.0 /path/to/site-pacakges/pip', 'pip install --pre pep8'),
    ]
    for version_stdout, expected_cmd in cases:
        # first call answers the version probe, second the install itself
        run_mock = MagicMock(side_effect=[
            {'retcode': 0, 'stdout': version_stdout},
            {'retcode': 0, 'stdout': ''}
        ])
        with patch.dict(pip.__salt__, {'cmd.run_all': run_mock}):
            pip.install('pep8', pre_releases=True)
            run_mock.assert_called_with(expected_cmd, runas=None, cwd=None)
def test_install_multiple_editable(self):
    """Editable requirements yield the same command as a list or comma string."""
    editables = [
        'git+https://github.com/jek/blinker.git#egg=Blinker',
        'git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting'
    ]
    expected_cmd = (
        'pip install '
        '--editable=git+https://github.com/jek/blinker.git#egg=Blinker '
        '--editable=git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting'
    )
    for editable_arg in (editables, ','.join(editables)):
        run_mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(pip.__salt__, {'cmd.run_all': run_mock}):
            pip.install(editable=editable_arg)
            run_mock.assert_called_once_with(expected_cmd, runas=None, cwd=None)
def test_uninstall_timeout_argument_in_resulting_command(self):
    """timeout accepts an int or a numeric string; anything else raises."""
    expected_cmd = 'pip uninstall -y --timeout=10 pep8'
    for timeout in (10, '10'):
        run_mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(pip.__salt__, {'cmd.run_all': run_mock}):
            pip.uninstall('pep8', timeout=timeout)
            run_mock.assert_called_once_with(expected_cmd, runas=None, cwd=None)
    # Passing a non-int to timeout
    run_mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
    with patch.dict(pip.__salt__, {'cmd.run_all': run_mock}):
        self.assertRaises(ValueError, pip.uninstall, 'pep8', timeout='a')
def test_trigger_events_silent_view_method(self, mock_from):
    """No events fire and no fields are built when the view method is _silent."""
    class A(object):
        pass

    after_handler = Mock()
    before_handler = Mock()
    ctx = A()
    view = Mock(
        Model=A,
        request=Mock(action='index'),
        _json_params={'bar': 1},
        context=ctx,
        _silent=False)
    view.index._silent = True
    view.index._event_action = None
    with patch.dict(events.BEFORE_EVENTS, {'index': before_handler}):
        with patch.dict(events.AFTER_EVENTS, {'index': after_handler}):
            with events.trigger_events(view):
                pass
    assert not after_handler.called
    assert not before_handler.called
    assert not view.request.registry.notify.called
    assert not mock_from.called
def test_proctoring_js_includes(self):
    """
    Make sure that proctoring JS does not get included on
    courseware pages if either the FEATURE flag is turned off
    or the course is not proctored enabled
    """
    email, password = self.STUDENT_INFO[0]
    self.login(email, password)
    self.enroll(self.test_course, True)
    test_course_id = self.test_course.id.to_deprecated_string()

    # Feature flag off: proctoring JS must never appear
    with patch.dict(settings.FEATURES, {"ENABLE_SPECIAL_EXAMS": False}):
        url = reverse("courseware", kwargs={"course_id": test_course_id})
        resp = self.client.get(url)
        self.assertNotContains(resp, "/static/js/lms-proctoring.js")

    # Feature flag on, but the course itself is not proctoring-enabled yet
    with patch.dict(settings.FEATURES, {"ENABLE_SPECIAL_EXAMS": True}):
        url = reverse("courseware", kwargs={"course_id": test_course_id})
        resp = self.client.get(url)
        self.assertNotContains(resp, "/static/js/lms-proctoring.js")

        # now set up a course which is proctored enabled
        self.test_course.enable_proctored_exams = True
        self.test_course.save()
        modulestore().update_item(self.test_course, self.user.id)
        resp = self.client.get(url)
        self.assertContains(resp, "/static/js/lms-proctoring.js")
def test_url_protocol_id(self, mocked_get_secret_key):
    """
    PUSHTOKEN: Test url protocol id customization
    """
    public_key, secret_key = crypto_sign_keypair()
    mocked_get_secret_key.return_value = secret_key
    user_public_key = base64.b64encode(public_key)

    fake = FakeTokenModel()
    token = PushTokenClass(fake)
    token.addToTokenInfo('partition', 0)
    token.addToTokenInfo('user_token_id', 123)
    token.addToTokenInfo('user_dsa_public_key', user_public_key)

    # patch.dict(config) snapshots config so the deletion below is undone
    with nested(patch.dict(config), request_context_safety()):
        if 'mobile_app_protocol_id' in config:
            del config['mobile_app_protocol_id']
        request_context['hsm'] = fake_hsm_wrapper
        # if no mobile_app_protocol_id is set, it should default
        # to lseqr
        message = 'here are the 2,750 quit you asked for. can i move' + \
                  'to OT I level now? - tom'
        url, _ = token.create_challenge_url(transaction_id='1234567890',
                                            content_type=0,
                                            message=message,
                                            callback_url='foo')
        self.assertTrue(url.startswith('lseqr://'))

    # ------------------------------------------------------------------ --

    fake = FakeTokenModel()
    token = PushTokenClass(fake)
    token.addToTokenInfo('partition', 0)
    token.addToTokenInfo('user_token_id', 123)
    token.addToTokenInfo('user_dsa_public_key', user_public_key)

    # With an explicit mobile_app_protocol_id the scheme must follow it
    with nested(patch.dict(config, {'mobile_app_protocol_id': 'yolo'}),
                request_context_safety()):
        request_context['hsm'] = fake_hsm_wrapper
        message = 'here are the 2,750 quit you asked for. can i move' + \
                  'to OT I level now? - tom'
        url, _ = token.create_challenge_url(transaction_id='1234567890',
                                            content_type=0,
                                            message=message,
                                            callback_url='foo')
        self.assertTrue(url.startswith('yolo://'))
def test_trigger_events(self, mock_from):
    """Before/after handlers fire around the view action with built fields."""
    after_handler = Mock()
    before_handler = Mock()
    mock_from.return_value = {'foo': 1}
    ctx = Mock()
    view = Mock(
        request=Mock(action='index'),
        _json_params={'bar': 1},
        context=ctx,
        _silent=False)
    view.index._silent = False
    view.index._event_action = None
    with patch.dict(events.BEFORE_EVENTS, {'index': before_handler}):
        with patch.dict(events.AFTER_EVENTS, {'index': after_handler}):
            with events.trigger_events(view):
                pass
    after_handler.assert_called_once_with(
        fields={'foo': 1}, model=view.Model, instance=ctx, view=view)
    before_handler.assert_called_once_with(
        fields={'foo': 1}, model=view.Model, instance=ctx, view=view)
    # before-event must be notified before the after-event
    view.request.registry.notify.assert_has_calls([
        call(before_handler()),
        call(after_handler()),
    ])
    mock_from.assert_called_once_with({'bar': 1}, view.Model)
def test_trigger_events_different_action(self, mock_from):
    """_event_action overrides the request action when selecting event handlers."""
    class A(object):
        pass

    mock_from.return_value = {'foo': 1}
    after_handler = Mock()
    before_handler = Mock()
    ctx = A()
    state = Mock()
    view = Mock(
        Model=A,
        request=Mock(action='index'),
        _json_params={'bar': 1},
        context=ctx,
        _silent=False,
        initial_state=state)
    view.index._silent = None
    view.index._event_action = 'delete'
    with patch.dict(events.BEFORE_EVENTS, {'delete': before_handler}):
        with patch.dict(events.AFTER_EVENTS, {'delete': after_handler}):
            with events.trigger_events(view):
                pass
    after_handler.assert_called_once_with(
        fields={'foo': 1}, model=view.Model, view=view,
        response=view._response, initial_state=state)
    before_handler.assert_called_once_with(
        fields={'foo': 1}, model=view.Model, view=view,
        initial_state=state)
    view.request.registry.notify.assert_has_calls([
        call(before_handler()),
        call(after_handler()),
    ])
    mock_from.assert_called_once_with({'bar': 1}, view.Model)
def test_bridge_line(self):
    """bridge_line(): unknown transports, missing binaries, and version
    requirements against tor/obfsproxy are all rejected appropriately."""
    # Unrecognized transport name is rejected outright
    self.assertRaises(onion.UnrecognizedTransport,
                      onion.bridge_line, 'rot13', '/log.txt')

    # NOTE(review): this rebinds onion.find_executable module-wide and never
    # restores it — consider mock.patch for isolation; confirm intended.
    onion.find_executable = Mock(return_value=False)
    self.assertRaises(onion.UninstalledTransport,
                      onion.bridge_line, 'fte', '/log.txt')

    onion.find_executable = Mock(return_value="/fakebin")
    # With the binary "installed", every sample transport renders its line
    for transport, exp_line in sample_transport_lines.iteritems():
        self.assertEqual(onion.bridge_line(transport, '/log.txt'),
                         exp_line)

    # obfs2 requires a newer obfsproxy than 0.1.12
    with patch.dict(onion.obfsproxy_details,
                    {'version': onion.OBFSProxyVersion('0.1.12')}):
        self.assertRaises(onion.OutdatedObfsproxy,
                          onion.bridge_line, 'obfs2', '/log.txt')

    # tor 0.2.4.20 is new enough for fte but not scramblesuit/obfs4
    with patch.dict(onion.tor_details,
                    {'version': onion.TorVersion('0.2.4.20')}):
        onion.bridge_line('fte', '/log.txt')
        self.assertRaises(onion.OutdatedTor,
                          onion.bridge_line, 'scramblesuit', '/log.txt')
        self.assertRaises(onion.OutdatedTor,
                          onion.bridge_line, 'obfs4', '/log.txt')

    # tor 0.2.3.20 is too old even for fte
    with patch.dict(onion.tor_details,
                    {'version': onion.TorVersion('0.2.3.20')}):
        self.assertRaises(onion.OutdatedTor,
                          onion.bridge_line, 'fte', '/log.txt')
def test_check_object_creation(self):
    """Valid names pass; malformed names and bad directory markers fail."""
    req = Mock()
    req.headers = dict()

    accepted = ["a/b/c/d",
                '/'.join(("1@3%&*0-", "};+=]|")),
                '/'.join(('a' * 255, 'b' * 255, 'c' * 221))]
    for name in accepted:
        self.assertFalse(cnt.check_object_creation(req, name))

    rejected = ["a/./b", "a/b/../d", "a//b", "a/c//",
                '/'.join(('a' * 256, 'b' * 255, 'c' * 221)),
                '/'.join(('a' * 255, 'b' * 255, 'c' * 222))]
    for name in rejected:
        self.assertTrue(cnt.check_object_creation(req, name))

    # Check for creation of directory marker objects that ends with slash
    with patch.dict(req.headers, {'content-type': 'application/directory'}):
        self.assertFalse(cnt.check_object_creation(req, "a/b/c/d/"))

    # Check creation of objects ending with slash having any other content
    # type than application/directory is not allowed
    for content_type in ('text/plain', 'text/html', 'image/jpg',
                         'application/octet-stream', 'blah/blah'):
        with patch.dict(req.headers, {'content-type': content_type}):
            self.assertTrue(cnt.check_object_creation(req, "a/b/c/d/"))
def test_creation(self): """ The user that creates a library should have instructor (admin) and staff permissions """ # self.library has been auto-created by the staff user. self.assertTrue(has_studio_write_access(self.user, self.lib_key)) self.assertTrue(has_studio_read_access(self.user, self.lib_key)) # Make sure the user was actually assigned the instructor role and not just using is_staff superpowers: self.assertTrue(CourseInstructorRole(self.lib_key).has_user(self.user)) # Now log out and ensure we are forbidden from creating a library: self.client.logout() self._assert_cannot_create_library(expected_code=302) # 302 redirect to login expected # Now check that logged-in users without CourseCreator role cannot create libraries self._login_as_non_staff_user(logout_first=False) with patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': True}): self._assert_cannot_create_library(expected_code=403) # 403 user is not CourseCreator # Now check that logged-in users with CourseCreator role can create libraries add_user_with_status_granted(self.user, self.non_staff_user) with patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': True}): lib_key2 = self._create_library(library="lib2", display_name="Test Library 2") library2 = modulestore().get_library(lib_key2) self.assertIsNotNone(library2)
def test_report_package_profile_environment_variable(self):
    """Reporting is enabled only when rhsm.conf enables it AND the
    SUBMAN_DISABLE_PROFILE_REPORTING variable does not disable it."""
    cases = [
        # (environment overlay, conf report_package_profile, expected)
        ({'SUBMAN_DISABLE_PROFILE_REPORTING': '1'}, 1, False),
        ({'SUBMAN_DISABLE_PROFILE_REPORTING': '1'}, 0, False),
        ({'SUBMAN_DISABLE_PROFILE_REPORTING': '0'}, 1, True),
        ({'SUBMAN_DISABLE_PROFILE_REPORTING': '0'}, 0, False),
        ({}, 1, True),
        ({}, 0, False),
    ]
    for environment, conf_value, expected in cases:
        with patch.dict('os.environ', environment), \
                patch.object(cache, 'conf') as conf:
            conf.__getitem__.return_value.get_int.return_value = conf_value
            if expected:
                self.assertTrue(self.profile_mgr.profile_reporting_enabled())
            else:
                self.assertFalse(self.profile_mgr.profile_reporting_enabled())
def test_check_object_creation(self):
    """check_object_creation accepts well-formed names and rejects the rest."""
    req = Mock()
    req.headers = dict()

    for name in ["a/b/c/d",
                 "/".join(("1@3%&*0-", "};+=]|")),
                 "/".join(("a" * 255, "b" * 255, "c" * 221))]:
        self.assertFalse(cnt.check_object_creation(req, name))

    for name in ["a/./b",
                 "a/b/../d",
                 "a//b",
                 "a/c//",
                 "/".join(("a" * 256, "b" * 255, "c" * 221)),
                 "/".join(("a" * 255, "b" * 255, "c" * 222))]:
        self.assertTrue(cnt.check_object_creation(req, name))

    # Trailing-slash names are directory markers: allowed only with the
    # application/directory content type
    with patch.dict(req.headers, {"content-type": "application/directory"}):
        self.assertFalse(cnt.check_object_creation(req, "a/b/c/d/"))

    for content_type in ("text/plain", "text/html", "image/jpg",
                         "application/octet-stream", "blah/blah"):
        with patch.dict(req.headers, {"content-type": content_type}):
            self.assertTrue(cnt.check_object_creation(req, "a/b/c/d/"))
def test_uninstall_multiple_requirements_arguments_in_resulting_command(self, get_cached_requirements):
    """Requirements as list, comma string, or single entry build the
    expected `pip uninstall` command from the cached requirement files."""
    requirements = ["salt://requirements-1.txt", "salt://requirements-2.txt"]
    expected_pair_cmd = (
        "pip uninstall -y "
        "--requirement='my_cached_reqs-1' "
        "--requirement='my_cached_reqs-2'"
    )
    # List form and comma-joined form must produce identical commands
    for requirements_arg in (requirements, ",".join(requirements)):
        get_cached_requirements.side_effect = ["my_cached_reqs-1", "my_cached_reqs-2"]
        run_mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(pip.__salt__, {"cmd.run_all": run_mock}):
            pip.uninstall(requirements=requirements_arg)
            run_mock.assert_called_once_with(expected_pair_cmd, runas=None, cwd=None)

    # Passing option as a single string entry
    get_cached_requirements.side_effect = ["my_cached_reqs-1"]
    run_mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
    with patch.dict(pip.__salt__, {"cmd.run_all": run_mock}):
        pip.uninstall(requirements=requirements[0])
        run_mock.assert_called_once_with(
            "pip uninstall -y --requirement='my_cached_reqs-1'",
            runas=None, cwd=None)
def test_exc_str():
    """exc_str() renders an exception with a bracketed traceback-location
    suffix; the number of frames shown is limited by the second argument
    or by the DATALAD_EXC_STR_TBLIMIT environment variable.

    Fix: the regex patterns are now raw strings — '\\[' etc. in plain
    literals are invalid escape sequences (DeprecationWarning today, a
    SyntaxError in future Python). The pattern values are unchanged.
    """
    try:
        raise Exception("my bad")
    except Exception as e:
        estr = exc_str(e)
    assert_re_in(r"my bad \[test_dochelpers.py:test_exc_str:...\]", estr)

    def f():
        def f2():
            raise Exception("my bad again")
        f2()

    try:
        f()
    except Exception as e:
        # default one:
        estr2 = exc_str(e, 2)
        estr1 = exc_str(e, 1)
        # and we can control it via environ by default
        with patch.dict('os.environ', {'DATALAD_EXC_STR_TBLIMIT': '3'}):
            estr3 = exc_str(e)
        with patch.dict('os.environ', {}, clear=True):
            estr_ = exc_str()

    assert_re_in(r"my bad again \[test_dochelpers.py:test_exc_str:...,"
                 r"test_dochelpers.py:f:...,test_dochelpers.py:f2:...\]", estr3)
    assert_re_in(r"my bad again \[test_dochelpers.py:f:...,"
                 r"test_dochelpers.py:f2:...\]", estr2)
    assert_re_in(r"my bad again \[test_dochelpers.py:f2:...\]", estr1)
    assert_equal(estr_, estr1)

    try:
        raise NotImplementedError
    except Exception as e:
        assert_re_in(r"NotImplementedError\(\) \[test_dochelpers.py:test_exc_str:...\]",
                     exc_str(e))
def test_install_install_options_argument_in_resulting_command(self):
    """install_options as list, comma string, or single entry are quoted
    into --install-option arguments."""
    install_options = ["--exec-prefix=/foo/bar", "--install-scripts=/foo/bar/bin"]
    expected_pair_cmd = (
        "pip install "
        "--install-option='--exec-prefix=/foo/bar' "
        "--install-option='--install-scripts=/foo/bar/bin' pep8"
    )
    # List form and comma-joined form must produce identical commands
    for options_arg in (install_options, ",".join(install_options)):
        run_mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(pip.__salt__, {"cmd.run_all": run_mock}):
            pip.install("pep8", install_options=options_arg)
            run_mock.assert_called_once_with(expected_pair_cmd, runas=None, cwd=None)

    # Passing mirrors as a single string entry
    run_mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
    with patch.dict(pip.__salt__, {"cmd.run_all": run_mock}):
        pip.install("pep8", install_options=install_options[0])
        run_mock.assert_called_once_with(
            "pip install --install-option='--exec-prefix=/foo/bar' pep8",
            runas=None, cwd=None
        )
def test_install_multiple_editable(self):
    """Editable requirements build the same command from a list or a comma string."""
    editables = [
        "git+https://github.com/jek/blinker.git#egg=Blinker",
        "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
    ]
    expected_cmd = (
        "pip install "
        "--editable=git+https://github.com/jek/blinker.git#egg=Blinker "
        "--editable=git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting"
    )
    for editable_arg in (editables, ",".join(editables)):
        run_mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
        with patch.dict(pip.__salt__, {"cmd.run_all": run_mock}):
            pip.install(editable=editable_arg)
            run_mock.assert_called_once_with(expected_cmd, runas=None, cwd=None)
def test_list_command(self):
    """pip.list_ parses `pip freeze` output into a name -> version/URL map."""
    eggs = [
        "M2Crypto==0.21.1",
        "-e [email protected]:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev",
        "bbfreeze==1.1.0",
        "bbfreeze-loader==1.1.0",
        "pycrypto==2.6",
    ]
    run_mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)})
    with patch.dict(pip.__salt__, {"cmd.run_all": run_mock}):
        packages = pip.list_()
        run_mock.assert_called_once_with("pip freeze", runas=None, cwd=None)
        expected = {
            "SaltTesting-dev": "[email protected]:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8",
            "M2Crypto": "0.21.1",
            "bbfreeze-loader": "1.1.0",
            "bbfreeze": "1.1.0",
            "pycrypto": "2.6",
        }
        self.assertEqual(packages, expected)

    # Non zero returncode raises exception?
    run_mock = MagicMock(return_value={"retcode": 1, "stderr": "CABOOOOMMM!"})
    with patch.dict(pip.__salt__, {"cmd.run_all": run_mock}):
        self.assertRaises(CommandExecutionError, pip.list_)
def test_proxy_detection(self, proxy, bah, build, install):
    """setup_proxy_opener picks up http_proxy / https_proxy from the environment."""
    for env_var, scheme, proxy_url in (('http_proxy', 'http', 'something'),
                                       ('https_proxy', 'https', 'somethings')):
        with patch.dict('os.environ', {env_var: proxy_url}, clear=True):
            setup_proxy_opener()
            proxy.assert_called_with({scheme: proxy_url})
def test_color_enabled():
    """color_enabled() honors the datalad.ui.color config, the NO_COLOR
    environment variable, and terminal interactivity for 'auto'."""
    # In the absence of NO_COLOR, follow ui.color, or ui.is_interactive if 'auto'
    with patch.dict(os.environ), \
            patch('datalad.support.ansi_colors.ui'):
        os.environ.pop('NO_COLOR', None)
        for is_interactive in (True, False):
            colors.ui.is_interactive = is_interactive
            with patch_config({'datalad.ui.color': 'off'}):
                assert_equal(colors.color_enabled(), False)
            with patch_config({'datalad.ui.color': 'on'}):
                assert_equal(colors.color_enabled(), True)
            with patch_config({'datalad.ui.color': 'auto'}):
                assert_equal(colors.color_enabled(), is_interactive)

    # In the presence of NO_COLOR, default to disable, unless ui.color is "on"
    # The value of NO_COLOR should have no effect, so try true-ish and false-ish values
    for NO_COLOR in ("", "1", "0"):
        with patch.dict(os.environ, {'NO_COLOR': NO_COLOR}), \
                patch('datalad.support.ansi_colors.ui'):
            for is_interactive in (True, False):
                colors.ui.is_interactive = is_interactive
                with patch_config({'datalad.ui.color': 'on'}):
                    assert_equal(colors.color_enabled(), True)
                for ui_color in ('off', 'auto'):
                    with patch_config({'datalad.ui.color': ui_color}):
                        assert_equal(colors.color_enabled(), False)
def mocked_environment_prod():
    """Return an (unstarted) patch.dict patcher faking the prod environment."""
    return patch.dict(os.environ, {
        "BUILDABLES_TABLE_NAME": "test",
        "MM_STAGE": "prod",
        "ACCOUNT_ID": "8675309",
    })
def test_twitter_login(self):
    """Test Twitter login is disabled."""
    endpoint = '/twitter/'
    with patch.dict(self.flask_app.config, {'LDAP_HOST': '127.0.0.1'}):
        response = self.app.get(endpoint)
        assert response.status_code == 404, response.status_code
def test_get_user_data_access_db_clause(self):
    """An unrecognized access level produces no DB clause."""
    bogus_levels = ['L5']
    with patch.dict(data_access.data_access_levels, self.patched_levels()):
        clause = data_access.get_user_data_access_db_clause(bogus_levels)
        assert not clause
def test__get_attribute_raises_on_invalid_structure(self):
    """A structurally broken config file makes _get_attribute raise."""
    with patch.dict('os.environ', {'HOME': '../data/kiwi_config/broken'}):
        broken_config = RuntimeConfig(reread=True)
        with raises(KiwiRuntimeConfigFormatError):
            broken_config._get_attribute('foo', 'bar')
def test_snappy_import_fails(self):
    """has_snappy() reports False while the snappy module is unimportable."""
    import sys
    with patch.dict(sys.modules, values={'snappy': None}):
        reload(afkak.codec)
        self.assertFalse(afkak.codec.has_snappy())
    # restore the codec module's real import state for other tests
    reload(afkak.codec)
def patch_oss_jenkins(**kwargs):
    """Swap a mocked OSS `jenkins` module into sys.modules for the duration
    of the context; kwargs configure the mock module."""
    mocked_jenkins = create_oss_jenkins_mock_module(**kwargs)
    with patch.dict(sys.modules, {'jenkins': mocked_jenkins}):
        yield
def metadata_edit(self, values):
    """Return a patcher that temporarily overlays `values` onto the
    handler's provider metadata."""
    return patch.dict(self.handler._provider_metadata, values)
def test_get_my_ipv4_address_without_default_route(
        self, ifaddr, gateways):
    """Fall back to loopback when no interface addresses are available."""
    with patch.dict(netifaces.__dict__, {'AF_INET': '0'}):
        ifaddr.return_value = {}
        resolved = netutils._get_my_ipv4_address()
        self.assertEqual('127.0.0.1', resolved)
def test_get_my_ipv4_address_with_default_route(
        self, ifaddr, gateways):
    """Report the address attached to the default-route interface."""
    with patch.dict(netifaces.__dict__, {'AF_INET': '0'}):
        ifaddr.return_value = {'0': [{'addr': '172.18.204.1'}]}
        self.assertEqual('172.18.204.1', netutils._get_my_ipv4_address())
"""Units tests for the Rak811Serial class.""" from time import sleep from mock import Mock, patch from pytest import raises from serial import EIGHTBITS # Ignore RPi.GPIO p = patch.dict('sys.modules', {'RPi': Mock()}) p.start() from rak811.serial import BAUDRATE, PORT, TIMEOUT # noqa: E402, I100 from rak811.serial import Rak811Serial, Rak811TimeoutError # noqa: E402 @patch('rak811.serial.Serial') def test_instantiate_default(mock_serial): """Test that Rak811Serial can be instantiated. Check for basic initialisation and teardown of the serial interface. """ rs = Rak811Serial() # Test default parameters are used mock_serial.assert_called_once_with(port=PORT, baudrate=BAUDRATE, timeout=TIMEOUT) # Class initialization mock_serial.return_value.reset_input_buffer.assert_called_once() assert rs._alive # Test tear down rs.close() mock_serial.return_value.close.assert_called_once()
def test_environment_variables(self):
    """Configuration should pick up NACL_* settings from the environment."""
    cases = (
        ({'NACL_ARCH': 'arm'}, 'arch', 'arm'),
        ({'NACL_DEBUG': '1'}, 'debug', True),
    )
    for environ, attribute, expected in cases:
        with patch.dict('os.environ', environ):
            self.assertEqual(getattr(Configuration(), attribute), expected)
def test_config_sections_other_settings(self):
    """Settings from the 'other' runtime config are read as booleans."""
    with patch.dict('os.environ', {'HOME': '../data/kiwi_config/other'}):
        config = RuntimeConfig(reread=True)
        assert config.get_container_compression() is True
        assert config.get_package_changes() is True
def test_get_param_bad_value(self):
    """AppIntegrationConfig - Get parameter, bad json value"""
    name = '{}_config'.format(FUNCTION_NAME)
    with patch.dict(AppConfig.SSM_CLIENT._parameters, {name: 'bad json'}):
        AppConfig._get_parameters([name])
def pdict(self, *args, **kwargs):
    """Start a dict patcher, remember it for later cleanup, and return start()'s result."""
    dict_patcher = patch.dict(*args, **kwargs)
    self._patchers.append(dict_patcher)
    return dict_patcher.start()
def test_parse_args_DCI_PASSWORD_env_variable(self):
    """DCI_PASSWORD from the environment is used when no flag is given."""
    with patch.dict('os.environ', {'DCI_PASSWORD': '******'}):
        parsed = parse_args([])
    self.assertEqual(parsed.dci_password, 'bar')
def test_cli_overload_DCI_PASSWORD_env_variable(self):
    """The --dci-password flag overrides DCI_PASSWORD in the environment."""
    with patch.dict('os.environ', {'DCI_PASSWORD': '******'}):
        parsed = parse_args(['--dci-password', 'foo'])
    self.assertEqual(parsed.dci_password, 'foo')
def test_parse_args_DCI_LOGIN_env_variable(self):
    """DCI_LOGIN from the environment is used when no flag is given."""
    with patch.dict('os.environ', {'DCI_LOGIN': '******'}):
        parsed = parse_args([])
    self.assertEqual(parsed.dci_login, 'foo')
def test_cli_overload_DCI_LOGIN_env_variable(self):
    """The --dci-login flag overrides DCI_LOGIN in the environment."""
    with patch.dict('os.environ', {'DCI_LOGIN': '******'}):
        parsed = parse_args(['--dci-login', 'foo'])
    self.assertEqual(parsed.dci_login, 'foo')
def _get_install_script(self, add_ssl_cert=True):
    """Build the agent install script with MANAGER_NAME forced to 'cloudify'."""
    with patch.dict(os.environ, {constants.MANAGER_NAME: 'cloudify'}):
        builder = script._get_script_builder(
            cloudify_agent=self.input_cloudify_agent)
        return builder.install_script(add_ssl_cert=add_ssl_cert)
def test_parse_args_DCI_CS_URL_env_variable(self):
    """DCI_CS_URL from the environment is used when no flag is given."""
    with patch.dict('os.environ', {'DCI_CS_URL': 'https://api.example.org'}):
        parsed = parse_args([])
    self.assertEqual(parsed.dci_cs_url, 'https://api.example.org')
def test_exportdb():
    '''
    Test udevdb.exportdb method.

    :return:
    '''
    udev_data = mockery.get_test_data('udev.sample')
    # Expected parse of the udev database sample: one dict per device,
    # keyed by udev record prefixes (P: device path, E: properties,
    # N: device node, X-Mgr: manager-specific extras).
    out = [
        {
            'P': '/devices/LNXSYSTM:00/LNXPWRBN:00',
            'E': {
                'MODALIAS': 'acpi:LNXPWRBN:',
                'SUBSYSTEM': 'acpi',
                'DRIVER': 'button',
                'DEVPATH': '/devices/LNXSYSTM:00/LNXPWRBN:00'
            }
        },
        {
            'P': '/devices/LNXSYSTM:00/LNXPWRBN:00/input/input2',
            'E': {
                'SUBSYSTEM': 'input',
                'PRODUCT': '19/0/1/0',
                'PHYS': '"LNXPWRBN/button/input0"',
                'NAME': '"Power Button"',
                'ID_INPUT': 1,
                'DEVPATH': '/devices/LNXSYSTM:00/LNXPWRBN:00/input/input2',
                'MODALIAS': 'input:b0019v0000p0001e0000-e0,1,k74,ramlsfw',
                'ID_PATH_TAG': 'acpi-LNXPWRBN_00',
                'TAGS': ':seat:',
                'PROP': 0,
                'ID_FOR_SEAT': 'input-acpi-LNXPWRBN_00',
                'KEY': '10000000000000 0',
                'USEC_INITIALIZED': 2010022,
                'ID_PATH': 'acpi-LNXPWRBN:00',
                'EV': 3,
                'ID_INPUT_KEY': 1
            }
        },
        {
            'P': '/devices/LNXSYSTM:00/LNXPWRBN:00/input/input2/event2',
            'E': {
                'SUBSYSTEM': 'input',
                'XKBLAYOUT': 'us',
                'MAJOR': 13,
                'ID_INPUT': 1,
                'DEVPATH': '/devices/LNXSYSTM:00/LNXPWRBN:00/input/input2/event2',
                'ID_PATH_TAG': 'acpi-LNXPWRBN_00',
                'DEVNAME': '/dev/input/event2',
                'TAGS': ':power-switch:',
                'BACKSPACE': 'guess',
                'MINOR': 66,
                'USEC_INITIALIZED': 2076101,
                'ID_PATH': 'acpi-LNXPWRBN:00',
                'XKBMODEL': 'pc105',
                'ID_INPUT_KEY': 1
            },
            'N': 'input/event2'
        },
        {
            'P': '/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0',
            'E': {
                'MODALIAS': 'scsi:t-0x00',
                'SUBSYSTEM': 'scsi',
                'DEVTYPE': 'scsi_device',
                'DRIVER': 'sd',
                'DEVPATH': '/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0'
            },
            'X-Mgr': {
                'SCSI_SYS_TYPE': '0'
            }
        },
    ]
    # side_effect order matters: the first cmd.run_all call returns the udev
    # dump, the second returns the SCSI type queried for the X-Mgr section.
    with patch.dict(
            udevdb.__salt__, {
                'cmd.run_all': MagicMock(side_effect=[{
                    'retcode': 0,
                    'stdout': udev_data
                }, {
                    'retcode': 0,
                    'stdout': '0'
                }])
            }):
        data = udevdb.exportdb()
        # NOTE(review): comparing a list to filter() assumes Python 2
        # (filter returns a list there); under Python 3 filter is an
        # iterator and this comparison is always False -- confirm the
        # target interpreter version.
        assert data == filter(None, data)

        for d_idx, d_section in enumerate(data):
            assert out[d_idx]['P'] == d_section['P']
            assert out[d_idx].get('N') == d_section.get('N')
            assert out[d_idx].get('X-Mgr') == d_section.get('X-Mgr')
            for key, value in d_section['E'].items():
                assert out[d_idx]['E'][key] == value
# coding=utf-8 """ pytest file """ # Hardware specific libs are found through out the flask app pages # and the following mock work will patch them so that we can pretend # that we have them installed: from mock import patch, MagicMock patch.dict("sys.modules", RPi=MagicMock(), imutils=MagicMock(), picamera=MagicMock(), AM2315=MagicMock(), tentacle_pi=MagicMock(), Adafruit_BMP=MagicMock(), Adafruit_TMP=MagicMock(), w1thermsensor=MagicMock(), sht_sensor=MagicMock(), smbus=MagicMock(), ).start() import pytest import tempfile import shutil import os from mycodo.mycodo_flask.app import create_app from mycodo.config import TestConfig from webtest import TestApp from mycodo.mycodo_flask.extensions import db as _db from mycodo.tests.software_tests.factories import UserFactory from mycodo.databases.models import Role from mycodo.databases.models import User from mycodo.databases.models import populate_db
import os from argparse import Namespace import boto3 from botocore.exceptions import ClientError from mock import patch, MagicMock, ANY, call import pyarrow as pa import pytest from pyarrow.lib import ArrowException from s3 import DeleteOldVersionsError, IntegrityCheckFailedError with patch.dict(os.environ, { "DELETE_OBJECTS_QUEUE": "https://url/q.fifo", "DLQ": "https://url/q", }): from backend.ecs_tasks.delete_files.main import ( kill_handler, execute, handle_error, get_queue, main, parse_args, delete_matches_from_file, ) pytestmark = [pytest.mark.unit, pytest.mark.ecs_tasks] def get_list_object_versions_error():
def test_copy_user_data_access_levels(self):
    """Copying access levels writes them under the 'data_access' key."""
    with patch.dict(data_access.data_access_levels, self.patched_levels()):
        destination = {}
        levels = ['L3']
        data_access.copy_user_data_access_levels(destination, levels)
        assert destination['data_access'] == levels
def env(**kwargs):
    """Replace os.environ entirely with *kwargs* for the duration of the context."""
    patcher = patch.dict(os.environ, clear=True, **kwargs)
    with patcher:
        yield
def test_register_404(self):
    """Test register is disabled for ldap."""
    with patch.dict(self.flask_app.config, {'LDAP_HOST': '127.0.0.1'}):
        res = self.app_get_json('/account/register')
    # redirect to login
    assert res.status_code == 302, res.data
def PatchCompleter(completer, filetype):
    """Temporarily register *completer* as the handler for *filetype*."""
    options = handlers._server_state._user_options
    with patch.dict('ycmd.handlers._server_state._filetype_completers',
                    {filetype: completer(options)}):
        yield
def test_taskrun_with_upload(self, upload_from_string, set_content):
    """End-to-end check of taskrun submission with encrypted file upload fields."""
    with patch.dict(self.flask_app.config, self.patch_config):
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project)
        # Request a task first so the taskrun POST below is accepted.
        self.app.get('/api/project/%s/newtask?api_key=%s' %
                     (project.id, project.owner.api_key))
        data = dict(project_id=project.id,
                    task_id=task.id,
                    info={
                        'test__upload_url': {
                            'filename': 'hello.txt',
                            'content': 'abc'
                        },
                        'another_field': 42
                    })
        datajson = json.dumps(data)
        url = '/api/taskrun?api_key=%s' % project.owner.api_key
        success = self.app.post(url, data=datajson)

        assert success.status_code == 200, success.data
        set_content.assert_called()
        res = json.loads(success.data)
        # The returned info is reduced to just the stored-answer URL.
        assert len(res['info']) == 1
        url = res['info']['pyb_answer_url']
        args = {
            'host': self.host,
            'bucket': self.bucket,
            'project_id': project.id,
            'task_id': task.id,
            'user_id': project.owner.id,
            'filename': 'pyb_answer.json'
        }
        # NOTE(review): the expected URL ends in a literal '(unknown)'
        # even though 'filename' is passed to format() -- confirm this
        # template is intentional and not a mangled '{filename}'.
        expected = 'https://{host}/{bucket}/{project_id}/{task_id}/{user_id}/(unknown)'.format(
            **args)
        assert url == expected, url

        aes = AESWithGCM('testkey')
        # first call
        first_call = set_content.call_args_list[0]
        args, kwargs = first_call
        encrypted = args[0].read()
        content = aes.decrypt(encrypted)
        # Payload must be encrypted at rest but decrypt back to the input.
        assert encrypted != content
        assert content == 'abc'

        upload_from_string.assert_called()
        args, kwargs = set_content.call_args
        content = aes.decrypt(args[0].read())
        actual_content = json.loads(content)
        args = {
            'host': self.host,
            'bucket': self.bucket,
            'project_id': project.id,
            'task_id': task.id,
            'user_id': project.owner.id,
            'filename': 'hello.txt'
        }
        expected = 'https://{host}/{bucket}/{project_id}/{task_id}/{user_id}/(unknown)'.format(
            **args)
        # The upload field is replaced by its storage URL in the stored answer.
        assert actual_content['test__upload_url'] == expected
        assert actual_content['another_field'] == 42
def test_all_does_not_raise_with_empty_path_envvar(self):
    """PythonInterpreter.all() works with a completely empty environment.

    Reloading the module also checks that it does not raise at import.
    """
    with patch.dict(os.environ, clear=True):
        reload(interpreter)
        interpreter.PythonInterpreter.all()
def test_stream_present():
    '''
    Test to ensure the kinesis stream exists.

    Exercises four boto_kinesis.present() scenarios in order:
    already-present/no-change, dry-run create, dry-run reconfigure,
    and real create.
    '''
    name = 'new_stream'
    retention_hours = 24
    enhanced_monitoring = ['IteratorAgeMilliseconds']
    different_enhanced_monitoring = ['IncomingBytes']
    num_shards = 1
    ret = {'name': name, 'result': True, 'changes': {}, 'comment': ''}

    # Minimal single-shard description mimicking the AWS response shape.
    shards = [{
        'ShardId': 'shardId-000000000000',
        'HashKeyRange': {
            'EndingHashKey': 'big number',
            'StartingHashKey': '0'
        },
        'SequenceNumberRange': {
            'StartingSequenceNumber': 'bigger number'
        }
    }]
    stream_description = {
        'HasMoreShards': False,
        'RetentionPeriodHours': retention_hours,
        'StreamName': name,
        'Shards': shards,
        'StreamARN': "",
        'EnhancedMonitoring': [{
            'ShardLevelMetrics': enhanced_monitoring
        }],
        'StreamStatus': 'ACTIVE'
    }
    # side_effect order matters: each present() call below consumes one
    # exists result, alternating present/absent across the four scenarios.
    exists_mock = MagicMock(side_effect=[{
        'result': True
    }, {
        'result': False
    }, {
        'result': True
    }, {
        'result': False
    }])
    get_stream_mock = MagicMock(
        return_value={'result': {
            'StreamDescription': stream_description
        }})
    shard_mock = MagicMock(return_value=[0, 0, {'OpenShards': shards}])
    dict_mock = MagicMock(return_value={'result': True})
    mock_bool = MagicMock(return_value=True)
    with patch.dict(
            boto_kinesis.__salt__, {
                'boto_kinesis.exists': exists_mock,
                'boto_kinesis.create_stream': dict_mock,
                'boto_kinesis.get_stream_when_active': get_stream_mock,
                'boto_kinesis.get_info_for_reshard': shard_mock,
                'boto_kinesis.num_shards_matches': mock_bool
            }):
        # already present, no change required
        comt = (
            'Kinesis stream {0} already exists,\n'
            'Kinesis stream {0}: retention hours did not require change, already set at {1},\n'
            'Kinesis stream {0}: enhanced monitoring did not require change, already set at {2},\n'
            'Kinesis stream {0}: did not require resharding, remains at {3} shards'
            .format(name, retention_hours, enhanced_monitoring, num_shards))
        ret.update({'comment': comt})
        assert boto_kinesis.present(name, retention_hours,
                                    enhanced_monitoring, num_shards) == ret

        with patch.dict(boto_kinesis.__opts__, {'test': True}):
            # not present, test environment (dry run)
            comt = ('Kinesis stream {0} would be created'.format(name))
            ret.update({'comment': comt, 'result': None})
            assert boto_kinesis.present(name, retention_hours,
                                        enhanced_monitoring,
                                        num_shards) == ret

            # already present, changes required, test environment (dry run)
            comt = (
                'Kinesis stream {0} already exists,\n'
                'Kinesis stream {0}: retention hours would be updated to {1},\n'
                'Kinesis stream {0}: would enable enhanced monitoring for {2},\n'
                'Kinesis stream {0}: would disable enhanced monitoring for {3},\n'
                'Kinesis stream {0}: would be resharded from {4} to {5} shards'
                .format(name, retention_hours + 1,
                        different_enhanced_monitoring, enhanced_monitoring,
                        num_shards, num_shards + 1))
            ret.update({'comment': comt, 'result': None})
            assert boto_kinesis.present(name, retention_hours + 1,
                                        different_enhanced_monitoring,
                                        num_shards + 1) == ret

        # not present, create and configure
        changes = {'new': {'name': name, 'num_shards': num_shards}}
        with patch.dict(boto_kinesis.__opts__, {'test': False}):
            comt = (
                'Kinesis stream {0} successfully created,\n'
                'Kinesis stream {0}: retention hours did not require change, already set at {1},\n'
                'Kinesis stream {0}: enhanced monitoring did not require change, already set at {2},\n'
                'Kinesis stream {0}: did not require resharding, remains at {3} shards'
                .format(name, retention_hours, enhanced_monitoring,
                        num_shards))
            ret.update({'comment': comt, 'result': True, 'changes': changes})
            assert ret == boto_kinesis.present(name, retention_hours,
                                               enhanced_monitoring,
                                               num_shards)