def _plotly_init_notebook_mode(self):
    """Initialize plotly's offline notebook mode with display redirected.

    Hack into the display routine: publish_display_data is replaced with
    this instance's _send_display_data. Also pretend that the
    InteractiveShell is initialized, as display() is otherwise turned
    into a no-op.
    """
    with patch.multiple(IPython.core.display,
                        publish_display_data=self._send_display_data), \
            patch.multiple(InteractiveShell, initialized=lambda: True):
        plotly.offline.init_notebook_mode()
def test_init(self):
    """Arduino() sets no_serial depending on whether serial access fails."""
    def serial_return():
        # Stand-in for the serial dependency that fails when used.
        raise SerialException
    # Failure path: serial raises, so the Arduino reports no_serial=True.
    with patch.multiple('lib.arduino', serial=serial_return):
        ard = Arduino()
        self.assertEqual(ard.no_serial, True)
    # Success path: serial is a working mock, so no_serial=False.
    with patch.multiple('lib.arduino', serial=MagicMock()):
        ard = Arduino()
        self.assertEqual(ard.no_serial, False)
def test_valid_form(self):
    """POSTing valid form data advances the wizard to the next step.

    get_form_instance is stubbed to return a form whose is_valid() is
    True, so the view must compute the next step and redirect to it.
    """
    class ValidForm():
        def is_valid(self):
            return True
    valid_form = ValidForm()
    with patch.multiple(
        DynaWizard,
        get_form_instance=MagicMock(return_value=valid_form),
        get_next_step=DEFAULT,
        redirect_to_step=DEFAULT,
    ) as mocks:
        wiz = DynaWizard()
        step = 'step'
        request = self.request_factory.post('')
        request.POST = {'key1': 'value1'}
        result = DynaWizard.as_view()(request, step=step)
        # The form must be built with the POSTed data for this step.
        wiz.get_form_instance.assert_called_with(
            step=step,
            form_kwargs=request.POST,
        )
        wiz.get_next_step.assert_called_with(
            current_step=step,
        )
        # assertEquals is a deprecated alias; assertEqual is canonical.
        self.assertEqual(result, wiz.redirect_to_step.return_value)
def test_invalid_form(self):
    """POSTing invalid form data re-renders the same step with the form.

    get_form_instance is stubbed to return a form whose is_valid() is
    False, so the view must render the current step again.
    """
    class InvalidForm():
        def is_valid(self):
            return False
    invalid_form = InvalidForm()
    with patch.multiple(
        DynaWizard,
        get_form_instance=MagicMock(return_value=invalid_form),
        render_step=DEFAULT,
    ) as mocks:
        wiz = DynaWizard()
        step = 'step'
        request = self.request_factory.post('')
        request.POST = {'key1': 'value1'}
        result = DynaWizard.as_view()(request, step=step)
        # The form must be built with the POSTed data for this step.
        wiz.get_form_instance.assert_called_with(
            step=step,
            form_kwargs=request.POST,
        )
        wiz.render_step.assert_called_with(
            request, step=step, context={
                'form': wiz.get_form_instance.return_value,
            }
        )
        # assertEquals is a deprecated alias; assertEqual is canonical.
        self.assertEqual(result, wiz.render_step.return_value)
def test_destroy(self, ssh_command):
    """Check if destroy runs the required ssh commands"""
    self.configure_provisoning_server()
    image_dir = '/opt/robottelo/images'
    vm = VirtualMachine()
    # Pretend the VM was already created so destroy() actually proceeds.
    with patch.multiple(
        vm,
        image_dir=image_dir,
        _created=True
    ):
        vm.destroy()
    # destroy() must stop the domain, undefine it, and remove its image.
    self.assertEqual(ssh_command.call_count, 3)
    ssh_command_args_list = [
        call('virsh destroy {0}'.format(vm.hostname),
             hostname=self.provisioning_server,
             connection_timeout=30),
        call('virsh undefine {0}'.format(vm.hostname),
             hostname=self.provisioning_server,
             connection_timeout=30),
        call('rm {0}/{1}.img'.format(image_dir, vm.hostname),
             hostname=self.provisioning_server,
             connection_timeout=30),
    ]
    self.assertListEqual(ssh_command.call_args_list, ssh_command_args_list)
def test_render_fallback_markup_comment(self):
    """With no fallback filter configured, markup text passes through."""
    text = r'''An [example](http://url.com/ "Title")'''
    with patch.multiple('django_comments_xtd.conf.settings',
                        COMMENTS_XTD_MARKUP_FALLBACK_FILTER=None):
        rendered = render_markup_comment(text)
    # Without a fallback filter the comment must be left untouched.
    self.assertEqual(rendered, text)
def test_render_fallback_markup_comment(self):
    """With the markdown fallback filter, markup is rendered to HTML."""
    text = r'''An [example](http://url.com/ "Title")'''
    with patch.multiple('django_comments_xtd.conf.settings',
                        COMMENTS_XTD_MARKUP_FALLBACK_FILTER='markdown'):
        rendered = render_markup_comment(text)
    # Markdown link syntax must be converted into an anchor tag.
    self.assertEqual(
        rendered,
        '<p>An <a href="http://url.com/" title="Title">example</a></p>')
def _testRequest(self, fn, *args, **kwargs):
    """Call ``fn`` with all HTTP plumbing mocked; return the POSTed body.

    Pins the client clock and transaction UUID so the generated request
    is deterministic, verifies URL/method/headers/timeout of the HTTP
    call, then returns the request payload decoded as UTF-8.
    """
    with patch.multiple(
        "urllib.request", Request=DEFAULT, urlopen=DEFAULT
    ) as mock_urllib:
        with patch("uuid.uuid4") as mock_uuid:
            with patch("ofxtools.Client.OFXClient.dtclient") as mock_dtclient:
                mock_dtclient.return_value = datetime(2017, 4, 1, tzinfo=UTC)
                mock_uuid.return_value = "DEADBEEF"
                mock_Request = mock_urllib["Request"]
                mock_Request.return_value = sentinel.REQUEST
                mock_urlopen = mock_urllib["urlopen"]
                mock_urlopen.return_value = sentinel.RESPONSE
                output = fn(*args, **kwargs)
                # Request() must receive exactly the client URL positionally.
                args = mock_Request.call_args[0]
                self.assertEqual(len(args), 1)
                self.assertEqual(args[0], self.client.url)
                # NOTE: rebinds ``kwargs`` — the original call kwargs are
                # no longer accessible past this point.
                kwargs = mock_Request.call_args[1]
                self.assertEqual(kwargs["method"], "POST")
                self.assertEqual(kwargs["headers"], self.client.http_headers)
                mock_urlopen.assert_called_once_with(
                    sentinel.REQUEST, context=ANY,
                    timeout=socket._GLOBAL_DEFAULT_TIMEOUT)
                self.assertEqual(output, sentinel.RESPONSE)
                return kwargs["data"].decode("utf_8")
def test_hdhomeruninterface_record_syntaxcheck(self):
    """Smoke test: record() must run without error when every OS-level
    collaborator (io/os/subprocess/time/validation) is mocked out."""
    with patch.multiple('recorder.hdhomerun', io=DEFAULT, os=DEFAULT,
                        subprocess=DEFAULT, time=DEFAULT,
                        isaValidRecording=DEFAULT) as patchMocks:
        hdhomerunInterface = HDHomeRunInterface([], [], '/bin/false')
        hdhomerunInterface.logger = Mock()
        # Channel lookup and tuner locking return canned fixtures.
        hdhomerunInterface.channelMap.getChannelInfo = Mock(
            autospec=True, return_value=self.channelA)
        hdhomerunInterface.tunerList.lockTuner = Mock(
            autospec=True, return_value=self.tunerA)
        hdhomerunInterface.record(self.channelA.channelMajor,
                                  self.channelA.channelMinor,
                                  self.stoptime, '/tmp/', '/tmp/')
def test_provides_temporary_workspace_at_runtime(self, mock_tempdir): """ A temporary directory must be created and assigned to runner.workspace at audit runtime. """ # Mocks for the TemporaryDirectory context manager enter_handler = mock_tempdir.return_value.__enter__ exit_handler = mock_tempdir.return_value.__exit__ with patch.multiple( self.runner, organize_files=DEFAULT, process_data=DEFAULT, ): # Inject an assertion about runner.workspace state, to be verified # when runner.process_data gets called. self.runner.process_data.side_effect = ( lambda: self.assertEqual( self.runner.workspace, enter_handler.return_value, # this is the tempdir path "runner.workspace must point to a tempdir at runtime" ) ) self.runner.run() # Assert TemporaryDirectory was called as a context manager enter_handler.assert_called_once_with() exit_handler.assert_called_once_with(None, None, None)
def test_upload_journal_logs(self):
    ''' test upload_journal_logs() '''
    # NOTE(review): tskey appears unused here — confirm it is needed.
    tskey = '__REALTIME_TIMESTAMP'
    journal = systemd.journal.Reader(path=os.getcwd())
    # NOTE(review): ``as journal`` rebinds the name to the patch mock;
    # the real Reader above becomes journal.return_value.
    with patch('systemd.journal.Reader', return_value=journal) as journal:
        with patch('main.JournaldClient', MagicMock(autospec=True)) as reader:
            reader.return_value.__iter__.return_value = [
                sentinel.msg1, sentinel.msg2,
                sentinel.msg3, sentinel.msg4]
            with patch.multiple(self.client,
                                retain_message=DEFAULT,
                                group_messages=DEFAULT):
                log_group1 = Mock()
                log_group2 = Mock()
                # msg2 is dropped by retain_message (second value False).
                self.client.retain_message.side_effect = [
                    True, False, True, True]
                self.client.group_messages.return_value = [
                    ((log_group1, 'stream1'), [sentinel.msg1]),
                    ((log_group2, 'stream2'),
                     [sentinel.msg3, sentinel.msg4]),
                ]
                self.client.upload_journal_logs(os.getcwd())
                # creates reader
                reader.assert_called_once_with(journal.return_value,
                                               self.CURSOR_CONTENT)
                # uploads log messages
                log_group1.log_messages.assert_called_once_with(
                    'stream1', [sentinel.msg1])
                log_group2.log_messages.assert_called_once_with(
                    'stream2', [sentinel.msg3, sentinel.msg4])
def test_generate(self):
    """
    bundle.generate should render the snippets, save them to the
    filesystem, and mark the bundle as not-expired in the cache for
    activity stream router!
    """
    bundle = ASRSnippetBundle(self._client(locale='fr', startpage_version=6))
    bundle.storage = Mock()
    bundle.snippets = [self.snippet1, self.snippet2]
    self.snippet1.render = Mock()
    self.snippet1.render.return_value = 'snippet1'
    self.snippet2.render = Mock()
    self.snippet2.render.return_value = 'snippet2'
    # Freeze the generation timestamp so the metadata is deterministic.
    datetime_mock = Mock()
    datetime_mock.utcnow.return_value.isoformat.return_value = 'now'
    with patch.multiple('snippets.base.bundles',
                        datetime=datetime_mock,
                        cache=DEFAULT,
                        default_storage=DEFAULT) as mocks:
        bundle.generate()
    self.assertTrue(bundle.filename.endswith('.json'))
    mocks['default_storage'].save.assert_called_with(bundle.filename, ANY)
    mocks['cache'].set.assert_called_with(bundle.cache_key, True, ONE_DAY)
    # Check content of saved file.
    content_file = mocks['default_storage'].save.call_args[0][1]
    content_json = json.load(content_file)
    self.assertEqual(content_json['messages'], ['snippet1', 'snippet2'])
    self.assertEqual(content_json['metadata']['generated_at'], 'now')
def test_get(self):
    """GET renders the step with a freshly built (kwarg-less) form."""
    with patch.multiple(
        DynaWizard,
        get_form_instance=DEFAULT,
        render_step=DEFAULT,
    ) as mocks:
        step = 'step'
        request = self.request_factory.get('')
        wiz = DynaWizard()
        result = DynaWizard.as_view()(request, step=step)
        # GET supplies no form data, so form_kwargs must be empty.
        wiz.get_form_instance.assert_called_with(
            step=step,
            form_kwargs={},
        )
        wiz.render_step.assert_called_with(
            request, step=step, context={
                'form': wiz.get_form_instance.return_value
            }
        )
        # assertEquals is a deprecated alias; assertEqual is canonical.
        self.assertEqual(result, wiz.render_step.return_value)
def testRequestStatementsDryrun(self):
    """A dryrun statement request serializes to the exact OFX payload.

    The client clock and the transaction UUID are pinned so the
    generated request text is fully deterministic.
    """
    with patch.multiple(
        "ofxtools.Client.OFXClient", dtclient=DEFAULT, uuid="DEADBEEF"
    ) as mock:
        mock["dtclient"].return_value = datetime(2017, 4, 1, tzinfo=UTC)
        dryrun = (
            self.client.request_statements(
                "t0ps3kr1t", self.stmtRq0, dryrun=True
            )
            .read()
            .decode()
        )
        # Expected payload: OFX header block followed by signon and
        # bank statement request aggregates.
        request = (
            "OFXHEADER:100\r\n"
            "DATA:OFXSGML\r\n"
            "VERSION:103\r\n"
            "SECURITY:NONE\r\n"
            "ENCODING:USASCII\r\n"
            "CHARSET:NONE\r\n"
            "COMPRESSION:NONE\r\n"
            "OLDFILEUID:NONE\r\n"
            "NEWFILEUID:DEADBEEF\r\n"
            "\r\n"
            "<OFX>"
            "<SIGNONMSGSRQV1>"
            "<SONRQ>"
            "<DTCLIENT>20170401000000.000[0:GMT]</DTCLIENT>"
            "<USERID>elmerfudd</USERID>"
            "<USERPASS>t0ps3kr1t</USERPASS>"
            "<LANGUAGE>ENG</LANGUAGE>"
            "<FI>"
            "<ORG>FIORG</ORG>"
            "<FID>FID</FID>"
            "</FI>"
            "<APPID>{appid}</APPID>"
            "<APPVER>{appver}</APPVER>"
            "</SONRQ>"
            "</SIGNONMSGSRQV1>"
            "<BANKMSGSRQV1>"
            "<STMTTRNRQ>"
            "<TRNUID>DEADBEEF</TRNUID>"
            "<STMTRQ>"
            "<BANKACCTFROM>"
            "<BANKID>123456789</BANKID>"
            "<ACCTID>111111</ACCTID>"
            "<ACCTTYPE>CHECKING</ACCTTYPE>"
            "</BANKACCTFROM>"
            "<INCTRAN>"
            "<DTSTART>20170101000000.000[0:GMT]</DTSTART>"
            "<DTEND>20170331000000.000[0:GMT]</DTEND>"
            "<INCLUDE>Y</INCLUDE>"
            "</INCTRAN>"
            "</STMTRQ>"
            "</STMTTRNRQ>"
            "</BANKMSGSRQV1>"
            "</OFX>"
        ).format(appid=DEFAULT_APPID, appver=DEFAULT_APPVER)
        self.assertEqual(dryrun, request)
def test_validate(self):
    """validate() must delegate each comma-separated part to a sub-field."""
    with patch.multiple('snippets.base.admin.fields',
                        ChoiceField=DEFAULT,
                        ModelChoiceField=DEFAULT):
        field = JEXLAddonField()
        field.validate('installed,{}'.format(self.addon.id))
        # First sub-field receives the status, second the addon id string.
        status_field, addon_field = field.fields[0], field.fields[1]
        status_field.validate.assert_called_with('installed')
        addon_field.validate.assert_called_with(str(self.addon.id))
def patch_jupyter_dirs():
    """
    Patch jupyter paths to use temporary directories.

    This just creates the patches and directories, caller is still
    responsible for starting & stopping patches, and removing temp dir
    when appropriate.

    Returns a tuple (jupyter_patches, jupyter_dirs, remove_jupyter_dirs):
    the list of unstarted patch objects, the dict of created directory
    paths, and a cleanup callable that deletes everything.
    """
    test_dir = tempfile.mkdtemp(prefix='jupyter_')
    jupyter_dirs = {name: make_dirs(test_dir, name) for name in (
        'user_home', 'env_vars', 'system', 'sys_prefix', 'custom', 'server')}
    jupyter_dirs['root'] = test_dir
    # Server notebook/runtime dirs are created eagerly here rather than
    # via make_dirs.
    for name in ('notebook', 'runtime'):
        d = jupyter_dirs['server'][name] = os.path.join(
            test_dir, 'server', name)
        if not os.path.exists(d):
            os.makedirs(d)

    # patch relevant environment variables
    jupyter_patches = []
    jupyter_patches.append(
        patch.dict('os.environ', stringify_env({
            'HOME': jupyter_dirs['user_home']['root'],
            'JUPYTER_CONFIG_DIR': jupyter_dirs['env_vars']['conf'],
            'JUPYTER_DATA_DIR': jupyter_dirs['env_vars']['data'],
            'JUPYTER_RUNTIME_DIR': jupyter_dirs['server']['runtime'],
        })))

    # patch jupyter path variables in various modules
    # find the appropriate modules to patch according to compat.
    # Should include either
    #   notebook.nbextensions
    # or
    #   jupyter_contrib_core.notebook_compat._compat.nbextensions
    modules_to_patch = set([
        jupyter_core.paths,
        sys.modules[nbextensions._get_config_dir.__module__],
        sys.modules[nbextensions._get_nbextension_dir.__module__],
    ])
    path_patches = dict(
        SYSTEM_CONFIG_PATH=[jupyter_dirs['system']['conf']],
        ENV_CONFIG_PATH=[jupyter_dirs['sys_prefix']['conf']],
        SYSTEM_JUPYTER_PATH=[jupyter_dirs['system']['data']],
        ENV_JUPYTER_PATH=[jupyter_dirs['sys_prefix']['data']],
    )
    for mod in modules_to_patch:
        # Only patch the path attributes each module actually defines.
        applicable_patches = {
            attrname: newval
            for attrname, newval in path_patches.items()
            if hasattr(mod, attrname)}
        jupyter_patches.append(
            patch.multiple(mod, **applicable_patches))

    def remove_jupyter_dirs():
        """Remove all temporary directories created."""
        shutil.rmtree(test_dir)

    return jupyter_patches, jupyter_dirs, remove_jupyter_dirs
def setUp(self):
    """Stub out Bowtie2 tooling, project config and read filtering."""
    # Bowtie2: skip real construction; capture output via a shared list.
    patcher = patch.multiple(Bowtie2,
                             __init__=Mock(return_value=None),
                             yield_output=DEFAULT)
    self.bowtie2_output = []
    mocks = patcher.start()
    mocks['yield_output'].return_value = self.bowtie2_output
    self.addCleanup(patcher.stop)
    # Bowtie2Build: skip real construction and index building.
    patcher = patch.multiple(Bowtie2Build,
                             __init__=Mock(return_value=None),
                             build=DEFAULT)
    patcher.start()
    self.addCleanup(patcher.stop)
    # Project config returns a single canned reference sequence.
    patcher = patch.object(ProjectConfig, 'loadDefault')
    mock_projects = patcher.start()
    self.addCleanup(patcher.stop)
    mock_projects.return_value.getAllReferences.return_value = {
        'R1': 'GTGGG'}
    # Treat every read as long enough for remapping.
    patcher = patch('micall.core.remap.is_short_read',
                    Mock(return_value=False))
    patcher.start()
    self.addCleanup(patcher.stop)
def test_runner_keeps_reference_to_report_path(self):
    """
    The runner instance keeps a reference to the report path after the
    data processing phase.
    """
    phase_patches = patch.multiple(self.runner,
                                   organize_files=DEFAULT,
                                   process_data=DEFAULT)
    with phase_patches:
        # No report path before the run, a value afterwards.
        self.assertIsNone(self.runner.report_path)
        self.runner.run()
        self.assertIsNotNone(self.runner.report_path)
def test_setup_connected(self):
    """Test setup when connection succeeds."""
    with patch.multiple('pmsensor.co2sensor',
                        read_mh_z19=DEFAULT,
                        read_mh_z19_with_temperature=DEFAULT):
        # Import after patching so the name bound here is the mock.
        from pmsensor.co2sensor import read_mh_z19_with_temperature
        read_mh_z19_with_temperature.return_value = None
        mock_add = Mock()
        # setup_platform must succeed and register exactly one batch
        # of entities via the add callback.
        self.assertTrue(mhz19.setup_platform(self.hass, {
            'platform': 'mhz19',
            'monitored_conditions': ['co2', 'temperature'],
            mhz19.CONF_SERIAL_DEVICE: 'test.serial',
        }, mock_add))
        self.assertEqual(1, mock_add.call_count)
def test_run_calls_anciliary_methods(self):
    """runner.run must invoke each of its ancillary phase methods once."""
    phase_names = ('organize_files', 'process_data', 'post_process')
    with patch.multiple(self.runner,
                        **{name: DEFAULT for name in phase_names}):
        self.runner.run()
        # Each phase must have been called exactly once, with no args.
        for name in phase_names:
            getattr(self.runner, name).assert_called_once_with()
def setUp(self):
    """Prepare canonical upload data and disable the scan-poll delay."""
    super().setUp()
    self.data = {
        'upload_id': 'some-valid-upload-id',
        'binary_filesize': 123456,
        'source_uploaded': False,
    }
    self.package_name = 'namespace.binary'
    # Make status polling effectively instantaneous so tests stay fast.
    delay_patcher = patch.multiple(
        'snapcraft.storeapi._upload',
        SCAN_STATUS_POLL_DELAY=0.0001)
    delay_patcher.start()
    self.addCleanup(delay_patcher.stop)
def test_dont_create_if_already_created(
        self, ssh_command, sleep):
    """Check that the creation steps are not run more than once."""
    self.configure_provisoning_server()
    vm = VirtualMachine()
    with patch.multiple(
        vm,
        image_dir='/opt/robottelo/images',
        provisioning_server='provisioning.example.com'
    ):
        vm.create()
        # A second create() must be a no-op.
        vm.create()
    self.assertEqual(vm.ip_addr, '192.168.0.1')
    # Only the three ssh commands from the first create() may have run.
    self.assertEqual(ssh_command.call_count, 3)
def test_generate_request_header_custom_service(self):
    """A custom service name must flow through to the GSSAPI context."""
    with patch.multiple("gssapi.SecurityContext", __init__=fake_init,
                        step=fake_resp):
        response = requests.Response()
        response.url = "http://www.example.org/"
        response.headers = {'www-authenticate': b64_negotiate_token}
        host = urlparse(response.url).hostname
        auth = requests_gssapi.HTTPKerberosAuth(service="barfoo")
        # Fixed: a stray trailing comma here previously built a useless
        # one-element tuple around the (discarded) return value.
        auth.generate_request_header(response, host)
        fake_init.assert_called_with(
            name=gssapi_sname("*****@*****.**"),
            usage="initiate", flags=gssflags, creds=None, mech=SPNEGO)
        fake_resp.assert_called_with(b"token")
def check_sum_of_calls(object_, methods, maximum_calls,
                       minimum_calls=1, include_arguments=True,
                       stack_depth=1):
    """
    Instruments the given methods on the given object to verify that the
    total sum of calls made to the methods falls between minimum_calls
    and maximum_calls.

    Used as a context manager (note the ``yield``): calls are counted
    while the ``with`` body runs, and the range is asserted afterwards.
    """
    # Wrap each named method so its calls (and stacks) are recorded.
    mocks = {
        method: StackTraceCounter.capture_call(
            getattr(object_, method),
            stack_depth=stack_depth + 3,  # dictcomp + check_sum_of_calls + contextmanager
            include_arguments=include_arguments
        )
        for method in methods
    }
    with patch.multiple(object_, **mocks):
        yield
    call_count = sum(
        capture_fn.stack_counter.total_calls
        for capture_fn in mocks.values())
    messages = []
    # Assertion errors don't handle multi-line values, so pretty-print to std-out instead
    if not minimum_calls <= call_count <= maximum_calls:
        messages = ["Expected between {} and {} calls, {} were made.\n\n".format(
            minimum_calls,
            maximum_calls,
            call_count,
        )]
        for method_name, capture_fn in mocks.items():
            stack_counter = capture_fn.stack_counter
            messages.append("{!r} was called {} times:\n".format(
                method_name,
                stack_counter.total_calls
            ))
            for stack in stack_counter:
                messages.append(" called {} times:\n\n".format(
                    stack_counter.stack_calls(stack)))
                messages.append(" " + " ".join(
                    traceback.format_list(stack)))
                messages.append("\n\n")
                if include_arguments:
                    for (args, kwargs), count in stack_counter[stack].items():
                        messages.append(f" called {count} times with:\n")
                        messages.append(f" args: {args}\n")
                        messages.append(" kwargs: {}\n\n".format(
                            dict(kwargs)))
    # verify that we called the methods within the desired range
    assert minimum_calls <= call_count <= maximum_calls, "".join(messages)
def mock_nbhelper():
    """Yield NbGraderServiceHelper with its side-effectful calls mocked.

    Filesystem operations (chown/mkdir), the Gradebook, and every
    helper method are replaced so tests can run without nbgrader state.
    """
    with patch('shutil.chown'):
        with patch('pathlib.Path.mkdir'):
            with patch('illumidesk.apis.nbgrader_service.Gradebook'):
                with patch.multiple(
                    'illumidesk.apis.nbgrader_service.NbGraderServiceHelper',
                    update_course=Mock(return_value=None),
                    create_database_if_not_exists=Mock(),
                    add_user_to_nbgrader_gradebook=Mock(return_value=None),
                    register_assignment=Mock(return_value=None),
                    get_course=Mock(return_value=Course(
                        id='123',
                        lms_lineitems_endpoint=
                        'canvas.docker.com/api/lti/courses/1/line_items')),
                ) as mock_nb:
                    yield mock_nb
def test_max_piece_size_is_no_power_of_two(capsys, mock_content):
    """A non-power-of-two --max-piece-size must abort with a CLI error."""
    # Create large sparse file, i.e. a file that isn't actually written to disk
    large_file = mock_content / 'large file'
    with open(large_file, 'ab') as f:
        f.truncate(2**40)
    content_path = str(mock_content)
    # Skip the expensive hashing and torrent-writing phases.
    with patch.multiple('torfcli._main',
                        _hash_pieces=DEFAULT,
                        _write_torrent=DEFAULT):
        factor = 1.234
        # --max-piece-size is given in MiB, so the resulting byte count
        # is factor * 2**20.
        exp_invalid_piece_size = int(factor * 2**20)
        with patch('sys.exit') as mock_exit:
            run([content_path, '--max-piece-size', str(factor)])
        mock_exit.assert_called_once_with(err.Code.CLI)
        cap = capsys.readouterr()
        assert cap.err == f'{_vars.__appname__}: Piece size must be a power of 2: {exp_invalid_piece_size}\n'
def test_valid_single_line_item(self):
    """A valid line item triggers enrollment and ends up PROCESSED."""
    order = Order()
    order.id = 40
    order.save()
    line_items = [{
        "properties": [{
            "name": "email",
            "value": "*****@*****.**"
        }],
        "sku": "course-v1:org+course+run1"
    }]
    mock_get_course_by_id = Mock(return_value=self.course)
    mock_get_email_params = Mock(return_value=self.email_params)
    mock_enroll_email = Mock()
    # NOTE(review): the assert_called_once_* checks below only hold
    # while line_items has a single entry — confirm before extending.
    for line_item in line_items:
        with patch.multiple(utils,
                            get_course_by_id=mock_get_course_by_id,
                            get_email_params=mock_get_email_params,
                            enroll_email=mock_enroll_email):
            order_item = process_line_item(order, line_item)
            # Did we mock-fetch the course with the correct locator?
            mock_get_course_by_id.assert_called_once_with(self.cl)
            # Did we mock-fetch the email params for the course
            # identified by that locator?
            mock_get_email_params.assert_called_once_with(
                self.course, True, secure=True)
            # Did we mock-invoke enroll_email with the correct parameters?
            mock_enroll_email.assert_called_once_with(
                self.cl,
                '*****@*****.**',  # noqa: E501
                auto_enroll=True,
                email_students=True,
                email_params=self.email_params,  # noqa: E501
                language=None)
    # Read back the order item
    order_item.refresh_from_db()
    self.assertEqual(order_item.order, order)
    self.assertEqual(order_item.sku, 'course-v1:org+course+run1')
    self.assertEqual(order_item.email, '*****@*****.**')
    self.assertEqual(order_item.status, OrderItem.PROCESSED)
def test_report_file_is_persisted_elsewhere(self):
    """
    `shutil.move` must be called using the filepath from runner.output
    and runner.report_path.
    """
    # Removed an unused local (a Mock named mock_output) that was never
    # wired into the runner or asserted on.
    with patch.multiple(
        self.runner,
        organize_files=DEFAULT,
        process_data=DEFAULT,
    ):
        self.runner.run()
        self.mock_shutil.move.assert_called_once_with(
            src=self.runner.output,
            dst=self.runner.report_path
        )
def start(self):
    """Redirect Jupyter/IPython config, data and path vars to a temp dir."""
    self.test_dir = td = TemporaryDirectory()
    # Environment-variable based locations.
    self.env_patch = patch.dict(os.environ, {
        'JUPYTER_CONFIG_DIR': pjoin(td.name, 'jupyter'),
        'JUPYTER_DATA_DIR': pjoin(td.name, 'jupyter_data'),
        'JUPYTER_RUNTIME_DIR': pjoin(td.name, 'jupyter_runtime'),
        'IPYTHONDIR': pjoin(td.name, 'ipython'),
    })
    self.env_patch.start()
    # Module-level path constants in jupyter_core.paths.
    self.path_patch = patch.multiple(
        jupyter_core.paths,
        SYSTEM_JUPYTER_PATH=[pjoin(td.name, 'share', 'jupyter')],
        ENV_JUPYTER_PATH=[pjoin(td.name, 'env', 'share', 'jupyter')],
        SYSTEM_CONFIG_PATH=[pjoin(td.name, 'etc', 'jupyter')],
        ENV_CONFIG_PATH=[pjoin(td.name, 'env', 'etc', 'jupyter')],
    )
    self.path_patch.start()
def test_env_only_calls_unset():
    """run_step with only envUnset must call env_unset and nothing else."""
    context = Context({
        'key1': 'value1',
        'key2': 'value2',
        'key3': 'value3',
        'envUnset': ['ARB_DELETE_ME1', 'ARB_DELETE_ME2']
    })
    with patch.multiple('pypyr.steps.env',
                        env_get=DEFAULT,
                        env_set=DEFAULT,
                        env_unset=DEFAULT) as mock_env:
        pypyr.steps.env.run_step(context)
        # Only the unset operation should have fired.
        for untouched in ('env_get', 'env_set'):
            mock_env[untouched].assert_not_called()
        mock_env['env_unset'].assert_called_once()
def _test_workflow(
    self, send_command, download_command, commands, result, missing_data
):
    """Run transfer_data with patched commands; verify the sent sequence.

    NOTE(review): the ``result`` parameter is overwritten below before
    ever being read — confirm whether callers rely on passing it.
    """
    self.communicator_mock.send_command = send_command
    with patch.multiple(
        TRANSFER,
        **self.get_patches(send_command, download_command),
    ):
        result = run_async(
            transfer.transfer_data(self.communicator_mock, missing_data)
        )
    # Every command except the first must have been sent, in order.
    self.assertEqual(send_command.call_count, len(commands) - 1)
    expected_args_list = []
    for message, _ in commands[1:]:
        expected_args_list.append(call(message))
    self.assertEqual(expected_args_list, send_command.call_args_list)
    return result
def test_reuses_collection_for_same_path(self):
    """PostController must create one PostCollection per distinct path."""
    self.PostCollection = Mock()
    patcher = patch.multiple(
        'website_editor.controllers',
        PostCollection=self.PostCollection,
        load_posts=lambda path: path,
    )
    patcher.start()
    self.addCleanup(patcher.stop)
    # Import after patching so the controller sees the mocked names.
    # (Removed an unused `from .models import PostCollection` import
    # that shadowed nothing and was never referenced.)
    from .controllers import PostController
    # Two controllers for the same path share a single collection...
    controller1 = PostController('path1')
    controller2 = PostController('path1')
    self.PostCollection.assert_called_once_with('path1')
    # ...while a new path constructs a new collection.
    controller3 = PostController('path2')
    self.PostCollection.assert_called_with('path2')
def test_generate_request_header_init_error(self):
    """A failing GSSAPI step must surface as SPNEGOExchangeError."""
    with patch.multiple("gssapi.SecurityContext", __init__=fake_init,
                        step=fail_resp):
        response = requests.Response()
        response.url = "http://www.example.org/"
        response.headers = {'www-authenticate': b64_negotiate_token}
        host = urlparse(response.url).hostname
        auth = requests_gssapi.HTTPKerberosAuth()
        self.assertRaises(requests_gssapi.exceptions.SPNEGOExchangeError,
                          auth.generate_request_header, response, host)
        # The context must still have been initialized with defaults.
        fake_init.assert_called_with(
            name=gssapi_sname("*****@*****.**"),
            usage="initiate", flags=gssflags, creds=None, mech=SPNEGO)
def test_handle_response_200_mutual_auth_required_failure(self):
    """A 200 with no auth headers must fail required mutual auth."""
    with patch.multiple("gssapi.SecurityContext", __init__=fake_init,
                        step=fake_resp):
        response_ok = requests.Response()
        response_ok.url = "http://www.example.org/"
        response_ok.status_code = 200
        response_ok.headers = {}
        auth = requests_gssapi.HTTPKerberosAuth(
            mutual_authentication=REQUIRED)
        auth.context = {"www.example.org": "CTX"}
        self.assertRaises(requests_gssapi.MutualAuthenticationError,
                          auth.handle_response, response_ok)
        # With no server token present, step() must never run.
        self.assertFalse(fake_resp.called)
def test_handle_response_401_rejected(self):
    # Get a 401 from server, authenticate, and get another 401 back.
    # Ensure there is no infinite recursion.
    with patch.multiple("gssapi.SecurityContext", __init__=fake_init,
                        step=fake_resp):
        connection = Mock()

        def connection_send(self, *args, **kwargs):
            # NOTE(review): ``self`` here actually receives the request
            # passed to connection.send() — consider renaming.
            reject = requests.Response()
            reject.url = "http://www.example.org/"
            reject.status_code = 401
            reject.connection = connection
            return reject

        connection.send.side_effect = connection_send
        raw = Mock()
        raw.release_conn.return_value = None
        request = requests.Request()
        response = requests.Response()
        response.request = request
        response.url = "http://www.example.org/"
        response.headers = {'www-authenticate': b64_negotiate_token}
        response.status_code = 401
        response.connection = connection
        response._content = ""
        response.raw = raw
        auth = requests_gssapi.HTTPKerberosAuth()
        r = auth.handle_response(response)
        # Still 401 after exactly one retry — no infinite loop.
        self.assertEqual(r.status_code, 401)
        self.assertEqual(request.headers['Authorization'],
                         b64_negotiate_response)
        connection.send.assert_called_with(request)
        raw.release_conn.assert_called_with()
        fake_init.assert_called_with(
            name=gssapi_sname("*****@*****.**"),
            usage="initiate", flags=gssflags, creds=None, mech=SPNEGO)
        fake_resp.assert_called_with(b"token")
def test_create_default_doc_template(self):
    """Only the doc template is copied when it matches its default.

    The scheme_template default is changed so it differs from the
    option value; presumably only options equal to their defaults get a
    default template created — confirm against _create_default_templates.
    """
    options_dict = {'doc_template': 'doc', 'scheme_template': 'scheme'}
    options = CombinedOptions(options_dict, defaults={
        **options_dict,
        'scheme_template': 'modified'
    })
    # side_effect list: one resource lookup is expected.
    mock_resource = Mock(side_effect=['doc_resource'])
    with patch.multiple('pgsqldoc.pgsqldoc',
                        copy_if_not_exists=DEFAULT,
                        resource_filename=mock_resource) as mocks:
        Preprocessor._create_default_templates(self.preprocessor, options)
        self.assertEqual(mocks['copy_if_not_exists'].mock_calls, [
            call(
                self.preprocessor.project_path / options_dict['doc_template'],
                'doc_resource')
        ])
def test_eligible_eligible(self, discount_percentage, base_price,
                           formatted_base_price, final_price):
    """format_strikeout_price renders original + discounted price markup
    for a discount-eligible user/course (parametrized prices)."""
    with patch.multiple(
        utils,
        can_receive_discount=Mock(return_value=True),
        get_course_prices=Mock(return_value=(base_price, None)),
        discount_percentage=Mock(return_value=discount_percentage)
    ):
        content, has_discount = utils.format_strikeout_price(
            Mock(name='user'), Mock(name='course'))
        # Screen-reader text plus discounted price and struck-through
        # original price.
        assert str(content) == (
            "<span class='sr-only'>"
            "Original price: <span class='price original'>{original_price}</span>, discount price: "
            "</span>"
            "<span class='price discount'>{discount_price}</span> "
            "<del aria-hidden='true'><span class='price original'>{original_price}</span></del>"
        ).format(original_price=formatted_base_price,
                 discount_price=final_price)
        assert has_discount
def test_handle_response_500_mutual_auth_required_failure(self):
    """A 500 under required mutual auth is sanitized (unless disabled)."""
    with patch.multiple("gssapi.SecurityContext", __init__=fake_init,
                        step=fail_resp):
        response_500 = requests.Response()
        response_500.url = "http://www.example.org/"
        response_500.status_code = 500
        response_500.headers = {}
        response_500.request = "REQUEST"
        response_500.connection = "CONNECTION"
        response_500._content = "CONTENT"
        response_500.encoding = "ENCODING"
        response_500.raw = "RAW"
        response_500.cookies = "COOKIES"
        auth = requests_gssapi.HTTPKerberosAuth(
            mutual_authentication=REQUIRED)
        auth.context = {"www.example.org": "CTX"}
        r = auth.handle_response(response_500)
        # Sanitized copy: metadata preserved, content and cookies wiped.
        self.assertTrue(
            isinstance(r, requests_gssapi.gssapi_.SanitizedResponse))
        self.assertNotEqual(r, response_500)
        self.assertNotEqual(r.headers, response_500.headers)
        self.assertEqual(r.status_code, response_500.status_code)
        self.assertEqual(r.encoding, response_500.encoding)
        self.assertEqual(r.raw, response_500.raw)
        self.assertEqual(r.url, response_500.url)
        self.assertEqual(r.reason, response_500.reason)
        self.assertEqual(r.connection, response_500.connection)
        self.assertEqual(r.content, '')
        self.assertNotEqual(r.cookies, response_500.cookies)
        self.assertFalse(fail_resp.called)

        # re-test with error response sanitizing disabled
        auth = requests_gssapi.HTTPKerberosAuth(
            sanitize_mutual_error_response=False)
        auth.context = {"www.example.org": "CTX"}
        r = auth.handle_response(response_500)
        self.assertFalse(
            isinstance(r, requests_gssapi.gssapi_.SanitizedResponse))
def test_all(self):
    """_fix_actions rewrites tag/mark-for-op/untag actions, keeps
    notify, and drops string and unknown-type actions."""
    actions = [
        'something',
        {'type': 'foo'},
        {'type': 'notify'},
        {'type': 'mark'},
        {'type': 'tag'},
        {'type': 'mark-for-op'},
        {'type': 'remove-tag'},
        {'type': 'unmark'},
        {'type': 'untag'},
        {'type': 'bar'},
    ]
    with patch.multiple(
        pb,
        _fix_tag_action=DEFAULT,
        _fix_mark_for_op_action=DEFAULT,
        _fix_untag_action=DEFAULT,
        autospec=True
    ) as mocks:
        mocks['_fix_tag_action'].return_value = {'fixed': 'tag'}
        mocks['_fix_mark_for_op_action'].return_value = {'fixed': 'op'}
        mocks['_fix_untag_action'].return_value = {'fixed': 'untag'}
        res = self.cls._fix_actions(actions)
        # 'something', 'foo' and 'bar' entries must be dropped entirely.
        assert res == [
            {'type': 'notify'},
            {'fixed': 'tag'},
            {'fixed': 'tag'},
            {'fixed': 'op'},
            {'fixed': 'untag'},
            {'fixed': 'untag'},
            {'fixed': 'untag'}
        ]
        # mark and tag both route through _fix_tag_action.
        assert mocks['_fix_tag_action'].mock_calls == [
            call(self.cls, {'type': 'mark'}),
            call(self.cls, {'type': 'tag'})
        ]
        assert mocks['_fix_mark_for_op_action'].mock_calls == [
            call(self.cls, {'type': 'mark-for-op'})
        ]
        # remove-tag, unmark and untag all route through _fix_untag_action.
        assert mocks['_fix_untag_action'].mock_calls == [
            call(self.cls, {'type': 'remove-tag'}),
            call(self.cls, {'type': 'unmark'}),
            call(self.cls, {'type': 'untag'}),
        ]
def test_handle_response_401(self):
    # Get a 401 from server, authenticate, and get a 200 back.
    with patch.multiple("gssapi.SecurityContext", __init__=fake_init,
                        step=fake_resp):
        response_ok = requests.Response()
        response_ok.url = "http://www.example.org/"
        response_ok.status_code = 200
        response_ok.headers = {'www-authenticate': b64_negotiate_server}
        connection = Mock()
        connection.send = Mock(return_value=response_ok)
        raw = Mock()
        raw.release_conn = Mock(return_value=None)
        request = requests.Request()
        response = requests.Response()
        response.request = request
        response.url = "http://www.example.org/"
        response.headers = {'www-authenticate': b64_negotiate_token}
        response.status_code = 401
        response.connection = connection
        response._content = ""
        response.raw = raw
        auth = requests_gssapi.HTTPKerberosAuth()
        auth.handle_other = Mock(return_value=response_ok)
        r = auth.handle_response(response)
        # The retried request succeeded and kept the original 401 in
        # its history.
        self.assertTrue(response in r.history)
        auth.handle_other.assert_called_once_with(response_ok)
        self.assertEqual(r, response_ok)
        self.assertEqual(request.headers['Authorization'],
                         b64_negotiate_response)
        connection.send.assert_called_with(request)
        raw.release_conn.assert_called_with()
        fake_init.assert_called_with(
            name=gssapi_sname("*****@*****.**"),
            usage="initiate", flags=gssflags, creds=None, mech=SPNEGO)
        fake_resp.assert_called_with(b"token")
def test_git_tag_dirty(self):
    """find_package_version must merge git (dirty, tagged), pip and
    pkg_resources info into one VersionInfo."""
    with patch.multiple(
        pb,
        autospec=True,
        _find_git_info=DEFAULT,
        _find_pip_info=DEFAULT,
        _find_pkg_info=DEFAULT,
    ) as mocks:
        mocks['_find_git_info'].return_value = {
            'remotes': {
                'origin': 'git+https://foo'
            },
            'tag': 'mytag',
            'commit': '12345678',
            'dirty': True
        }
        mocks['_find_pip_info'].return_value = {
            'version': '1.2.3',
            'url': 'http://my.package.url/pip'
        }
        mocks['_find_pkg_info'].return_value = {
            'version': '1.2.3',
            'url': 'http://my.package.url/pkg_resources'
        }
        # Pretend we are inside a git checkout.
        with patch('%s._git_repo_path' % pb,
                   new_callable=PropertyMock) as mock_is_git:
            mock_is_git.return_value = '/git/repo/.git'
            res = self.cls.find_package_version()
        assert res.as_dict == VersionInfo(
            pip_version='1.2.3',
            pip_url='http://my.package.url/pip',
            pkg_resources_version='1.2.3',
            pkg_resources_url='http://my.package.url/pkg_resources',
            git_commit='12345678',
            git_is_dirty=True,
            git_tag='mytag',
            git_remotes={
                'origin': 'git+https://foo'
            }).as_dict
        # Git info must be looked up against the detected repo path.
        assert mocks['_find_git_info'].mock_calls == [
            call(self.cls, '/git/repo/.git')
        ]
        assert mocks['_find_pip_info'].mock_calls == [call(self.cls)]
        assert mocks['_find_pkg_info'].mock_calls == [call(self.cls)]
        assert mock_is_git.mock_calls == [call()]
async def _run_on_infrastructure(meth, *args, **kwargs):
    """Start the Manager infrastructure and call the given callable.

    The method given is run through a serializing wrapper, so that
    Django database accesses are correct.

    :param meth: The callable to run on the infrastructure. All other
        arguments are forwarded to it.
    """
    with TestingContext():
        _create_test_dirs()
        overrides, zmq_socket = _prepare_settings()
        with overrides:
            # Point storage connectors/volumes at the test settings and
            # rebuild them before the manager starts.
            with patch.multiple(
                "resolwe.storage.settings",
                STORAGE_CONNECTORS=resolwe_settings.STORAGE_CONNECTORS,
                FLOW_VOLUMES=resolwe_settings.FLOW_VOLUMES,
            ):
                connectors.recreate_connectors()
                await database_sync_to_async(_manager_setup)()
                # Wire the listener to the configured executor endpoint.
                hosts = settings.FLOW_EXECUTOR["LISTENER_CONNECTION"]["hosts"]
                port = settings.FLOW_EXECUTOR["LISTENER_CONNECTION"]["port"]
                protocol = settings.FLOW_EXECUTOR["LISTENER_CONNECTION"][
                    "protocol"]
                listener.hosts = hosts
                listener.port = port
                listener.protocol = protocol
                listener.zmq_socket = zmq_socket
                async with listener:
                    try:
                        with override_settings(
                                FLOW_MANAGER_SYNC_AUTO_CALLS=True):
                            # Run the test in the new thread instead on the
                            # main thread (default). If test is started on the
                            # main thread then database_sync_to_async calls
                            # will wait indefinitely (they share the same
                            # thread) for the test to finish creating the
                            # infinite loop.
                            result = await database_sync_to_async(
                                meth, thread_sensitive=False)(*args, **kwargs)
                            return result
                    except Exception:
                        logger.exception("Exception while running test")
                    finally:
                        logger.debug("test_runner: Terminating listener")
def test_path_file_found_widget(self):
    """Serving an existing widget media file returns the download response."""
    req, get_obj_mock, build_response_mock = self.build_mocks('widget')

    expected = Mock()
    expected.status_code = 200
    build_response_mock.return_value = expected

    # Disable X-Sendfile and route the view's collaborators to our mocks.
    with self.settings(USE_XSENDFILE=False), patch.multiple(
            'wirecloud.catalogue.views',
            get_object_or_404=get_obj_mock,
            build_downloadfile_response=build_response_mock):
        result = serve_catalogue_media(req, 'Wirecloud', 'Test', '1.0', 'image/catalogue.png')
        self.assertEqual(result, expected)
def wrapper(*args, **kwargs):
    """Invoke the wrapped function with the koji client fully mocked out."""
    with patch.multiple('koji',
                        read_config=DEFAULT,
                        grab_session_options=DEFAULT,
                        ClientSession=DEFAULT) as mocks:
        session = MagicMock()
        mocks['ClientSession'].return_value = session
        # Route every session API onto its canned fake implementation.
        fakes = {
            'getBuild': _koji_get_build,
            'getPackageID': _koji_get_package_id,
            'listArchives': _koji_list_archives,
            'listBuilds': _koji_list_builds,
            'listTagged': _koji_list_tagged,
            'listTags': _koji_list_tags,
        }
        for api_name, fake in fakes.items():
            getattr(session, api_name).side_effect = fake
        return f(*args, **kwargs)
def test_abstract_converter():
    """The Converter ABC is instantiable once its abstract methods are
    cleared, and its abstract colour hooks return None.

    Uses ``patch.multiple`` as a context manager instead of manual
    ``start()``/``stop()`` calls: with the manual pair, any failing assert
    skips ``stop()`` and leaks the patched ``__abstractmethods__`` into
    every later test.
    """
    with patch.multiple(colors.Converter, __abstractmethods__=set()):
        cmf_name = colors.get_avail_cmfs()[0]
        illuminant_name = colors.get_avail_illuminants()[0]
        test_converter = colors.Converter(cmf=cmf_name,
                                          illuminant=illuminant_name)

        assert cmf_name == test_converter.cmf_name
        assert illuminant_name == test_converter.illuminant_name
        # The abstract hooks have no implementation, so they yield None.
        assert test_converter.to_xyz() is None
        assert test_converter.to_rgb() is None
def test_safely_save_mapping_coin_address_exists(self):
    """An already-known coin address must raise and block the save."""
    entry = MappingEntry(coin_address='34435', waves_address='3432')
    storage = self._map_storage

    # waves address is new, coin address is a duplicate
    with patch.multiple(storage,
                        waves_address_exists=MagicMock(return_value=False),
                        coin_address_exists=MagicMock(return_value=True),
                        save_mapping=MagicMock()):
        with self.assertRaises(DuplicateMappingError):
            storage.safely_save_mapping(entry)

        storage.waves_address_exists.assert_called_once_with(
            entry.waves_address)
        storage.coin_address_exists.assert_called_once_with(
            entry.coin_address)
        storage.save_mapping.assert_not_called()
def qgs_access_control_filter():
    """
    Mock some QgsAccessControlFilter methods:

    - __init__ which does not accept a mocked QgsServerInterface;
    - serverInterface to return the right server_iface.
    """

    class _FakeAccessControlFilter:
        def __init__(self, server_iface):
            self.server_iface = server_iface

        def serverInterface(self):  # noqa
            return self.server_iface

    with patch.multiple(
            'geomapfish_qgisserver.accesscontrol.QgsAccessControlFilter',
            __init__=_FakeAccessControlFilter.__init__,
            serverInterface=_FakeAccessControlFilter.serverInterface,
    ) as mocks:
        yield mocks
def test_insertData(self):
    """insertData sets attributes from the payload and returns new epubs."""
    payload = {'title': 'Test Instance'}
    instance = Instance()

    # Stub out every helper insertData delegates to; only insertItems
    # contributes to the return value.
    helper_names = (
        'cleanData', 'addAgents', 'addIdentifiers', 'addAltTitles',
        'addMeasurements', 'addLinks', 'addDates', 'addRights',
        'insertLanguages', 'insertItems',
    )
    with patch.multiple(Instance,
                        **{name: DEFAULT for name in helper_names}) as inst_mocks:
        inst_mocks['insertItems'].return_value = ['epub']
        new_epubs = instance.insertData(payload)

        self.assertEqual(instance.title, 'Test Instance')
        self.assertEqual(new_epubs, ['epub'])
def setUp(self) -> None:
    """Patch the settings classes in camguard.bridge_api and build the SUT."""
    # MotionDetectorSettings mock: dummy implementation selected, and
    # load_settings hands back the mock itself.
    md_settings = create_autospec(spec=MotionDetectorSettings, spec_set=True)
    type(md_settings).impl_type = PropertyMock(return_value=ImplementationType.DUMMY)
    md_settings.load_settings = MagicMock(return_value=md_settings)
    self._md_settings_mock = md_settings

    # DummyGpioSensorSettings mock with the same load_settings behaviour.
    sensor_settings = create_autospec(spec=DummyGpioSensorSettings, spec_set=True)
    sensor_settings.load_settings = MagicMock(return_value=sensor_settings)
    self._dummy_sensor_settings_mock = sensor_settings

    self._patcher = patch.multiple("camguard.bridge_api",
                                   MotionDetectorSettings=md_settings,
                                   DummyGpioSensorSettings=sensor_settings)
    self._patcher.start()

    self._config_path = "."
    # motion detector mocked with dummy gpio sensor settings by default
    self.sut = MotionDetector(self._config_path)
def setUp(self) -> None:
    """Patch the settings classes in camguard.bridge_api and build the SUT."""
    # FileStorageSettings mock: dummy implementation selected, and
    # load_settings hands back the mock itself.
    fs_settings = create_autospec(spec=FileStorageSettings, spec_set=True)
    type(fs_settings).impl_type = PropertyMock(return_value=ImplementationType.DUMMY)
    fs_settings.load_settings = MagicMock(return_value=fs_settings)
    self._fs_settings_mock = fs_settings

    # DummyGDriveStorageSettings mock with the same load_settings behaviour.
    storage_settings = create_autospec(spec=DummyGDriveStorageSettings, spec_set=True)
    storage_settings.load_settings = MagicMock(return_value=storage_settings)
    self._dummy_storage_settings_mock = storage_settings

    self._patcher = patch.multiple("camguard.bridge_api",
                                   FileStorageSettings=fs_settings,
                                   DummyGDriveStorageSettings=storage_settings)
    self._patcher.start()

    self._config_path = "."
    # file storage mocked with dummy gdrive storage settings by default
    self.sut = FileStorage(self._config_path)
def test_node_name_function(self):
    """node_name() yields the name of element/attribute/namespace nodes,
    None for non-node values, and unwraps typed wrappers."""
    element = ElementTree.Element('root')
    attribute = AttributeNode('a1', '20')
    ns_node = NamespaceNode('xs', 'http://www.w3.org/2001/XMLSchema')

    for node, expected in ((element, 'root'),
                           (attribute, 'a1'),
                           (ns_node, 'xs')):
        self.assertEqual(node_name(node), expected)

    # Non-node arguments produce None.
    self.assertIsNone(node_name(()))
    self.assertIsNone(node_name(None))

    with patch.multiple(DummyXsdType, is_simple=lambda x: True):
        schema_type = DummyXsdType()
        self.assertEqual(
            node_name(TypedElement(elem=element, xsd_type=schema_type, value=10)),
            'root')
        self.assertEqual(
            node_name(TypedAttribute(attribute=attribute, xsd_type=schema_type, value=20)),
            'a1')
def test_artifactory_api_service_should_update_returns_true_if_ldap_group_settings_are_different(
        self):
    """should_update() reports True when the remote LDAP group settings
    differ from the configured ones."""
    ldap_settings = {'ldapSetting': self.LDAP_SETTING}
    ldap_group_settings = {
        'ldapGroupSetting': {
            "name": "prod-ldap-group2",
            "groupBaseDn": "dc=example,dc=com"
        }
    }

    service_cls = artifactory_system_configuration.ArtifactoryLdapApiService
    with patch.multiple(service_cls, get_ldap_configs=DEFAULT) as mocked:
        # Remote config differs from LDAP_GROUP_SETTING, forcing an update.
        mocked['get_ldap_configs'].return_value = (ldap_settings,
                                                   ldap_group_settings)
        service = service_cls(
            self.DOMAIN, self.USERNAME, self.PASSWORD,
            self.LDAP_SETTING, self.LDAP_GROUP_SETTING, "present")
        assert service.should_update()
def test_update_resource_catalogue_cache_autoremove_unsupported_resource(self):
    """Resources whose template fails to parse get deleted when the
    autoremove migration setting is enabled; parsable ones are saved."""
    mac1, mac2 = self.build_mac_mocks()

    mac1_parser = Mock()
    mac1_parser.get_resource_info.return_value = "mac1_json"

    orm = Mock()
    orm.CatalogueResource.objects.all.return_value = TestQueryResult([mac1, mac2])

    with patch.multiple('wirecloud.catalogue.utils',
                        WgtFile=DEFAULT,
                        TemplateParser=DEFAULT,
                        autospec=True) as context:
        # First resource parses cleanly, second raises a parse error.
        context['TemplateParser'].side_effect = (mac1_parser,
                                                 TemplateParseException('test'))
        with self.settings(WIRECLOUD_REMOVE_UNSUPPORTED_RESOURCES_MIGRATION=True):
            update_resource_catalogue_cache(orm)

        self.assertTrue(mac1.save.called)
        self.assertFalse(mac1.delete.called)
        self.assertTrue(mac2.delete.called)
def test_bbcleanup(self):
    """bbcleanup renames the submission pdf and drops the empty metadata txt."""
    fs = mockfs.MockFileSystem()
    fs.setDictionary({
        'Chapter 13 Problems_jdoe3_attempt_2014-04-30-21-02-38_chapter (13).pdf': '',
        'Chapter 13 Problems_jdoe3_attempt_2014-04-30-21-02-38.txt': '''
There are no student comments for this assignment
There is no student submission text data for this assignment.
''',
    })
    # Route every filesystem touchpoint of bbcleanup onto the mock fs.
    with patch.multiple(os, listdir=fs.listdir, rename=fs.rename,
                        chdir=fs.chdir, remove=fs.remove), \
            patch('os.path.isfile', fs.isfile), \
            patch('bbcleanup.getFileContents', fs.getFileContents), \
            patch('sys.argv', ['', 'anything']):
        bbcleanup()
        self.assertListEqual(['jdoe3-chapter13.pdf'], fs.listdir())
def setup_class(cls):
    """Stand up an isolated JupyterLab server for the whole test class.

    Builds a throwaway HOME/data/config/runtime directory tree, patches the
    environment and jupyter_core search paths to point at it, then boots a
    LabApp in a daemon thread and blocks until it answers.
    """
    cls.tmp_dir = TemporaryDirectory()

    def tmp(*parts):
        # Create (idempotently) and return a directory under the temp root.
        path = os.path.join(cls.tmp_dir.name, *parts)
        try:
            os.makedirs(path)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        return path

    cls.home_dir = tmp('home')
    data_dir = cls.data_dir = tmp('data')
    config_dir = cls.config_dir = tmp('config')
    runtime_dir = cls.runtime_dir = tmp('runtime')
    lab_dir = cls.lab_dir = tmp('lab')
    lab_settings = cls.lab_settings = tmp('labsettings')
    lab_workspaces = cls.lab_workspaces = tmp('labworkspaces')
    cls.notebook_dir = tmp('notebooks')

    # Point every Jupyter lookup at the sandbox so nothing touches the
    # real user configuration.
    cls.env_patch = patch.dict('os.environ', {
        'HOME': cls.home_dir,
        'PYTHONPATH': os.pathsep.join(sys.path),
        'IPYTHONDIR': pjoin(cls.home_dir, '.ipython'),
        'JUPYTER_NO_CONFIG': '1',  # needed in the future
        'JUPYTER_CONFIG_DIR': config_dir,
        'JUPYTER_DATA_DIR': data_dir,
        'JUPYTER_RUNTIME_DIR': runtime_dir,
        'JUPYTERLAB_DIR': lab_dir,
        'JUPYTERLAB_SETTINGS_DIR': lab_settings
    })
    cls.env_patch.start()
    # jupyter_core caches its search paths as module globals, so they must
    # be patched directly in addition to the environment.
    cls.path_patch = patch.multiple(
        jupyter_core.paths,
        SYSTEM_JUPYTER_PATH=[tmp('share', 'jupyter')],
        ENV_JUPYTER_PATH=[tmp('env', 'share', 'jupyter')],
        SYSTEM_CONFIG_PATH=[tmp('etc', 'jupyter')],
        ENV_CONFIG_PATH=[tmp('env', 'etc', 'jupyter')],
    )
    cls.path_patch.start()

    config = cls.config or Config()
    config.NotebookNotary.db_file = ':memory:'
    cls.token = hexlify(os.urandom(4)).decode('ascii')
    started = Event()

    def start_thread():
        # A fresh thread has no event loop; give it one for tornado/asyncio.
        if 'asyncio' in sys.modules:
            import asyncio
            asyncio.set_event_loop(asyncio.new_event_loop())
        app = cls.notebook = LabApp(
            app_dir=lab_dir,
            port=cls.port,
            user_settings_dir=lab_settings,
            workspaces_dir=lab_workspaces,
            port_retries=0,
            open_browser=False,
            config_dir=cls.config_dir,
            data_dir=cls.data_dir,
            runtime_dir=cls.runtime_dir,
            notebook_dir=cls.notebook_dir,
            base_url=cls.url_prefix,
            config=config,
            allow_root=True,
            token=cls.token,
        )
        # don't register signal handler during tests
        app.init_signal = lambda: None
        # clear log handlers and propagate to root for nose to capture it
        # needs to be redone after initialize, which reconfigures logging
        app.log.propagate = True
        app.log.handlers = []
        app.initialize(argv=[])
        app.log.propagate = True
        app.log.handlers = []
        loop = IOLoop.current()
        loop.add_callback(started.set)
        try:
            app.start()
        finally:
            # set the event, so failure to start doesn't cause a hang
            started.set()
            app.session_manager.close()

    cls.notebook_thread = Thread(target=start_thread)
    cls.notebook_thread.daemon = True
    cls.notebook_thread.start()
    started.wait()
    cls.wait_until_alive()
    # NOTE(review): the enclosing ``def`` of this comparison helper starts
    # above this chunk; presumably it is the ``_node_eq`` referenced below.
    # It compares two nodes' attribute dicts, ignoring source location, and
    # raises instead of returning False when only the inferred types differ.
    d1 = dict(self.__dict__)
    d1.pop("location", None)
    t1 = d1.pop("__type__", None)
    d2 = dict(other.__dict__)
    d2.pop("location", None)
    t2 = d2.pop("__type__", None)
    if d1 == d2:
        if t1 == t2:
            return True
        else:
            # Attributes match but types do not: surface this loudly rather
            # than reporting plain inequality.
            raise AssertionError("Types mismatch {!r} != {!r} for expression "
                                 "`{!r}`".format(t1, t2, self))
    else:
        return False


# Patcher that installs the lenient node comparison onto Node for tests.
NODE_EQ_PATCHER = patch.multiple(Node, __eq__=_node_eq, __ne__=_ne)


def _strict_type_eq(self, other):
    """Strict equality: identical concrete type and identical attributes,
    ignoring interpreter-managed and back-reference entries."""
    if type(self) is not type(other):
        return False
    d1 = dict(self.__dict__)
    d1.pop("__dict__")
    d1.pop("__weakref__")
    d1.pop("__backref__", None)
    d2 = dict(other.__dict__)
    d2.pop("__dict__")
    d2.pop("__weakref__")
    d2.pop("__backref__", None)
    return d1 == d2
def test_mail_does_not_contain_html_part(self):
    """With COMMENTS_XTD_SEND_HTML_EMAIL disabled, posting a comment sends
    exactly one notification mail and its html part is None.

    Uses ``assertEqual``/``assertIsNone`` instead of the deprecated
    ``assert_`` alias (removed in Python 3.12) -- same pass/fail behaviour,
    but with informative failure messages.
    """
    with patch.multiple('django_comments_xtd.conf.settings',
                        COMMENTS_XTD_SEND_HTML_EMAIL=False):
        self.client.post(reverse("comments-post-comment"), data=self.data)
        self.assertEqual(self.mock_mailer.call_count, 1)
        self.assertIsNone(self.mock_mailer.call_args[1]['html'])
def namedconf():
    """Yield the path of a temporary named.conf with ``paths`` patched to it."""
    with tempfile.NamedTemporaryFile('w+') as conf_file, \
            patch.multiple(paths,
                           NAMED_CONF=conf_file.name,
                           NAMED_CRYPTO_POLICY_FILE=POLICY_FILE):
        yield conf_file.name
def patch_multiple(self, *args, **kwargs):
    """Start a ``patch.multiple`` patcher that is undone at test teardown.

    Arguments are forwarded verbatim to ``unittest.mock.patch.multiple``;
    the started patcher's ``stop`` is registered via ``addCleanup`` so the
    patch never outlives the test. Returns whatever ``start()`` yields.
    """
    started = patch.multiple(*args, **kwargs)
    replaced = started.start()
    self.addCleanup(started.stop)
    return replaced