def test_pip(self, mock_run):
    """_pip() bootstraps pip via easy_install3, then runs the expected
    sequence of pip installs (requirements, packages, local source)."""
    self.options.requirements = 'requirements.txt'
    self.options.python_packages = ['test', 'packages']
    plugin = python3.Python3Plugin('test-part', self.options,
                                   self.project_options)
    setup_directories(plugin)

    usr_prefix = os.path.join(plugin.installdir, 'usr')
    easy_install_bin = os.path.join(usr_prefix, 'bin', 'easy_install3')
    pip3_bin = os.path.join(usr_prefix, 'bin', 'pip3')
    # Every pip invocation shares this prefix of arguments.
    base_pip_cmd = ['python3', pip3_bin, 'install', '--root',
                    plugin.installdir, '--install-option=--prefix=usr']
    reqs_file = os.path.join(plugin.sourcedir, 'requirements.txt')
    expected_calls = [
        mock.call(['python3', easy_install_bin, '--prefix', usr_prefix,
                   'pip']),
        mock.call(base_pip_cmd + ['--requirement', reqs_file]),
        mock.call(base_pip_cmd + ['--upgrade', 'test', 'packages']),
        mock.call(base_pip_cmd + ['.'], cwd=plugin.sourcedir),
    ]

    plugin._pip()

    mock_run.assert_has_calls(expected_calls)
def test_looking_for_driver_no_drivers(self):
    """With no nearby drivers, the user is informed and no driver is set."""
    user = {
        'chat_id': 0,
        'current_location': [0., 0.]
    }
    # Resolve the driver lookup with an empty result set.
    no_drivers_future = Future()
    no_drivers_future.set_result([])
    self.users.get_drivers_within_distance = mock.MagicMock(
        return_value=no_drivers_future)

    yield self.stage.run(user, {})

    self.stage.sender.assert_has_calls([
        mock.call({
            'chat_id': 0,
            'text': 'looking for a driver'
        }),
        mock.call({
            'chat_id': 0,
            'text': 'no available drivers found'
        })
    ])
    self.assertEqual(2, self.stage.sender.call_count)
    self.assertEqual(None, user['proposed_driver'])
def test_adding_return_value_mock(self): for Klass in Mock, MagicMock: mock = Klass() mock.return_value = MagicMock() mock()() self.assertEqual(mock.mock_calls, [call(), call()()])
def test_setup_full_config(self, mock_uvc, mock_remote):
    """Test the setup with full configuration."""
    # Bug fix: both docstrings started with four quotes (""""), leaving a
    # stray leading quote inside the docstring text.
    config = {
        'platform': 'uvc',
        'nvr': 'foo',
        'port': 123,
        'key': 'secret',
    }
    fake_cameras = [
        {'uuid': 'one', 'name': 'Front', 'id': 'id1'},
        {'uuid': 'two', 'name': 'Back', 'id': 'id2'},
        {'uuid': 'three', 'name': 'Old AirCam', 'id': 'id3'},
    ]

    def fake_get_camera(uuid):
        """Create a fake camera."""
        # id3 is the airCam model, which is filtered out below.
        if uuid == 'id3':
            return {'model': 'airCam'}
        else:
            return {'model': 'UVC'}

    mock_remote.return_value.index.return_value = fake_cameras
    mock_remote.return_value.get_camera.side_effect = fake_get_camera
    mock_remote.return_value.server_version = (3, 2, 0)

    assert setup_component(self.hass, 'camera', {'camera': config})

    # The NVR client is built once, from the configured host/port/key.
    self.assertEqual(mock_remote.call_count, 1)
    self.assertEqual(
        mock_remote.call_args, mock.call('foo', 123, 'secret')
    )
    # Only the two UVC-model cameras are added; the airCam is skipped.
    mock_uvc.assert_has_calls([
        mock.call(mock_remote.return_value, 'id1', 'Front'),
        mock.call(mock_remote.return_value, 'id2', 'Back'),
    ])
def test_build_local_sources(self):
    """Building from a local source runs npm twice and provisions node."""
    class LocalOptions:
        source = '.'
        node_packages = []
        node_engine = '4'

    plugin = nodejs.NodePlugin('test-part', LocalOptions(),
                               self.project_options)
    os.makedirs(plugin.sourcedir)
    # An empty package.json is enough to trigger the npm install path.
    with open(os.path.join(plugin.sourcedir, 'package.json'), 'w'):
        pass

    plugin.build()

    self.run_mock.assert_has_calls([
        mock.call(['npm', '--cache-min=Infinity', 'install'],
                  cwd=plugin.builddir),
        mock.call(['npm', '--cache-min=Infinity', 'install', '--global'],
                  cwd=plugin.builddir)])
    self.tar_mock.assert_has_calls([
        mock.call(
            nodejs.get_nodejs_release(plugin.options.node_engine),
            path.join(os.path.abspath('.'), 'parts', 'test-part', 'npm')),
        mock.call().provision(
            plugin.installdir, clean_target=False, keep_tarball=True)])
def test_see_service(self, mock_see):
    """Test the see service with a unicode dev_id and NO MAC."""
    with assert_setup_component(1, device_tracker.DOMAIN):
        assert setup_component(self.hass, device_tracker.DOMAIN,
                               TEST_PLATFORM)
    params = {
        'dev_id': 'some_device',
        'host_name': 'example.com',
        'location_name': 'Work',
        'gps': [.3, .8],
        'attributes': {
            'test': 'test'
        }
    }
    device_tracker.see(self.hass, **params)
    self.hass.block_till_done()
    # Bug fix: the call_count assertion was duplicated verbatim in both
    # halves of this test; assert it once per call.
    assert mock_see.call_count == 1
    assert mock_see.call_args == call(**params)

    mock_see.reset_mock()
    params['dev_id'] += chr(233)  # e' acute accent from icloud

    device_tracker.see(self.hass, **params)
    self.hass.block_till_done()
    assert mock_see.call_count == 1
    assert mock_see.call_args == call(**params)
def test_validate_true(self):
    """validate() returns True when every catalogue text's checksum
    matches the checksum recorded in the data store."""
    corpus = MagicMock(spec_set=tacl.Corpus)
    text = MagicMock(spec_set=tacl.Text)
    text.get_checksum.return_value = sentinel.checksum
    text.get_names.return_value = (sentinel.name, sentinel.siglum)
    corpus.get_texts.return_value = (text,)
    # Three catalogue entries drive three corpus/database lookups below.
    catalogue = collections.OrderedDict(
        [(sentinel.text1, sentinel.label1),
         (sentinel.text2, sentinel.label2),
         (sentinel.text3, sentinel.label1)])
    store = tacl.DataStore(':memory:')
    store._conn = MagicMock(spec_set=sqlite3.Connection)
    cursor = store._conn.execute.return_value
    # The stored checksum equals the text's checksum, so validation passes.
    cursor.fetchone.return_value = {'checksum': sentinel.checksum}
    actual_result = store.validate(corpus, catalogue)
    corpus.get_texts.assert_has_calls([
        call(sentinel.text1), call(sentinel.text2), call(sentinel.text3)])
    # Exactly one SELECT + fetchone per catalogue entry, in order.
    self.assertEqual(store._conn.mock_calls,
                     [call.execute(tacl.constants.SELECT_TEXT_SQL,
                                   [sentinel.name, sentinel.siglum]),
                      call.execute().fetchone(),
                      call.execute(tacl.constants.SELECT_TEXT_SQL,
                                   [sentinel.name, sentinel.siglum]),
                      call.execute().fetchone(),
                      call.execute(tacl.constants.SELECT_TEXT_SQL,
                                   [sentinel.name, sentinel.siglum]),
                      call.execute().fetchone()])
    self.assertEqual(actual_result, True)
def test_provision_convert_pt(self):
    """Provisioning an allocation flagged convert_trial_project converts
    the user's trial project instead of creating a new one."""
    self.allocation.project_id = None
    self.allocation.project_name = None
    self.allocation.convert_trial_project = True
    with test.nested(
        mock.patch.object(self.manager, 'k_client'),
        mock.patch.object(self.manager, 'convert_trial'),
        mock.patch.object(self.manager, 'set_quota'),
        mock.patch.object(self.manager, 'quota_report'),
        mock.patch.object(self.manager, 'notify_provisioned'),
        mock.patch.object(self.manager, 'update_allocation'),
        mock.patch.object(self.manager, 'revert_expiry'),
        mock.patch('nectar_tools.expiry.archiver.DesignateArchiver'),
    ) as (mock_keystone, mock_convert, mock_quota, mock_report,
          mock_notify, mock_update, mock_revert, mock_designate):
        mock_update.return_value = self.allocation
        project = fakes.FakeProject()
        mock_convert.return_value = project
        # No pre-existing keystone project with this name.
        mock_keystone.projects.find.side_effect = keystone_exc.NotFound()
        mock_designate.return_value = mock.Mock()

        self.manager.provision(self.allocation)

        mock_convert.assert_called_once_with(self.allocation)
        # Bug fix: 'mock_designate.create_resources.called_once_with()' was
        # a no-op (an auto-created Mock attribute, not an assertion, and on
        # the class mock rather than the instance). Assert on the archiver
        # instance that provision() actually uses.
        mock_designate.return_value.create_resources.assert_called_once_with()
        mock_quota.assert_called_once_with(self.allocation)
        mock_report.assert_called_once_with(self.allocation, html=True,
                                            show_current=False)
        mock_notify.assert_called_once_with(self.allocation, True, project,
                                            mock_report.return_value)
        update_calls = [mock.call(self.allocation, project_id=project.id),
                        mock.call(self.allocation, provisioned=True)]
        mock_update.assert_has_calls(update_calls)
        mock_revert.assert_not_called()
def test_flavor_grant(self, mock_get_nova):
    """Only flavors whose flavor_class matches get project access."""
    nova_client = mock.Mock()
    mock_get_nova.return_value = nova_client

    def make_flavor(name, extra_specs):
        # get_keys() returns the flavor's extra_specs mapping.
        flavor = mock.Mock(get_keys=lambda: extra_specs)
        flavor.name = name
        return flavor

    compute_specs = {'flavor_class:name': 'compute'}
    small = make_flavor('c3.small', compute_specs)
    medium = make_flavor('c3.medium', compute_specs)
    large = make_flavor('c3.large', compute_specs)
    other = make_flavor('c1.small', {'flavor_class:name': 'standard'})
    no_prefix = make_flavor('custom-flavor', {'foo': 'bar'})
    nova_client.flavors.list.return_value = [
        small, medium, large, other, no_prefix]

    self.manager.flavor_grant(self.allocation, 'compute')

    expected = [
        mock.call(small, self.allocation.project_id),
        mock.call(medium, self.allocation.project_id),
        mock.call(large, self.allocation.project_id),
    ]
    nova_client.flavor_access.add_tenant_access.assert_has_calls(expected)
def test_build(self, _, run_mock):
    """build() runs 'cargo test' then 'cargo install' into installdir."""
    plugin = rust.RustPlugin("test-part", self.options,
                             self.project_options)
    os.makedirs(plugin.sourcedir)

    plugin.build()

    jobs_flag = "-j{}".format(plugin.project.parallel_build_count)
    self.assertThat(run_mock.call_count, Equals(2))
    run_mock.assert_has_calls([
        mock.call(
            [plugin._cargo, "test", jobs_flag],
            env=plugin._build_env(),
        ),
        mock.call(
            [plugin._cargo, "install", jobs_flag,
             "--root", plugin.installdir, "--path", plugin.builddir],
            env=plugin._build_env(),
        ),
    ])
def test_download_build(mock_cc, mock_sp, capsys):
    """download-build issues one recursive wget per succeeded chroot."""
    mock_client = MagicMock(no_config=False)
    mock_client.get_build_details.return_value = \
        MagicMock(
            data={"chroots": {
                u'epel-6-x86_64': u'succeeded',
                u'epel-6-i386': u'succeeded'
            }},
            results_by_chroot={
                u'epel-6-x86_64':
                    u'http://example.com/results/epel-6-x86_64/python-copr-1.50-1.fc20',
                u'epel-6-i386':
                    u'http://example.com/results/epel-6-i386/python-copr-1.50-1.fc20',
            }
        )
    mock_cc.create_from_file_config.return_value = mock_client
    mock_sp.call.return_value = None
    main.main(argv=["download-build", "foo"])
    stdout, stderr = capsys.readouterr()
    # Each wget downloads into a per-chroot directory, stripping the
    # leading 6 path components from the result URL.
    expected_sp_call_args = [
        mock.call([
            'wget', '-r', '-nH', '--no-parent', '--reject',
            "'index.html*'", '-P', u'./epel-6-x86_64', '--cut-dirs', '6',
            'http://example.com/results/epel-6-x86_64/python-copr-1.50-1.fc20'
        ]),
        mock.call([
            'wget', '-r', '-nH', '--no-parent', '--reject',
            "'index.html*'", '-P', u'./epel-6-i386', '--cut-dirs', '6',
            'http://example.com/results/epel-6-i386/python-copr-1.50-1.fc20'
        ])
    ]
    assert mock_sp.call.call_args_list == expected_sp_call_args
def test_load(self):
    """R1 := [A1], R2 := [A2]."""
    addr1, val1 = 5, 123456
    addr2, val2 = 10, 654321
    self.ram.put(addr1, val1, WORD_SIZE)
    self.ram.put(addr2, val2, WORD_SIZE)
    self.control_unit.address1 = addr1
    self.control_unit.address2 = addr2
    # Arithmetic and compare opcodes load both operand registers.
    for opcode in ARITHMETIC_OPCODES | {OP_COMP}:
        self.registers.put.reset_mock()
        self.control_unit.opcode = opcode
        self.control_unit.load()
        self.registers.put.assert_has_calls([
            call("R1", val1, WORD_SIZE),
            call("R2", val2, WORD_SIZE)])
    # MOVE loads only R1, from the second operand's value.
    for opcode in {OP_MOVE}:
        self.registers.put.reset_mock()
        self.control_unit.opcode = opcode
        self.control_unit.load()
        self.registers.put.assert_called_once_with("R1", val2, WORD_SIZE)
    # Jumps load the target address, not an operand value.
    for opcode in CONDJUMP_OPCODES | {OP_JUMP}:
        self.registers.put.reset_mock()
        self.control_unit.opcode = opcode
        self.control_unit.load()
        self.registers.put.assert_called_once_with("ADDR", addr1, BYTE_SIZE)
    # HALT loads nothing.
    for opcode in {OP_HALT}:
        self.registers.put.reset_mock()
        self.control_unit.opcode = opcode
        self.control_unit.load()
        assert not self.registers.put.called
def test_pull_with_revision(self, run_mock, script_mock):
    """Pinning rust_revision installs that revision, then fetches deps."""
    plugin = rust.RustPlugin("test-part", self.options,
                             self.project_options)
    os.makedirs(plugin.sourcedir)
    plugin.options.rust_revision = "1.13.0"
    plugin.options.rust_channel = ""

    plugin.pull()

    rustdir = os.path.join(plugin.partdir, "rust")
    rustup_cmd = [
        os.path.join(rustdir, "rustup.sh"),
        "--prefix={}".format(rustdir),
        "--disable-sudo",
        "--save",
        "--revision=1.13.0",
    ]
    cargo_fetch_cmd = [
        plugin._cargo,
        "fetch",
        "--manifest-path",
        os.path.join(plugin.sourcedir, "Cargo.toml"),
    ]
    self.assertThat(run_mock.call_count, Equals(2))
    run_mock.assert_has_calls([
        mock.call(rustup_cmd),
        mock.call(cargo_fetch_cmd, env=plugin._build_env()),
    ])
def test_build_with_kconfigfile(self, run_mock, check_call_mock):
    """A provided kconfigfile becomes .config and drives the make steps."""
    self.options.kconfigfile = 'config'
    with open(self.options.kconfigfile, 'w') as f:
        f.write('ACCEPT=y\n')
    plugin = kbuild.KBuildPlugin('test-part', self.options,
                                 self.project_options)
    os.makedirs(plugin.builddir)

    plugin.build()

    # oldconfig is driven non-interactively through a shell pipe.
    self.assertThat(check_call_mock.call_count, Equals(1))
    check_call_mock.assert_has_calls([
        mock.call('yes "" | make -j2 oldconfig', shell=True,
                  cwd=plugin.builddir),
    ])
    self.assertThat(run_mock.call_count, Equals(2))
    run_mock.assert_has_calls([
        mock.call(['make', '-j2']),
        mock.call(['make', '-j2',
                   'CONFIG_PREFIX={}'.format(plugin.installdir),
                   'install'])
    ])

    # The kconfigfile contents must have been copied into .config verbatim.
    config_file = os.path.join(plugin.builddir, '.config')
    self.assertTrue(os.path.exists(config_file))
    with open(config_file) as f:
        config_contents = f.read()
    self.assertThat(config_contents, Equals('ACCEPT=y\n'))
def run_fetch(self, value, opcode, instruction_size, r2=True):
    """Run one fetch test."""
    address1 = 10
    address2 = 42
    self.ram.put(address1, value, instruction_size)
    # PC advances by the instruction size expressed in RAM words.
    increment = instruction_size // self.ram.word_size
    # pylint: disable=no-member
    self.registers.fetch.reset_mock()
    self.registers.put.reset_mock()

    def get_register(name, size):
        """Get PC."""
        if name == "PC":
            assert size == 2 * BYTE_SIZE
            return address1
        elif name == "R2":
            assert size == WORD_SIZE
            return address2
        else:
            raise KeyError()

    self.registers.fetch.side_effect = get_register

    self.control_unit.fetch_and_decode()
    if r2:
        # Instructions with a register operand also fetch R2.
        self.registers.fetch.assert_has_calls([
            call("PC", 2 * BYTE_SIZE),
            call("R2", WORD_SIZE)])
    else:
        self.registers.fetch.assert_any_call("PC", 2 * BYTE_SIZE)
    # The fetched word lands in RI and PC is advanced past it.
    self.registers.put.assert_has_calls([
        call("RI", value, WORD_SIZE),
        call("PC", address1 + increment, 2 * BYTE_SIZE)])
    assert self.control_unit.opcode == opcode
def test_websocket_upgrade_read_write(self):
    """A probe/upgrade handshake followed by one message in each
    direction marks the socket upgraded and relays both payloads."""
    mock_server = self._get_mock_server()
    s = socket.Socket(mock_server, 'sid')
    s.connected = True
    s.queue.join = mock.MagicMock(return_value=None)
    foo = six.text_type('foo')
    bar = six.text_type('bar')
    probe = six.text_type('probe')
    # Outgoing side: one MESSAGE packet, then the poll loop ends on IOError.
    s.poll = mock.MagicMock(side_effect=[
        [packet.Packet(packet.MESSAGE, data=bar)], IOError])
    ws = mock.MagicMock()
    # Incoming side: PING probe, UPGRADE, one MESSAGE, then close (None).
    ws.wait.side_effect = [
        packet.Packet(packet.PING, data=probe).encode(
            always_bytes=False),
        packet.Packet(packet.UPGRADE).encode(always_bytes=False),
        packet.Packet(packet.MESSAGE, data=foo).encode(
            always_bytes=False),
        None]
    s._websocket_handler(ws)
    time.sleep(0)
    self.assertTrue(s.upgraded)
    # The incoming message and the final disconnect both fire events.
    self.assertEqual(mock_server._trigger_event.call_count, 2)
    mock_server._trigger_event.assert_has_calls([
        mock.call('message', 'sid', 'foo'),
        mock.call('disconnect', 'sid')])
    # '4' is the MESSAGE packet type prefix on the wire.
    ws.send.assert_called_with('4bar')
def test_run_ansible_with_check(self, mc_check_for_ans_errror):
    """run_ansible_with_check returns the raw ansible results when the
    error check passes, and raises AnsibleCallError when it fails."""
    builder = self.get_test_builder()
    cmd = "cmd"
    module_name = "module_name"
    as_root = True
    err_codes = [1, 3, 7, ]
    success_codes = [0, 255]
    results = mock.MagicMock()
    # NOTE(review): err_results is a MagicMock expanded with ** below;
    # a MagicMock iterates as empty, so this contributes no kwargs —
    # confirm that is the intent rather than a real error payload.
    err_results = mock.MagicMock()
    mc_check_for_ans_errror.return_value = (False, [])
    builder._run_ansible = mock.MagicMock()
    builder._run_ansible.return_value = results

    got_results = builder.run_ansible_with_check(
        cmd, module_name, as_root, err_codes, success_codes)

    # The underlying _run_ansible result is passed straight through.
    assert results == got_results
    expected_call_run = mock.call(cmd, module_name, as_root)
    assert expected_call_run == builder._run_ansible.call_args
    expected_call_check = mock.call(results, builder.hostname,
                                    err_codes, success_codes)
    assert expected_call_check == mc_check_for_ans_errror.call_args

    # A failing error check is converted into AnsibleCallError.
    mc_check_for_ans_errror.side_effect = AnsibleResponseError(
        msg="err message", **err_results)
    with pytest.raises(AnsibleCallError):
        builder.run_ansible_with_check(
            cmd, module_name, as_root, err_codes, success_codes)
def test_load_profile(skip_message, isfile_retval):
    """test load_profile func."""
    m_open = mock.mock_open()
    m_open_path = 'melissa.profile_loader.open'
    with mock.patch('melissa.profile_loader.profile_populator') as m_pp, \
            mock.patch('melissa.profile_loader.os') as m_os, \
            mock.patch('sys.stdout', new_callable=StringIO) as m_stdout, \
            mock.patch(m_open_path, m_open, create=True), \
            mock.patch('melissa.profile_loader.json') as m_json:
        # Whether profile.json already exists drives the populator branch.
        m_os.path.isfile.return_value = isfile_retval
        # Import after patching so the module-level names are the mocks.
        from melissa.profile_loader import load_profile
        from melissa.utilities import json_decode as jd
        res = load_profile(skip_message=skip_message)
        # testing
        assert res == m_json.load.return_value
        m_os.path.isfile.assert_called_once_with('profile.json')
        if isfile_retval:
            # Existing profile: nothing to populate.
            m_pp.assert_not_called()
        else:
            m_pp.assert_called_once_with()
        if skip_message:
            assert m_stdout.getvalue() == ''
        else:
            assert "Loading profile data" in m_stdout.getvalue()
        m_open.assert_has_calls(
            [mock.call('profile.json'), mock.call().close()])
        m_json.load.assert_called_once_with(
            m_open.return_value, object_hook=jd.decode_dict)
def test_successful_login_with_2fa(
        self, mock_login, mock_get_account_information):
    """A 2FA challenge prompts for a second factor and retries the login
    with the one-time password."""
    # First input is the email, second the one-time password.
    self.mock_input.side_effect = ('*****@*****.**', '123456')
    # First login attempt demands 2FA; the retry succeeds.
    mock_login.side_effect = [
        storeapi.errors.StoreTwoFactorAuthenticationRequired(),
        None]

    # no exception raised.
    result = self.run_command(['login'])

    self.assertThat(result.exit_code, Equals(0))
    self.assertThat(result.output, Not(Contains(
        storeapi.constants.TWO_FACTOR_WARNING)))
    self.assertThat(result.output, Contains('Login successful.'))
    self.assertThat(self.mock_input.call_count, Equals(2))
    self.mock_input.assert_has_calls([
        mock.call('Email: '),
        mock.call('Second-factor auth: ')])
    self.assertThat(mock_login.call_count, Equals(2))
    # The retry repeats the login with one_time_password supplied.
    mock_login.assert_has_calls([
        mock.call(
            '*****@*****.**', mock.ANY, acls=None,
            packages=None, channels=None, save=True),
        mock.call(
            '*****@*****.**', mock.ANY,
            one_time_password='******', acls=None, packages=None,
            channels=None, save=True)])
def test_tx_scanner(self):
    """Every transaction in a scanned block is passed to the observers."""
    node_backend = Mock()
    block = Mock()
    node_backend.get_block.side_effect = lambda block_num: block
    txs = [Mock(), Mock(), Mock()]
    node_backend.get_transactions_from_block.side_effect = (
        lambda d: list(txs))
    scanner = BitcoinScanner([123], node_backend)
    observer = Mock()
    scanner.transactions_observers.append(observer)

    scanner.scan()

    node_backend.get_transactions_from_block.assert_called_once_with(block)
    node_backend.get_block.assert_called_once_with(block_num=123)
    # One observer notification per transaction, in block order.
    self.assertEqual(
        [call(tx) for tx in txs],
        observer.on_transaction.call_args_list
    )
def test_both_new_matches_sent_emails(self, mock_send_to_school,
                                      mock_send_email):
    """Each user with a new match receives exactly one match email."""
    report1 = self.create_match(self.user1, 'dummy')
    report2 = self.create_match(self.user2, 'dummy')

    find_matches()

    mock_send_email.assert_has_calls([
        call(self.user1, report1),
        call(self.user2, report2),
    ])
    self.assertEqual(mock_send_email.call_count, 2)
def test_setup(self):
    """setup() unpacks the rosdep deb, then initializes and updates it."""
    # Return something other than a Mock to ease later assertions
    self.check_output_mock.return_value = b""

    self.rosdep.setup()

    # Verify that only rosdep was installed (no other .debs)
    self.assertThat(self.ubuntu_mock.call_count, Equals(1))
    self.assertThat(
        self.ubuntu_mock.return_value.get.call_count, Equals(1))
    self.assertThat(
        self.ubuntu_mock.return_value.unpack.call_count, Equals(1))
    self.ubuntu_mock.assert_has_calls(
        [
            mock.call(
                self.rosdep._rosdep_path,
                sources="sources",
                project_options=self.project,
            ),
            mock.call().get(["python-rosdep"]),
            mock.call().unpack(self.rosdep._rosdep_install_path),
        ]
    )

    # Verify that rosdep was initialized and updated
    self.assertThat(self.check_output_mock.call_count, Equals(2))
    self.check_output_mock.assert_has_calls(
        [
            mock.call(["rosdep", "init"], env=mock.ANY),
            mock.call(["rosdep", "update"], env=mock.ANY),
        ]
    )
def test_loop(mock_input, mock_time, use_case):
    """Test each use case"""
    pin = 19

    def input_generator(readings):
        # Each reading is (sleep_duration, pin_value); advance the fake
        # clock before yielding the value.
        for r in readings:
            incr_time(r[0])
            yield r[1]

    mock_input.side_effect = input_generator(use_case['readings'])
    sleeps = [r[0] for r in use_case['readings']]
    mock_time.side_effect = sleeps
    observer = Mock()
    # NOTE(review): 'sleep' is passed the list of durations; presumably the
    # Watcher consumes it as its sleep schedule — confirm against Watcher.
    w = Watcher(observer, pin,
                debounce_delay=use_case['debounce'], sleep=sleeps)
    w.start()
    w.join()
    # The watcher must sleep and sample the pin once per reading ...
    assert mock_time.mock_calls == [call(s) for s in sleeps]
    assert mock_input.mock_calls == [call(pin) for i in range(len(sleeps))]
    # ... and only report the debounced state changes.
    assert observer.update_pin.mock_calls == [
        call(19, u) for u in use_case['expected_updates']
    ]
def test_login_tries_both_addrs_and_caches(self, mock_camera, mock_store):
    """Test the login tries."""
    # Bug fix: the docstring started with four quotes (""""), leaving a
    # stray leading quote inside the docstring text.
    responses = [0]

    def fake_login(*a):
        """Fail the first login attempt, then succeed."""
        try:
            responses.pop(0)
            raise socket.error
        except IndexError:
            pass

    mock_store.return_value.get_camera_password.return_value = None
    mock_camera.return_value.login.side_effect = fake_login

    # First login: the first address raises socket.error, so the second
    # ('host-b') is tried and cached.
    self.uvc._login()
    self.assertEqual(2, mock_camera.call_count)
    self.assertEqual('host-b', self.uvc._connect_addr)

    mock_camera.reset_mock()

    # Second login: the cached address is used directly.
    self.uvc._login()
    self.assertEqual(mock_camera.call_count, 1)
    self.assertEqual(
        mock_camera.call_args, mock.call('host-b', 'admin', 'seekret')
    )
    self.assertEqual(mock_camera.return_value.login.call_count, 1)
    self.assertEqual(mock_camera.return_value.login.call_args, mock.call())
def test_setup_partial_config_v31x(self, mock_uvc, mock_remote):
    """Test the setup with a v3.1.x server."""
    config = {
        'platform': 'uvc',
        'nvr': 'foo',
        'key': 'secret',
    }
    cameras = [
        {'uuid': 'one', 'name': 'Front', 'id': 'id1'},
        {'uuid': 'two', 'name': 'Back', 'id': 'id2'},
    ]
    nvr = mock_remote.return_value
    nvr.index.return_value = cameras
    nvr.get_camera.return_value = {'model': 'UVC'}
    nvr.server_version = (3, 1, 3)

    assert setup_component(self.hass, 'camera', {'camera': config})

    # With no port configured, the default 7080 is used.
    self.assertEqual(mock_remote.call_count, 1)
    self.assertEqual(
        mock_remote.call_args, mock.call('foo', 7080, 'secret')
    )
    # On a 3.1.x server cameras are keyed by uuid and get the 'ubnt' user.
    mock_uvc.assert_has_calls([
        mock.call(mock_remote.return_value, 'one', 'Front', 'ubnt'),
        mock.call(mock_remote.return_value, 'two', 'Back', 'ubnt'),
    ])
def test_download_redownload_as_hash_mismatches(self):
    """An existing file whose hash does not match the store's digest is
    re-downloaded and the download logged as successful."""
    # Seed the target path with content whose sha512 will NOT match.
    with open('os.snap', 'wb') as f:
        f.write(b'0000000')
    snap_content = b'1234567890'
    # sha512 of snap_content; this is the digest the store advertises.
    snap_sha512 = ('12b03226a6d8be9c6e8cd5e55dc6c7920caaa39df14aab92d5e'
                   '3ea9340d1c8a4d3d0b8e4314f1f6ef131ba4bf1ceb9186ab87c'
                   '801af0d5c95b1befb8cedae2b9')
    mock_details = self.mock_get.return_value
    mock_details.ok = True
    mock_details.content = json.dumps({
        '_embedded': {
            'clickindex:package': [{
                'download_url': 'http://localhost',
                'anon_download_url': 'http://localhost',
                'download_sha512': snap_sha512,
            }],
        }
    }).encode('utf-8')
    mock_snap = Response()
    mock_snap.status_code = 200
    mock_snap._content = snap_content
    # First GET returns the package details, second the snap payload.
    self.mock_get.side_effect = [mock_details, mock_snap]
    download('os', 'edge', 'os.snap', None, 'amd64')
    self.mock_logger.info.assert_has_calls([
        call("Getting details for 'os'"),
        call("Downloading 'os'"),
        call("Successfully downloaded 'os'")])
    self.assertTrue(os.path.exists('os.snap'))
def test_pip(self, mock_run, mock_setup_pip):
    """pull() runs pip with the constraints file applied to every
    install: requirements, named packages, and the local source."""
    self.options.requirements = 'requirements.txt'
    self.options.constraints = 'constraints.txt'
    self.options.python_packages = ['test', 'packages']
    plugin = python2.Python2Plugin('test-part', self.options,
                                   self.project_options)
    setup_directories(plugin)

    pip2_bin = os.path.join(plugin.installdir, 'usr', 'bin', 'pip2')
    include_dir = os.path.join(
        plugin.installdir, 'usr', 'include', 'python2.7')
    site_packages = os.path.join(
        plugin.installdir, 'usr', 'lib', 'python2.7', 'site-packages')
    constraints_file = os.path.join(plugin.sourcedir, 'constraints.txt')
    requirements_file = os.path.join(plugin.sourcedir, 'requirements.txt')
    # Shared prefix for every pip invocation, constraints included.
    base_install = [
        'python2', pip2_bin, 'install',
        '--global-option=build_ext',
        '--global-option=-I{}'.format(include_dir),
        '--target', site_packages,
        '--constraint', constraints_file,
    ]
    expected_calls = [
        mock.call(base_install + ['--requirement', requirements_file]),
        mock.call(base_install + ['--upgrade', 'test', 'packages']),
        mock.call(base_install + ['.'], cwd=plugin.sourcedir),
    ]

    plugin.pull()

    mock_run.assert_has_calls(expected_calls)
async def test_verisure_no_default_code(hass):
    """Test configs without a default lock code."""
    await setup_verisure_locks(hass, NO_DEFAULT_LOCK_CODE_CONFIG)
    with mock_hub(NO_DEFAULT_LOCK_CODE_CONFIG, STATE_UNLOCKED) as hub:
        # Renamed from 'mock' to avoid shadowing the mock module.
        set_lock_state = hub.session.set_lock_state

        # Without a code, locking must be a no-op.
        await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, {
            'entity_id': 'lock.door_lock',
        })
        await hass.async_block_till_done()
        assert set_lock_state.call_count == 0

        await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, {
            'entity_id': 'lock.door_lock',
            'code': '12345',
        })
        await hass.async_block_till_done()
        assert set_lock_state.call_args == call('12345', LOCKS[0], 'lock')

        set_lock_state.reset_mock()

        # Same contract for unlocking.
        await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, {
            'entity_id': 'lock.door_lock',
        })
        await hass.async_block_till_done()
        assert set_lock_state.call_count == 0

        await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, {
            'entity_id': 'lock.door_lock',
            'code': '12345',
        })
        await hass.async_block_till_done()
        assert set_lock_state.call_args == call('12345', LOCKS[0], 'unlock')
def test_download_fails_due_to_hash_mismatch(self):
    """A checksum mismatch raises RuntimeError after the download."""
    details_response = self.mock_get.return_value
    details_response.ok = True
    details_response.content = json.dumps({
        '_embedded': {
            'clickindex:package': [{
                'download_url': 'http://localhost',
                'anon_download_url': 'http://localhost',
                # Deliberately wrong sha512 for the served content.
                'download_sha512': '12345',
            }],
        }
    }).encode('utf-8')
    snap_response = Response()
    snap_response.status_code = 200
    snap_response._content = b'1234567890'
    self.mock_get.side_effect = [details_response, snap_response]

    with self.assertRaises(RuntimeError) as raised:
        download('os', 'edge', 'os.snap', None, 'amd64')

    self.assertEqual("Failed to download 'os'", str(raised.exception))
    # The success message is never logged.
    self.mock_logger.info.assert_has_calls([
        call("Getting details for 'os'"),
        call("Downloading 'os'")])
    self.assertTrue(os.path.exists('os.snap'))
def test_regression_load_tasks_actions_(self, mc_get, init_jg):
    """
    https://bugzilla.redhat.com/show_bug.cgi?id=1182106
    """
    first_action = MagicMock()
    second_action = MagicMock()
    mc_get.return_value.json.return_value = {
        "actions": [first_action, second_action],
        "builds": [],
    }
    self.jg.route_build_task = MagicMock()
    self.jg.event = MagicMock()
    self.jg.process_action = MagicMock()
    # load_tasks should suppress this error
    self.jg.process_action.side_effect = IOError()

    self.jg.load_tasks()

    # Despite the IOError, every action must still be processed.
    assert self.jg.process_action.call_args_list == [
        call(first_action), call(second_action)]
async def test_fan(hass, config_entry, zha_gateway):
    """Test zha fan platform."""
    # create zigpy device
    zigpy_device = await async_init_zigpy_device(
        hass, [hvac.Fan.cluster_id, general.Basic.cluster_id], [], None,
        zha_gateway
    )

    # load up fan domain
    await hass.config_entries.async_forward_entry_setup(config_entry, DOMAIN)
    await hass.async_block_till_done()

    cluster = zigpy_device.endpoints.get(1).fan
    zha_device = zha_gateway.get_device(zigpy_device.ieee)
    entity_id = await find_entity_id(DOMAIN, zha_device, hass)
    assert entity_id is not None

    # test that the fan was created and that it is unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, zha_gateway, [zha_device])

    # test that the state has changed from unavailable to off
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on at fan (attribute report: fan_mode attribute 0 -> value 1)
    attr = make_attribute(0, 1)
    hdr = make_zcl_header(zcl_f.Command.Report_Attributes)
    cluster.handle_message(hdr, [[attr]])
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_ON

    # turn off at fan
    attr.value.value = 0
    cluster.handle_message(hdr, [[attr]])
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on from HA
    with patch(
        "zigpy.zcl.Cluster.write_attributes",
        return_value=mock_coro([zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS]),
    ):
        # turn on via UI
        await async_turn_on(hass, entity_id)
        assert len(cluster.write_attributes.mock_calls) == 1
        assert cluster.write_attributes.call_args == call({"fan_mode": 2})

    # turn off from HA
    with patch(
        "zigpy.zcl.Cluster.write_attributes",
        return_value=mock_coro([zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS]),
    ):
        # turn off via UI
        await async_turn_off(hass, entity_id)
        assert len(cluster.write_attributes.mock_calls) == 1
        assert cluster.write_attributes.call_args == call({"fan_mode": 0})

    # change speed from HA
    with patch(
        "zigpy.zcl.Cluster.write_attributes",
        return_value=mock_coro([zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS]),
    ):
        # turn on via UI
        await async_set_speed(hass, entity_id, speed=fan.SPEED_HIGH)
        assert len(cluster.write_attributes.mock_calls) == 1
        assert cluster.write_attributes.call_args == call({"fan_mode": 3})

    # test adding new fan to the network and HA
    await async_test_device_join(
        hass, zha_gateway, hvac.Fan.cluster_id, entity_id)
def test_run_copy(
        mock_split_file, mock_compress_file, mock_session, mock_s3_delete,
        mock_s3_upload, mock_rs_copy, mock_remove, rs_creds, dbapi):
    """Exercise S3.run_copy across split/compress/s3_folder combinations."""

    def reset_mocks():
        # Clear call history between scenarios; return values persist.
        mock_split_file.reset_mock()
        mock_compress_file.reset_mock()
        mock_s3_upload.reset_mock()
        mock_s3_delete.reset_mock()
        mock_rs_copy.reset_mock()
        mock_remove.reset_mock()

    with mock.patch(dbapi.__name__ + '.connect') as mock_connect:
        r = locopy.S3(dbapi=dbapi, **rs_creds)
        expected_calls_no_folder = [
            mock.call('/path/local_file.0', 's3_bucket', 'local_file.0'),
            mock.call('/path/local_file.1', 's3_bucket', 'local_file.1'),
            mock.call('/path/local_file.2', 's3_bucket', 'local_file.2')]
        expected_calls_no_folder_gzip = [
            mock.call('/path/local_file.0.gz', 's3_bucket',
                      'local_file.0.gz'),
            mock.call('/path/local_file.1.gz', 's3_bucket',
                      'local_file.1.gz'),
            mock.call('/path/local_file.2.gz', 's3_bucket',
                      'local_file.2.gz')]
        expected_calls_folder = [
            mock.call('/path/local_file.0', 's3_bucket',
                      'test/local_file.0'),
            mock.call('/path/local_file.1', 's3_bucket',
                      'test/local_file.1'),
            mock.call('/path/local_file.2', 's3_bucket',
                      'test/local_file.2')]
        expected_calls_folder_gzip = [
            mock.call('/path/local_file.0.gz', 's3_bucket',
                      'test/local_file.0.gz'),
            mock.call('/path/local_file.1.gz', 's3_bucket',
                      'test/local_file.1.gz'),
            mock.call(
                '/path/local_file.2.gz', 's3_bucket',
                'test/local_file.2.gz')]

        # Scenario: no splits, default compression (gzip).
        r.run_copy(
            '/path/local_file.txt', 's3_bucket', 'table_name', delim="|",
            copy_options=['SOME OPTION'])
        # assert
        assert not mock_split_file.called
        mock_compress_file.assert_called_with(
            '/path/local_file.txt', '/path/local_file.txt.gz')
        mock_remove.assert_called_with('/path/local_file.txt')
        mock_s3_upload.assert_called_with(
            '/path/local_file.txt.gz', 's3_bucket', 'local_file.txt.gz')
        mock_rs_copy.assert_called_with(
            'table_name', 's3://s3_bucket/local_file', '|',
            copy_options=['SOME OPTION', 'GZIP'])
        assert not mock_s3_delete.called, 'Only delete when explicit'

        reset_mocks()
        mock_split_file.return_value = \
            ['/path/local_file.0', '/path/local_file.1',
             '/path/local_file.2']
        # Scenario: splits + gzip + delete from S3 after the load.
        r.run_copy(
            '/path/local_file', 's3_bucket', 'table_name', delim="|",
            copy_options=['SOME OPTION'], splits=3, delete_s3_after=True)
        # assert
        mock_split_file.assert_called_with(
            '/path/local_file', '/path/local_file', splits=3)
        mock_compress_file.assert_called_with(
            '/path/local_file.2', '/path/local_file.2.gz')
        mock_remove.assert_called_with('/path/local_file.2')
        mock_s3_upload.assert_has_calls(expected_calls_no_folder_gzip)
        mock_rs_copy.assert_called_with(
            'table_name', 's3://s3_bucket/local_file', '|',
            copy_options=['SOME OPTION', 'GZIP'])
        # NOTE(review): Mock.called_with is not a real assertion API — the
        # auto-created attribute returns a truthy Mock, so the three checks
        # below can never fail. They likely intend assert_any_call.
        assert mock_s3_delete.called_with('s3_bucket', 'local_file.0.gz')
        assert mock_s3_delete.called_with('s3_bucket', 'local_file.1.gz')
        assert mock_s3_delete.called_with('s3_bucket', 'local_file.2.gz')

        reset_mocks()
        # Scenario: no splits, compression disabled.
        r.run_copy(
            '/path/local_file', 's3_bucket', 'table_name', delim=",",
            copy_options=['SOME OPTION'], compress=False)
        # assert
        assert not mock_split_file.called
        assert not mock_compress_file.called
        assert not mock_remove.called
        mock_s3_upload.assert_called_with(
            '/path/local_file', 's3_bucket', 'local_file')
        mock_rs_copy.assert_called_with(
            'table_name', 's3://s3_bucket/local_file', ',',
            copy_options=['SOME OPTION'])
        assert not mock_s3_delete.called, 'Only delete when explicit'

        reset_mocks()
        mock_split_file.return_value = ['/path/local_file.0',
                                        '/path/local_file.1',
                                        '/path/local_file.2']
        # Scenario: splits without compression.
        r.run_copy(
            '/path/local_file', 's3_bucket', 'table_name', delim="|",
            copy_options=['SOME OPTION'], splits=3, compress=False)
        # assert
        mock_split_file.assert_called_with(
            '/path/local_file', '/path/local_file', splits=3)
        assert not mock_compress_file.called
        assert not mock_remove.called
        mock_s3_upload.assert_has_calls(expected_calls_no_folder)
        mock_rs_copy.assert_called_with(
            'table_name', 's3://s3_bucket/local_file', '|',
            copy_options=['SOME OPTION'])
        assert not mock_s3_delete.called

        # with a s3_folder included and no splits
        reset_mocks()
        mock_split_file.return_value = \
            ['/path/local_file.0', '/path/local_file.1',
             '/path/local_file.2']
        r.run_copy(
            '/path/local_file.txt', 's3_bucket', 'table_name', delim="|",
            copy_options=['SOME OPTION'], compress=False, s3_folder='test')
        # assert
        assert not mock_split_file.called
        assert not mock_compress_file.called
        assert not mock_remove.called
        mock_s3_upload.assert_called_with(
            '/path/local_file.txt', 's3_bucket', 'test/local_file.txt')
        mock_rs_copy.assert_called_with(
            'table_name', 's3://s3_bucket/test/local_file', '|',
            copy_options=['SOME OPTION'])
        assert not mock_s3_delete.called

        # with a s3_folder included and splits
        reset_mocks()
        r.run_copy(
            '/path/local_file', 's3_bucket', 'table_name', delim="|",
            copy_options=['SOME OPTION'], splits=3, compress=False,
            s3_folder='test', delete_s3_after=True)
        # assert
        mock_split_file.assert_called_with(
            '/path/local_file', '/path/local_file', splits=3)
        assert not mock_compress_file.called
        assert not mock_remove.called
        mock_s3_upload.assert_has_calls(expected_calls_folder)
        mock_rs_copy.assert_called_with(
            'table_name', 's3://s3_bucket/test/local_file', '|',
            copy_options=['SOME OPTION'])
        # NOTE(review): same no-op 'called_with' pattern as above.
        assert mock_s3_delete.called_with('s3_bucket', 'test/local_file.0')
        assert mock_s3_delete.called_with('s3_bucket', 'test/local_file.1')
        assert mock_s3_delete.called_with('s3_bucket', 'test/local_file.2')

        # with a s3_folder included , splits, and gzip
        reset_mocks()
        r.run_copy(
            '/path/local_file', 's3_bucket', 'table_name', delim="|",
            copy_options=['SOME OPTION'], splits=3, s3_folder='test')
        # assert
        mock_split_file.assert_called_with(
            '/path/local_file', '/path/local_file', splits=3)
        assert mock_compress_file.called
        assert mock_remove.called
        mock_s3_upload.assert_has_calls(expected_calls_folder_gzip)
        mock_rs_copy.assert_called_with(
            'table_name', 's3://s3_bucket/test/local_file', '|',
            copy_options=['SOME OPTION', 'GZIP'])
        assert not mock_s3_delete.called
class TestFetchIdevs: """Tests cc_grub_dpkg.fetch_idevs()""" # Note: udevadm info returns devices in a large single line string @pytest.mark.parametrize( "grub_output,path_exists,expected_log_call,udevadm_output" ",expected_idevs", [ # Inside a container, grub not installed ( ProcessExecutionError(reason=FileNotFoundError()), False, mock.call("'grub-probe' not found in $PATH"), "", "", ), # Inside a container, grub installed ( ProcessExecutionError(stderr="failed to get canonical path"), False, mock.call("grub-probe 'failed to get canonical path'"), "", "", ), # KVM Instance ( ["/dev/vda"], True, None, ( "/dev/disk/by-path/pci-0000:00:00.0 ", "/dev/disk/by-path/virtio-pci-0000:00:00.0 ", ), "/dev/vda", ), # Xen Instance ( ["/dev/xvda"], True, None, "", "/dev/xvda", ), # NVMe Hardware Instance ( ["/dev/nvme1n1"], True, None, ( "/dev/disk/by-id/nvme-Company_hash000 ", "/dev/disk/by-id/nvme-nvme.000-000-000-000-000 ", "/dev/disk/by-path/pci-0000:00:00.0-nvme-0 ", ), "/dev/disk/by-id/nvme-Company_hash000", ), # SCSI Hardware Instance ( ["/dev/sda"], True, None, ( "/dev/disk/by-id/company-user-1 ", "/dev/disk/by-id/scsi-0Company_user-1 ", "/dev/disk/by-path/pci-0000:00:00.0-scsi-0:0:0:0 ", ), "/dev/disk/by-id/company-user-1", ), ], ) @mock.patch("cloudinit.config.cc_grub_dpkg.util.logexc") @mock.patch("cloudinit.config.cc_grub_dpkg.os.path.exists") @mock.patch("cloudinit.config.cc_grub_dpkg.subp.subp") def test_fetch_idevs( self, m_subp, m_exists, m_logexc, grub_output, path_exists, expected_log_call, udevadm_output, expected_idevs, ): """Tests outputs from grub-probe and udevadm info against grub-dpkg""" m_subp.side_effect = [grub_output, ["".join(udevadm_output)]] m_exists.return_value = path_exists log = mock.Mock(spec=Logger) idevs = fetch_idevs(log) assert expected_idevs == idevs if expected_log_call is not None: assert expected_log_call in log.debug.call_args_list
def test_run_global_version_update(
    self,
    open_mock,
    gather_local_configurations,
    find_project_configuration,
    subprocess,
) -> None:
    """Updating the global version rewrites the global configuration's
    version, pins the old version into each local configuration, and
    commits (and optionally submits) the change via hg/jf.

    The `mocks` lists alternate read-handle / write-handle pairs: even
    indices are opened for reading, odd indices receive json.dump.
    """
    arguments = MagicMock()
    arguments.hash = "abcd"
    arguments.paths = []
    arguments.push_blocking_only = False
    with patch("json.dump") as dump:
        mocks = [
            mock_open(read_data='{"version": "old"}').return_value,
            mock_open(read_data="{}").return_value,
            mock_open(read_data='{"push_blocking": false}').return_value,
            mock_open(read_data="{}").return_value,
            mock_open(read_data='{"push_blocking": true}').return_value,
            mock_open(read_data="{}").return_value,
        ]
        open_mock.side_effect = mocks
        upgrade.run_global_version_update(arguments)
        # Global config gets the new hash; locals get the old version pinned.
        dump.assert_has_calls([
            call({"version": "abcd"}, mocks[1], indent=2, sort_keys=True),
            call(
                {
                    "push_blocking": False,
                    "version": "old"
                },
                mocks[3],
                indent=2,
                sort_keys=True,
            ),
            call(
                {
                    "push_blocking": True,
                    "version": "old"
                },
                mocks[5],
                indent=2,
                sort_keys=True,
            ),
        ])
        subprocess.assert_called_once_with([
            "hg",
            "commit",
            "--message",
            upgrade._commit_message(
                "global configuration",
                summary_override="Automatic upgrade to hash `abcd`",
            ),
        ])
    # Push blocking argument: Since the push blocking only argument is only used
    # when gathering local configurations (mocked here), this is a no-op.
    # Documents it.
    subprocess.reset_mock()
    arguments.push_blocking_only = True
    arguments.submit = True
    with patch("json.dump") as dump:
        mocks = [
            mock_open(read_data='{"version": "old"}').return_value,
            mock_open(read_data="{}").return_value,
            mock_open(read_data='{"push_blocking": false}').return_value,
            mock_open(read_data="{}").return_value,
            mock_open(read_data='{"push_blocking": true}').return_value,
            mock_open(read_data="{}").return_value,
        ]
        open_mock.side_effect = mocks
        upgrade.run_global_version_update(arguments)
        dump.assert_has_calls([
            call({"version": "abcd"}, mocks[1], indent=2, sort_keys=True),
            call(
                {
                    "push_blocking": False,
                    "version": "old"
                },
                mocks[3],
                indent=2,
                sort_keys=True,
            ),
            call(
                {
                    "push_blocking": True,
                    "version": "old"
                },
                mocks[5],
                indent=2,
                sort_keys=True,
            ),
        ])
        # With submit=True a `jf submit` follows the commit.
        calls = [
            call([
                "hg",
                "commit",
                "--message",
                upgrade._commit_message(
                    "global configuration",
                    summary_override="Automatic upgrade to hash `abcd`",
                ),
            ]),
            call(["jf", "submit", "--update-fields"]),
        ]
        subprocess.assert_has_calls(calls)
    # paths passed from arguments will override the local configuration list
    # Therefore, we only read the first json configuration.
    subprocess.reset_mock()
    arguments.paths = ["foo/bar"]
    arguments.push_blocking_only = False
    arguments.submit = False
    with patch("json.dump") as dump:
        mocks = [
            mock_open(read_data='{"version": "old"}').return_value,
            mock_open(read_data="{}").return_value,
            mock_open(read_data='{"push_blocking": false}').return_value,
            mock_open(read_data="{}").return_value,
            mock_open(read_data='{"push_blocking": true}').return_value,
            mock_open(read_data="{}").return_value,
        ]
        open_mock.side_effect = mocks
        upgrade.run_global_version_update(arguments)
        dump.assert_has_calls([
            call({"version": "abcd"}, mocks[1], indent=2, sort_keys=True)
        ])
        # NOTE(review): assert_has_calls([]) is vacuously true; if the intent
        # is "no subprocess calls", this should be assert_not_called() —
        # confirm against run_global_version_update's behavior before changing.
        subprocess.assert_has_calls([])
def test_run_fixme(self, stdin_errors, run_errors, path_read_text, subprocess) -> None: arguments = MagicMock() arguments.comment = None arguments.max_line_length = 88 arguments.run = False arguments.truncate = True stdin_errors.return_value = [] run_errors.return_value = [] upgrade.run_fixme(arguments) # Test single error. with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [1]: description", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.assert_called_once_with( "# pyre-fixme[1]: description\n1\n2") # Generated files. with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 2, "concise_description": "Error [1]: description", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "# @" "generated\n1\n2\n" upgrade.run_fixme(arguments) path_write_text.assert_not_called() # Test single error with lint. arguments.run = True arguments.lint = True with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [1]: description", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) calls = [ call("# pyre-fixme[1]: description\n1\n2"), call("# pyre-fixme[1]: description\n1\n2"), ] path_write_text.assert_has_calls(calls) calls = [ call([ "arc", "lint", "--never-apply-patches", "--enforce-lint-clean", "--output", "none", ]), call().__bool__(), call(["arc", "lint", "--apply-patches", "--output", "none"]), ] subprocess.assert_has_calls(calls) arguments.run = False arguments.lint = False # Test error with comment. 
with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [1]: description", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" arguments.comment = "T1234" upgrade.run_fixme(arguments) arguments.comment = None path_write_text.assert_called_once_with( "# pyre-fixme[1]: T1234\n1\n2") # Test multiple errors and multiple lines. with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [ { "path": "path.py", "line": 1, "concise_description": "Error [1]: description", }, { "path": "path.py", "line": 2, "concise_description": "Error [1]: description", }, { "path": "path.py", "line": 2, "concise_description": "Error [2]: description", }, ] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.assert_called_once_with( "# pyre-fixme[1]: description\n" "1\n" "# pyre-fixme[1]: description\n" "# pyre-fixme[2]: description\n" "2") with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [ { "path": "path.py", "line": 2, "concise_description": "Error [10]: Description one.", }, { "path": "path.py", "line": 2, "concise_description": "Error [11]: Description two.", }, ] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.assert_called_once_with( "1\n" "# pyre-fixme[10]: Description one.\n" "# pyre-fixme[11]: Description two.\n" "2") arguments.max_line_length = 40 with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [ { "path": "path.py", "line": 2, "concise_description": "Error [1]: Description one.", }, { "path": "path.py", "line": 2, "concise_description": "Error [2]: Very long description two.", }, { "path": "path.py", "line": 2, "concise_description": "Error [3]: Very long description three.", }, 
{ "path": "path.py", "line": 2, "concise_description": "Error [4]: Description four.", }, ] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.assert_called_once_with( "1\n" "# pyre-fixme[1]: Description one.\n" "# pyre-fixme[2]: Very long descriptio...\n" "# pyre-fixme[3]: Very long descriptio...\n" "# pyre-fixme[4]: Description four.\n" "2") arguments.max_line_length = 36 arguments.truncate = False with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [ { "path": "path.py", "line": 1, "concise_description": "Error [2]: Maximum characters.", }, { "path": "path.py", "line": 2, "concise_description": "Error [2]: Too many characters.", }, ] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.assert_called_once_with( "# pyre-fixme[2]: Maximum characters.\n" "1\n" "# pyre-fixme[2]: Too many\n" "# characters.\n" "2") arguments.max_line_length = 40 arguments.truncate = False with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [ { "path": "path.py", "line": 2, "concise_description": "Error [1]: Description one.", }, { "path": "path.py", "line": 2, "concise_description": "Error [2]: Very long description two.", }, { "path": "path.py", "line": 2, "concise_description": "Error [3]: Very long description three.", }, { "path": "path.py", "line": 2, "concise_description": "Error [4]: Description four.", }, ] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.assert_called_once_with( "1\n" "# pyre-fixme[1]: Description one.\n" "# pyre-fixme[2]: Very long\n" "# description two.\n" "# pyre-fixme[3]: Very long\n" "# description three.\n" "# pyre-fixme[4]: Description four.\n" "2") arguments.truncate = True # Test errors in multiple files. 
arguments.max_line_length = 88 with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [ { "path": "path.py", "line": 1, "concise_description": "Error [1]: description", }, { "path": "other.py", "line": 2, "concise_description": "Error [2]: description", }, ] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.has_calls([ call("# pyre-fixme[1]: description\n1\n2"), call("1\n#pyre-fixme[2]: description\n2"), ]) # Test removal of extraneous ignore. with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [0]: extraneous ignore", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = " # pyre-ignore[0]: [1, 2, 3]\n2" upgrade.run_fixme(arguments) arguments.comment = None path_write_text.assert_called_once_with("2") # Test removal of extraneous ignore. with patch.object(pathlib.Path, "write_text") as path_write_text: arguments.max_line_length = 30 errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [0]: extraneous", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = ( " # pyre-ignore[0]: [1, 2, 3]\n# continuation comment\n2") upgrade.run_fixme(arguments) arguments.comment = None arguments.truncate = True path_write_text.assert_called_once_with("2") # We don't remove legitimate comments. 
with patch.object(pathlib.Path, "write_text") as path_write_text: arguments.max_line_length = 30 errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [0]: extraneous", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = ( " # pyre-ignore[0]: [1, 2, 3]\n# user comment\n2") upgrade.run_fixme(arguments) arguments.comment = None arguments.truncate = True path_write_text.assert_called_once_with("# user comment\n2") with patch.object(pathlib.Path, "write_text") as path_write_text: arguments.max_line_length = 30 errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [0]: extraneous ignore that's " "quite long", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = ( " # pyre-ignore[0]:\n# comment that doesn't fit on one line\n" "# pyre-ignore[1]:\n2") upgrade.run_fixme(arguments) arguments.comment = None arguments.truncate = True path_write_text.assert_called_once_with("# pyre-ignore[1]:\n2") with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [0]: extraneous ignore", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "# pyre-fixme[1]\n# pyre-fixme[2]\n2" upgrade.run_fixme(arguments) arguments.comment = None path_write_text.assert_called_once_with("# pyre-fixme[2]\n2") # Test removal of extraneous ignore (trailing comment). with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 1, "concise_description": "Error [0]: extraneous ignore", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1# pyre-ignore[0]: [1, 2, 3]\n2" upgrade.run_fixme(arguments) arguments.comment = None path_write_text.assert_called_once_with("1\n2") # Test long lines. 
with patch.object(pathlib.Path, "write_text") as path_write_text: arguments_short = MagicMock() arguments_short.comment = None arguments_short.max_line_length = 35 arguments_short.run = False errors = [ { "path": "path.py", "line": 1, "concise_description": "Error [1]: description one, " "that has a pretty verbose text", }, { "path": "path.py", "line": 2, "concise_description": "Error [2]: description-that-will-not-break-" "even-when-facing-adversities", }, ] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "line = 1\nline = 2\nline = 3" upgrade.run_fixme(arguments_short) path_write_text.assert_called_once_with( """# FIXME[1]: description one... line = 1 # FIXME[2]: description-tha... line = 2 line = 3""".replace(" ", "").replace("FIXME", "pyre-fixme")) # Fall back to normal description for backwards compatibility. with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 1, "description": "Error [1]: description", "concise_description": "", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.assert_called_once_with( "# pyre-fixme[1]: description\n1\n2") # Ensure that we prefer concise descriptions. with patch.object(pathlib.Path, "write_text") as path_write_text: errors = [{ "path": "path.py", "line": 1, "description": "Error [1]: description", "concise_description": "Error[1]: Concise.", }] stdin_errors.return_value = errors run_errors.return_value = errors path_read_text.return_value = "1\n2" upgrade.run_fixme(arguments) path_write_text.assert_called_once_with( "# pyre-fixme[1]: Concise.\n1\n2")
def test_run_fixme_all(
    self,
    fix,
    run_global_version_update,
    get_errors,
    remove_version,
    find_configuration,
    gather,
    subprocess,
) -> None:
    """Exercises run_fixme_all over gathered local configurations.

    Fix applied in review: `fix.called_once_with(...)` is not a Mock
    assertion (it creates a child mock and always passes). Replaced with
    `assert_called_once_with` where a single invocation is implied, and
    `assert_any_call` in the lint path (lint may re-run the fix).
    """
    arguments = MagicMock()
    arguments.lint = False
    gather.return_value = [
        upgrade.Configuration("local/.pyre_configuration.local", {"version": 123})
    ]
    # No errors: commit happens but nothing is fixed.
    get_errors.return_value = []
    upgrade.run_fixme_all(arguments)
    run_global_version_update.assert_not_called()
    fix.assert_not_called()
    subprocess.assert_called_once_with(
        ["hg", "commit", "--message", upgrade._commit_message("local")])

    fix.reset_mock()
    subprocess.reset_mock()
    errors = [{
        "line": 2,
        "column": 4,
        "path": "local.py",
        "code": 7,
        "name": "Kind",
        "concise_description": "Error",
        "inference": {},
        "ignore_error": False,
        "external_to_global_root": False,
    }]
    get_errors.return_value = errors
    upgrade.run_fixme_all(arguments)
    run_global_version_update.assert_not_called()
    # FIX: was the no-op `fix.called_once_with(...)`.
    fix.assert_called_once_with(arguments, upgrade.sort_errors(errors))
    subprocess.assert_called_once_with(
        ["hg", "commit", "--message", upgrade._commit_message("local")])

    # Test configuraton with no version set
    fix.reset_mock()
    subprocess.reset_mock()
    gather.return_value = [
        upgrade.Configuration("local/.pyre_configuration.local", {})
    ]
    upgrade.run_fixme_all(arguments)
    fix.assert_not_called()
    subprocess.assert_not_called()

    # Test with given hash
    fix.reset_mock()
    subprocess.reset_mock()
    gather.return_value = [
        upgrade.Configuration("local/.pyre_configuration.local", {"version": 123})
    ]
    arguments.hash = "abc"
    arguments.submit = True
    upgrade.run_fixme_all(arguments)
    run_global_version_update.assert_called_once_with(arguments)
    # FIX: was the no-op `fix.called_once_with(...)`.
    fix.assert_called_once_with(arguments, upgrade.sort_errors(errors))
    calls = [
        call(["hg", "commit", "--message", upgrade._commit_message("local")]),
        call(["jf", "submit", "--update-fields"]),
    ]
    subprocess.assert_has_calls(calls)

    # Test with linting
    fix.reset_mock()
    subprocess.reset_mock()
    run_global_version_update.reset_mock()
    arguments.lint = True
    upgrade.run_fixme_all(arguments)
    run_global_version_update.assert_called_once_with(arguments)
    # FIX: was the no-op `fix.called_once_with(...)`; the lint path may
    # invoke fix more than once, so assert at least one matching call.
    fix.assert_any_call(arguments, upgrade.sort_errors(errors))
    calls = [
        call(["hg", "commit", "--message", upgrade._commit_message("local")]),
        call(["jf", "submit", "--update-fields"]),
    ]
    subprocess.assert_has_calls(calls)
def test_upgrade_project(
    self,
    fix,
    run_global_version_update,
    errors_from_stdin,
    gather,
    get_errors,
    remove_version,
    subprocess,
) -> None:
    """Exercises _upgrade_project with and without lint / stdin errors.

    Fix applied in review: `fix.called_once_with(...)` is not a Mock
    assertion (it always passes silently). Replaced with
    `assert_called_once_with` / `assert_any_call` (the lint path may
    invoke fix more than once).
    """
    arguments = MagicMock()
    arguments.lint = False
    arguments.from_stdin = False
    # No local configurations gathered: nothing should happen.
    gather.return_value = []
    upgrade.run_fixme_all(arguments)
    fix.assert_not_called()
    subprocess.assert_not_called()

    errors = [{
        "line": 2,
        "column": 4,
        "path": "local.py",
        "code": 7,
        "name": "Kind",
        "concise_description": "Error",
        "inference": {},
        "ignore_error": False,
        "external_to_global_root": False,
    }]
    get_errors.return_value = errors
    configuration = upgrade.Configuration(
        "/root/local/.pyre_configuration.local", {"version": 123})
    configuration.get_path()
    upgrade._upgrade_project(arguments, configuration, "/root")
    run_global_version_update.assert_not_called()
    # FIX: was the no-op `fix.called_once_with(...)`.
    fix.assert_called_once_with(arguments, upgrade.sort_errors(errors))
    subprocess.assert_called_once_with(
        ["hg", "commit", "--message", upgrade._commit_message("local")])

    # Test with lint
    subprocess.reset_mock()
    fix.reset_mock()
    arguments.from_stdin = False
    arguments.lint = True
    upgrade._upgrade_project(arguments, configuration, "/root")
    errors_from_stdin.assert_not_called()
    run_global_version_update.assert_not_called()
    # FIX: was the no-op `fix.called_once_with(...)`; lint re-runs fix.
    fix.assert_any_call(arguments, upgrade.sort_errors(errors))
    calls = [
        call([
            "arc",
            "lint",
            "--never-apply-patches",
            "--enforce-lint-clean",
            "--output",
            "none",
        ]),
        call().__bool__(),
        call(["arc", "lint", "--apply-patches", "--output", "none"]),
        call(["hg", "commit", "--message", upgrade._commit_message("local")]),
    ]
    subprocess.assert_has_calls(calls)

    # Test with from_stdin and lint
    subprocess.reset_mock()
    fix.reset_mock()
    get_errors.reset_mock()
    arguments.from_stdin = True
    arguments.lint = True
    errors_from_stdin.return_value = errors
    get_errors.return_value = errors
    upgrade._upgrade_project(arguments, configuration, "/root")
    # Called in the first round to get initial errors
    errors_from_stdin.assert_called()
    # Called in the second round to get new errors after applying lint.
    get_errors.assert_called_once()
    run_global_version_update.assert_not_called()
    # FIX: was the no-op `fix.called_once_with(...)`.
    fix.assert_any_call(arguments, upgrade.sort_errors(errors))
    calls = [
        call([
            "arc",
            "lint",
            "--never-apply-patches",
            "--enforce-lint-clean",
            "--output",
            "none",
        ]),
        call().__bool__(),
        call(["arc", "lint", "--apply-patches", "--output", "none"]),
        call(["hg", "commit", "--message", upgrade._commit_message("local")]),
    ]
    subprocess.assert_has_calls(calls)
# KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from contextlib import contextmanager import os from unittest import mock import re class PartialEnv(dict): def __eq__(self, other): return self.items() <= other.items() _mock_call_type = type(mock.call()) def _ensure_mock_call_object(obj, **kwargs): if isinstance(obj, _mock_call_type): return obj elif isinstance(obj, str): cmd = re.split(r"\s+", obj) return mock.call(cmd, **kwargs) elif isinstance(obj, list): return mock.call(obj, **kwargs) else: raise TypeError(obj) @contextmanager
def with_verbosity_0_and_debug(config, expect):
    """debug=True forces DEBUG level (10) even when verbosity is 0."""
    message_format = '%(message)s'
    helpers.init(format=message_format, verbosity=0, debug=True)
    expected_calls = [call(format=message_format, level=10)]
    expect(config.mock_calls) == expected_calls
def test_update_auth0_user_metadata_with_users_returned(self) -> None:
    """Users with DB restrictions and matching Auth0 accounts get their
    app_metadata updated with supervision-location restrictions."""
    with self.app.test_request_context():
        # Seed two users with different allowed supervision locations.
        user_1 = generate_fake_user_restrictions(
            self.region_code,
            "*****@*****.**",
            allowed_supervision_location_ids="23",
        )
        user_2 = generate_fake_user_restrictions(
            self.region_code,
            "*****@*****.**",
            allowed_supervision_location_ids="11, EP, 4E",
        )
        add_users_to_database_session(self.database_key, [user_1, user_2])
        # Auth0 lookup returns an account for each seeded email.
        self.mock_auth0_client.get_all_users_by_email_addresses.return_value = [
            {"email": "*****@*****.**", "user_id": "0"},
            {"email": "*****@*****.**", "user_id": "1"},
        ]
        response = self.client.get(
            self.update_auth0_user_metadata_url,
            headers=self.headers,
            query_string={"region_code": self.region_code},
        )
        self.mock_auth0_client.get_all_users_by_email_addresses.assert_called_with(
            [
                "*****@*****.**",
                "*****@*****.**",
            ])
        # The comma-separated DB value is split into a list per user.
        self.mock_auth0_client.update_user_app_metadata.assert_has_calls([
            call(
                user_id="0",
                app_metadata={
                    "allowed_supervision_location_ids": ["23"],
                    "allowed_supervision_location_level":
                    "level_1_supervision_location",
                    "can_access_leadership_dashboard": True,
                    "can_access_case_triage": False,
                },
            ),
            call(
                user_id="1",
                app_metadata={
                    "allowed_supervision_location_ids": ["11", "EP", "4E"],
                    "allowed_supervision_location_level":
                    "level_1_supervision_location",
                    "can_access_leadership_dashboard": True,
                    "can_access_case_triage": False,
                },
            ),
        ])
        self.assertEqual(HTTPStatus.OK, response.status_code)
        self.assertEqual(
            b"Finished updating 2 auth0 users with restrictions for region US_MO",
            response.data,
        )
async def test_number(hass, zha_device_joined_restored, zigpy_analog_output_device):
    """Test zha number platform.

    Drives an AnalogOutput cluster end to end: attribute reads at join,
    availability, state/attribute mapping, writes from the UI, rejoin,
    and manual entity refresh. The read_attributes call counts are
    asserted after each step, so statement order matters throughout.
    """
    cluster = zigpy_analog_output_device.endpoints.get(1).analog_output
    # Canned attribute values the mocked cluster serves on read.
    cluster.PLUGGED_ATTR_READS = {
        "max_present_value": 100.0,
        "min_present_value": 1.0,
        "relinquish_default": 50.0,
        "resolution": 1.1,
        "description": "PWM1",
        "engineering_units": 98,
        "application_type": 4 * 0x10000,
    }
    update_attribute_cache(cluster)
    cluster.PLUGGED_ATTR_READS["present_value"] = 15.0
    zha_device = await zha_device_joined_restored(zigpy_analog_output_device)
    # one for present_value and one for the rest configuration attributes
    assert cluster.read_attributes.call_count == 3
    # Collect every attribute name requested across all read calls.
    attr_reads = set()
    for call_args in cluster.read_attributes.call_args_list:
        attr_reads |= set(call_args[0][0])
    assert "max_present_value" in attr_reads
    assert "min_present_value" in attr_reads
    assert "relinquish_default" in attr_reads
    assert "resolution" in attr_reads
    assert "description" in attr_reads
    assert "engineering_units" in attr_reads
    assert "application_type" in attr_reads
    entity_id = await find_entity_id(Platform.NUMBER, zha_device, hass)
    assert entity_id is not None
    await async_enable_traffic(hass, [zha_device], enabled=False)
    # test that the number was created and that it is unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
    # allow traffic to flow through the gateway and device
    assert cluster.read_attributes.call_count == 3
    await async_enable_traffic(hass, [zha_device])
    await hass.async_block_till_done()
    assert cluster.read_attributes.call_count == 6
    # test that the state has changed from unavailable to 15.0
    assert hass.states.get(entity_id).state == "15.0"
    # test attributes
    assert hass.states.get(entity_id).attributes.get("min") == 1.0
    assert hass.states.get(entity_id).attributes.get("max") == 100.0
    assert hass.states.get(entity_id).attributes.get("step") == 1.1
    assert hass.states.get(entity_id).attributes.get("icon") == "mdi:percent"
    assert hass.states.get(entity_id).attributes.get("unit_of_measurement") == "%"
    assert (
        hass.states.get(entity_id).attributes.get("friendly_name")
        == "FakeManufacturer FakeModel e769900a analog_output PWM1"
    )
    # change value from device
    assert cluster.read_attributes.call_count == 6
    await send_attributes_report(hass, cluster, {0x0055: 15})
    assert hass.states.get(entity_id).state == "15.0"
    # update value from device
    await send_attributes_report(hass, cluster, {0x0055: 20})
    assert hass.states.get(entity_id).state == "20.0"
    # change value from HA
    with patch(
        "zigpy.zcl.Cluster.write_attributes",
        return_value=mock_coro([zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS]),
    ):
        # set value via UI
        await hass.services.async_call(
            Platform.NUMBER,
            "set_value",
            {"entity_id": entity_id, "value": 30.0},
            blocking=True,
        )
        assert len(cluster.write_attributes.mock_calls) == 1
        assert cluster.write_attributes.call_args == call({"present_value": 30.0})
    cluster.PLUGGED_ATTR_READS["present_value"] = 30.0
    # test rejoin
    assert cluster.read_attributes.call_count == 6
    await async_test_rejoin(hass, zigpy_analog_output_device, [cluster], (1,))
    assert hass.states.get(entity_id).state == "30.0"
    assert cluster.read_attributes.call_count == 9
    # update device value with failed attribute report
    cluster.PLUGGED_ATTR_READS["present_value"] = 40.0
    # validate the entity still contains old value
    assert hass.states.get(entity_id).state == "30.0"
    # Manual refresh via homeassistant.update_entity picks up the new value.
    await async_setup_component(hass, "homeassistant", {})
    await hass.async_block_till_done()
    await hass.services.async_call(
        "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True
    )
    assert hass.states.get(entity_id).state == "40.0"
    assert cluster.read_attributes.call_count == 10
    assert "present_value" in cluster.read_attributes.call_args[0][0]
def test_output_handler_metric_names(dirname):
    """OutputHandler with metric_names: scalar metrics, tensor metrics
    (one scalar per element), non-scalar warning, and metric_names='all'."""
    # Two scalar metrics -> two add_scalar calls tagged "tag/<name>".
    wrapper = OutputHandler("tag", metric_names=["a", "b"])
    mock_logger = MagicMock(spec=TensorboardLogger)
    mock_logger.writer = MagicMock()
    mock_engine = MagicMock()
    mock_engine.state = State(metrics={"a": 12.23, "b": 23.45})
    mock_engine.state.iteration = 5
    wrapper(mock_engine, mock_logger, Events.ITERATION_STARTED)
    assert mock_logger.writer.add_scalar.call_count == 2
    mock_logger.writer.add_scalar.assert_has_calls([
        call("tag/a", 12.23, 5),
        call("tag/b", 23.45, 5),
    ], any_order=True)
    # A 1-D tensor metric is unrolled into one scalar per element,
    # tagged "tag/<name>/<index>".
    wrapper = OutputHandler("tag", metric_names=[
        "a",
    ])
    mock_engine = MagicMock()
    mock_engine.state = State(
        metrics={"a": torch.Tensor([0.0, 1.0, 2.0, 3.0])})
    mock_engine.state.iteration = 5
    mock_logger = MagicMock(spec=TensorboardLogger)
    mock_logger.writer = MagicMock()
    wrapper(mock_engine, mock_logger, Events.ITERATION_STARTED)
    assert mock_logger.writer.add_scalar.call_count == 4
    mock_logger.writer.add_scalar.assert_has_calls(
        [
            call("tag/a/0", 0.0, 5),
            call("tag/a/1", 1.0, 5),
            call("tag/a/2", 2.0, 5),
            call("tag/a/3", 3.0, 5),
        ],
        any_order=True,
    )
    # A non-numeric metric is skipped with a UserWarning; only the
    # numeric metric is logged.
    wrapper = OutputHandler("tag", metric_names=["a", "c"])
    mock_engine = MagicMock()
    mock_engine.state = State(metrics={"a": 55.56, "c": "Some text"})
    mock_engine.state.iteration = 7
    mock_logger = MagicMock(spec=TensorboardLogger)
    mock_logger.writer = MagicMock()
    with pytest.warns(UserWarning):
        wrapper(mock_engine, mock_logger, Events.ITERATION_STARTED)
    assert mock_logger.writer.add_scalar.call_count == 1
    mock_logger.writer.add_scalar.assert_has_calls([
        call("tag/a", 55.56, 7),
    ], any_order=True)
    # all metrics
    wrapper = OutputHandler("tag", metric_names="all")
    mock_logger = MagicMock(spec=TensorboardLogger)
    mock_logger.writer = MagicMock()
    mock_engine = MagicMock()
    mock_engine.state = State(metrics={"a": 12.23, "b": 23.45})
    mock_engine.state.iteration = 5
    wrapper(mock_engine, mock_logger, Events.ITERATION_STARTED)
    assert mock_logger.writer.add_scalar.call_count == 2
    mock_logger.writer.add_scalar.assert_has_calls([
        call("tag/a", 12.23, 5),
        call("tag/b", 23.45, 5),
    ], any_order=True)
def test_riddle_print_question(self):
    """riddle() prints the question and the attempts prompt to stdout.

    Fix applied in review: assert_has_calls takes a *list* of calls as its
    first argument; the original passed the second call() as the
    `any_order` parameter, so the assertion did not check what it claimed.
    """
    with patch('sys.stdout', Mock()) as mock_print:
        self.riddler.riddle()
        # NOTE(review): print() writes via sys.stdout.write, so the recorded
        # calls may be `call.write(...)` rather than direct `call(...)` —
        # verify against how riddle() emits output.
        mock_print.assert_has_calls([
            call('Маленький, серенький, на слона похож.'),
            call('У вас 3 попытки!'),
        ])
def with_verbosity_3(config, expect):
    """Verbosity 3 maps to DEBUG level (10)."""
    message_format = '%(message)s'
    helpers.init(format=message_format, verbosity=3)
    expected_calls = [call(format=message_format, level=10)]
    expect(config.mock_calls) == expected_calls
def test_cosine_restart_lr_update_hook():
    """Test CosineRestartLrUpdaterHook."""
    with pytest.raises(AssertionError):
        # either `min_lr` or `min_lr_ratio` should be specified
        CosineRestartLrUpdaterHook(by_epoch=False,
                                   periods=[2, 10],
                                   restart_weights=[0.5, 0.5],
                                   min_lr=0.1,
                                   min_lr_ratio=0)

    with pytest.raises(AssertionError):
        # periods and restart_weights should have the same length
        CosineRestartLrUpdaterHook(by_epoch=False,
                                   periods=[2, 10],
                                   restart_weights=[0.5],
                                   min_lr_ratio=0)

    with pytest.raises(ValueError):
        # the last cumulative_periods 7 (out of [5, 7]) should >= 10
        sys.modules['pavi'] = MagicMock()
        loader = DataLoader(torch.ones((10, 2)))
        runner = _build_demo_runner()

        # add cosine restart LR scheduler
        hook = CosineRestartLrUpdaterHook(
            by_epoch=False,
            periods=[5, 2],  # cumulative_periods [5, 7 (5 + 2)]
            restart_weights=[0.5, 0.5],
            min_lr=0.0001)
        runner.register_hook(hook)
        runner.register_hook(IterTimerHook())

        # add pavi hook
        hook = PaviLoggerHook(interval=1, add_graph=False, add_last_ckpt=True)
        runner.register_hook(hook)
        # ValueError is expected during the run, once the cumulative
        # periods fail to cover the 10 training iterations.
        runner.run([loader], [('train', 1)])
        shutil.rmtree(runner.work_dir)

    # Valid configuration: periods [5, 5] cover all 10 iterations.
    sys.modules['pavi'] = MagicMock()
    loader = DataLoader(torch.ones((10, 2)))
    runner = _build_demo_runner()

    # add cosine restart LR scheduler
    hook = CosineRestartLrUpdaterHook(by_epoch=False,
                                      periods=[5, 5],
                                      restart_weights=[0.5, 0.5],
                                      min_lr_ratio=0)
    runner.register_hook(hook)
    runner.register_hook(IterTimerHook())

    # add pavi hook
    hook = PaviLoggerHook(interval=1, add_graph=False, add_last_ckpt=True)
    runner.register_hook(hook)
    runner.run([loader], [('train', 1)])
    shutil.rmtree(runner.work_dir)

    # TODO: use a more elegant way to check values
    assert hasattr(hook, 'writer')
    # Spot-check the LR at iteration 1 (start), 6 (after restart at 0.5
    # weight), and 10 (cosine tail).
    calls = [
        call('train', {
            'learning_rate': 0.01,
            'momentum': 0.95
        }, 1),
        call('train', {
            'learning_rate': 0.0,
            'momentum': 0.95
        }, 6),
        call('train', {
            'learning_rate': 0.0009549150281252633,
            'momentum': 0.95
        }, 10)
    ]
    hook.writer.add_scalars.assert_has_calls(calls, any_order=True)
def test__heartbeat_query_errors__subscription_recreated(self):
    """When the heartbeat PUT fails with an API error, the subscription
    is recreated (new POST + GET with a fresh token) and heartbeats
    continue against the new token."""
    token1 = "test subscription"
    token2 = "second test subscription"
    paths = ["tag1", "tag2", "tag3"]
    # Timer mock whose `elapsed` event can be fired manually to simulate
    # the heartbeat interval passing.
    timer = mock.MagicMock(ManualResetTimer, wraps=ManualResetTimer.null_timer)
    type(timer).elapsed = events.events._EventSlot("elapsed")
    self._client.all_requests.configure_mock(
        side_effect=self._get_mock_request(token1, {})
    )
    uut = HttpTagSubscription.create(
        self._client, paths, ManualResetTimer.null_timer, timer
    )
    assert uut
    assert timer.start.call_count == 1
    # Creation issues exactly: subscription POST, then current-values GET.
    assert self._client.all_requests.call_count == 2
    assert self._client.all_requests.call_args_list == [
        mock.call(
            "POST",
            "/nitag/v2/subscriptions",
            params=None,
            data={"tags": paths, "updatesOnly": True},
        ),
        mock.call(
            "GET",
            "/nitag/v2/subscriptions/{id}/values/current",
            params={"id": token1},
        ),
    ]
    # Next heartbeat fails with "Unknown subscription": expect recreate.
    self._client.all_requests.configure_mock(
        side_effect=self._get_mock_request(
            token2, {}, core.ApiException("Unknown subscription")
        )
    )
    timer.elapsed()
    assert timer.start.call_count == 2
    # Three more requests: failed heartbeat PUT, recreate POST, GET.
    assert self._client.all_requests.call_count == 5
    assert self._client.all_requests.call_args_list[-3:] == [
        mock.call(
            "PUT",
            "/nitag/v2/subscriptions/{id}/heartbeat",
            params={"id": token1},
            data=None,
        ),
        mock.call(
            "POST",
            "/nitag/v2/subscriptions",
            params=None,
            data={"tags": paths, "updatesOnly": True},
        ),
        mock.call(
            "GET",
            "/nitag/v2/subscriptions/{id}/values/current",
            params={"id": token2},
        ),
    ]
    # A subsequent successful heartbeat targets the new token only.
    self._client.all_requests.configure_mock(
        side_effect=self._get_mock_request(None, None)
    )
    timer.elapsed()
    assert timer.start.call_count == 3
    assert self._client.all_requests.call_count == 6
    assert self._client.all_requests.call_args_list[-1:] == [
        mock.call(
            "PUT",
            "/nitag/v2/subscriptions/{id}/heartbeat",
            params={"id": token2},
            data=None,
        ),
    ]
def test_configurations(
    self, config_defaults, os_access, os_path_exists, os_path_isdir, os_path_isfile
) -> None:
    """Configuration should read local configuration paths (directory or
    explicit file) before the global CONFIGURATION_FILE, most specific
    first, and report the winning local configuration path.
    """
    # Assume all paths are valid.
    os_access.return_value = True
    os_path_exists.return_value = True

    # Try with directories first.
    os_path_isdir.return_value = True
    os_path_isfile.return_value = False

    # No arguments: only the global pair of configuration files is read.
    with patch.object(Configuration, "_read") as Configuration_read:
        configuration = Configuration()
        Configuration_read.assert_has_calls(
            [
                call(CONFIGURATION_FILE + ".local", path_from_root=""),
                call(CONFIGURATION_FILE, path_from_root=""),
            ]
        )
        self.assertEqual(configuration.get_local_configuration(), None)

    # original_directory alone: its .local file is read first.
    with patch.object(Configuration, "_read") as Configuration_read:
        configuration = Configuration(original_directory="original")
        Configuration_read.assert_has_calls(
            [
                call(
                    "original/" + CONFIGURATION_FILE + ".local",
                    path_from_root="original",
                ),
                call(CONFIGURATION_FILE + ".local", path_from_root=""),
                call(CONFIGURATION_FILE, path_from_root=""),
            ]
        )
        self.assertEqual(
            configuration.get_local_configuration(),
            "original/" + CONFIGURATION_FILE + ".local",
        )

    # local_configuration pointing at a directory: the .local file inside
    # that directory is read first.
    with patch.object(Configuration, "_read") as Configuration_read:
        configuration = Configuration(local_configuration="local")
        Configuration_read.assert_has_calls(
            [
                call(
                    "local/" + CONFIGURATION_FILE + ".local", path_from_root="local"
                ),
                call(CONFIGURATION_FILE + ".local", path_from_root=""),
                call(CONFIGURATION_FILE, path_from_root=""),
            ]
        )
        self.assertEqual(
            configuration.get_local_configuration(),
            "local/" + CONFIGURATION_FILE + ".local",
        )

    # Both given: explicit local_configuration wins over original_directory.
    with patch.object(Configuration, "_read") as Configuration_read:
        configuration = Configuration(
            original_directory="original", local_configuration="local"
        )
        Configuration_read.assert_has_calls(
            [
                call(
                    "local/" + CONFIGURATION_FILE + ".local", path_from_root="local"
                ),
                call(CONFIGURATION_FILE + ".local", path_from_root=""),
                call(CONFIGURATION_FILE, path_from_root=""),
            ]
        )
        self.assertEqual(
            configuration.get_local_configuration(),
            "local/" + CONFIGURATION_FILE + ".local",
        )

    # Try with regular configuration files then.
    os_path_isdir.return_value = False
    os_path_isfile.return_value = True

    # local_configuration pointing at a file: that file is read verbatim.
    with patch.object(Configuration, "_read") as Configuration_read:
        configuration = Configuration(
            local_configuration="local/.some_configuration"
        )
        Configuration_read.assert_has_calls(
            [
                call("local/.some_configuration", path_from_root="local"),
                call(CONFIGURATION_FILE + ".local", path_from_root=""),
                call(CONFIGURATION_FILE, path_from_root=""),
            ]
        )
        self.assertEqual(
            configuration.get_local_configuration(), "local/.some_configuration"
        )
def test_agent_deletion():
    """After an agent's episode terminates (done step), the AgentProcessor
    should drop all per-agent bookkeeping, and steps that are immediately
    done should never populate those dicts in the first place.
    """
    policy = create_mock_policy()
    tqueue = mock.Mock()
    name_behavior_id = "test_brain_name"
    processor = AgentProcessor(
        policy,
        name_behavior_id,
        max_trajectory_length=5,
        stats_reporter=StatsReporter("testcat"),
    )
    fake_action_outputs = {
        "action": ActionTuple(continuous=np.array([[0.1]])),
        "entropy": np.array([1.0], dtype=np.float32),
        "learning_rate": 1.0,
        "log_probs": LogProbsTuple(continuous=np.array([[0.1]])),
    }
    # One live agent per step; the "done" variants end the episode.
    mock_decision_step, mock_terminal_step = mb.create_mock_steps(
        num_agents=1,
        observation_specs=create_observation_specs_with_shapes([(8, )]),
        action_spec=ActionSpec.create_continuous(2),
    )
    mock_done_decision_step, mock_done_terminal_step = mb.create_mock_steps(
        num_agents=1,
        observation_specs=create_observation_specs_with_shapes([(8, )]),
        action_spec=ActionSpec.create_continuous(2),
        done=True,
    )
    fake_action_info = ActionInfo(
        action=ActionTuple(continuous=np.array([[0.1]])),
        env_action=ActionTuple(continuous=np.array([[0.1]])),
        value=[0.1],
        outputs=fake_action_outputs,
        agent_ids=mock_decision_step.agent_id,
    )
    processor.publish_trajectory_queue(tqueue)
    # This is like the initial state after the env reset
    processor.add_experiences(mock_decision_step, mock_terminal_step, 0,
                              ActionInfo.empty())
    # Run 3 trajectories, with different workers (to simulate different agents)
    add_calls = []
    remove_calls = []
    for _ep in range(3):
        for _ in range(5):
            processor.add_experiences(mock_decision_step, mock_terminal_step,
                                      _ep, fake_action_info)
            add_calls.append(
                mock.call([get_global_agent_id(_ep, 0)],
                          fake_action_outputs["action"]))
        processor.add_experiences(mock_done_decision_step,
                                  mock_done_terminal_step, _ep,
                                  fake_action_info)
        # Make sure we don't add experiences from the prior agents after the done
        remove_calls.append(mock.call([get_global_agent_id(_ep, 0)]))
    policy.save_previous_action.assert_has_calls(add_calls)
    policy.remove_previous_action.assert_has_calls(remove_calls)
    # Check that there are no experiences left
    assert len(processor.experience_buffers.keys()) == 0
    assert len(processor.last_take_action_outputs.keys()) == 0
    assert len(processor.episode_steps.keys()) == 0
    assert len(processor.episode_rewards.keys()) == 0
    assert len(processor.last_step_result.keys()) == 0
    # check that steps with immediate dones don't add to dicts
    processor.add_experiences(mock_done_decision_step, mock_done_terminal_step,
                              0, ActionInfo.empty())
    assert len(processor.experience_buffers.keys()) == 0
    assert len(processor.last_take_action_outputs.keys()) == 0
    assert len(processor.episode_steps.keys()) == 0
    assert len(processor.episode_rewards.keys()) == 0
    assert len(processor.last_step_result.keys()) == 0
def test_calls_auth_login_with_user_if_there_is_one(self, mock_auth):
    """auth.login should be passed the request plus the user returned by
    auth.authenticate."""
    response = self.client.get("/accounts/login?token=abc123")
    expected = call(response.wsgi_request, mock_auth.authenticate.return_value)
    self.assertEqual(mock_auth.login.call_args, expected)
def test_build_with_authenticated_proxies(self, run_mock):
    """Build with user:password proxy URLs in the environment and verify
    that host, port, username and password are written to Maven's
    settings.xml.

    NOTE(review): the proxy credentials here must agree with the
    <username>/<password> values asserted in expected_contents below
    (user1/pass1 for http, user2/pass2 for https); the previous
    '*****:*****' placeholders could never satisfy those assertions.
    """
    env_vars = (
        ('http_proxy', 'http://user1:pass1@localhost:3132'),
        ('https_proxy', 'http://user2:pass2@localhost:3133'),
        ('no_proxy', None),
    )
    for v in env_vars:
        self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))

    plugin = maven.MavenPlugin('test-part', self.options,
                               self.project_options)

    # `l` renamed to `cmd` (E741: ambiguous single-letter name).
    def side(cmd):
        # Simulate `mvn package` producing a jar in the target directory.
        os.makedirs(os.path.join(plugin.builddir, 'target'))
        open(os.path.join(plugin.builddir, 'target', 'dummy.jar'),
             'w').close()

    run_mock.side_effect = side
    settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')

    os.makedirs(plugin.sourcedir)
    plugin.build()

    run_mock.assert_has_calls([
        mock.call(['mvn', 'package', '-s', settings_path]),
    ])

    self.assertTrue(os.path.exists(settings_path),
                    'expected {!r} to exist'.format(settings_path))

    with open(settings_path) as f:
        settings_contents = f.read()

    expected_contents = (
        '<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n'
        '          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n'
        '          xsi:schemaLocation="http://maven.apache.org/SETTINGS/'
        '1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n'
        '  <interactiveMode>false</interactiveMode>\n'
        '  <proxies>\n'
        '    <proxy>\n'
        '      <id>http_proxy</id>\n'
        '      <active>true</active>\n'
        '      <protocol>http</protocol>\n'
        '      <host>localhost</host>\n'
        '      <port>3132</port>\n'
        '      <username>user1</username>\n'
        '      <password>pass1</password>\n'
        '      <nonProxyHosts>localhost</nonProxyHosts>\n'
        '    </proxy>\n'
        '    <proxy>\n'
        '      <id>https_proxy</id>\n'
        '      <active>true</active>\n'
        '      <protocol>https</protocol>\n'
        '      <host>localhost</host>\n'
        '      <port>3133</port>\n'
        '      <username>user2</username>\n'
        '      <password>pass2</password>\n'
        '      <nonProxyHosts>localhost</nonProxyHosts>\n'
        '    </proxy>\n'
        '  </proxies>\n'
        '</settings>\n')
    self.assertSettingsEqual(expected_contents, settings_contents)
def test_validate_and_inject_namespace__fls_lookups_update_failure(
    self, mock_field, mock_sobject
):
    """validate_and_inject_namespace should return False when a lookup
    with an `after` step targets a field that is not updateable.
    """
    # NOTE(review): the YAML literal's original indentation was lost in
    # formatting; reconstructed with standard nesting — confirm it still
    # parses to the intended mapping step.
    ms = parse_from_yaml(
        StringIO(
            """Insert Accounts:
                sf_object: Account
                table: Account
                fields:
                    - Name
                lookups:
                    Lookup__c:
                        table: Stuff
                        after: Insert Stuff"""
        )
    )["Insert Accounts"]
    org_config = mock.Mock()
    org_config.salesforce_client.describe.return_value = {
        "sobjects": [{"name": "Account", "createable": True}]
    }
    # Lookup__c is createable but NOT updateable, which should fail the
    # FLS check for an after-step lookup.
    org_config.salesforce_client.Account.describe.return_value = {
        "fields": [
            {"name": "Name", "createable": True},
            {"name": "Lookup__c", "updateable": False, "createable": True},
        ]
    }

    assert not ms.validate_and_inject_namespace(
        org_config, "ns", DataOperationType.INSERT
    )

    ms._validate_sobject.assert_called_once_with(
        {"Account": {"name": "Account", "createable": True}},
        None,
        DataOperationType.INSERT,
    )

    # Field dict is validated twice: once for plain fields, once for lookups.
    ms._validate_field_dict.assert_has_calls(
        [
            mock.call(
                {
                    "Name": {"name": "Name", "createable": True},
                    "Lookup__c": {
                        "name": "Lookup__c",
                        "updateable": False,
                        "createable": True,
                    },
                },
                {"Name": "Name"},
                None,
                None,
                False,
                DataOperationType.INSERT,
            ),
            mock.call(
                {
                    "Name": {"name": "Name", "createable": True},
                    "Lookup__c": {
                        "name": "Lookup__c",
                        "updateable": False,
                        "createable": True,
                    },
                },
                ms.lookups,
                None,
                None,
                False,
                DataOperationType.INSERT,
            ),
        ]
    )
def test_assume_yes_using_y_flag_using_checkpoint_delete(
    mock_emit,
    caplog,
    monkeypatch,
    empty_context_with_checkpoint_v1_stats_enabled,
):
    """
    What does this test and why?
    All versions of the --assume-yes flag (--assume-yes/--yes/-y) should behave the same.
    """
    context: DataContext = empty_context_with_checkpoint_v1_stats_enabled
    monkeypatch.chdir(os.path.dirname(context.root_directory))
    runner: CliRunner = CliRunner(mix_stderr=False)
    checkpoint_name: str = "my_v1_checkpoint"
    result: Result = runner.invoke(
        cli,
        f"--v3-api -y checkpoint delete {checkpoint_name}",
        catch_exceptions=False,
    )
    stdout: str = result.stdout
    assert result.exit_code == 0

    # With -y, no confirmation prompt should appear in the output.
    assert (
        f'Are you sure you want to delete the Checkpoint "{checkpoint_name}" (this action is irreversible)?'
        not in stdout
    )
    # This assertion is extra assurance since this test is too permissive if we change the confirmation message
    assert "[Y/n]" not in stdout
    assert 'Checkpoint "my_v1_checkpoint" deleted.' in stdout

    # Usage-statistics events emitted, in order.
    expected_call_args_list = [
        mock.call(
            {"event_payload": {}, "event": "data_context.__init__", "success": True}
        ),
        mock.call(
            {
                "event": "cli.checkpoint.delete.begin",
                "event_payload": {"api_version": "v3"},
                "success": True,
            }
        ),
        mock.call(
            {
                "event": "cli.checkpoint.delete.end",
                "event_payload": {"api_version": "v3"},
                "success": True,
            }
        ),
    ]
    assert mock_emit.call_count == len(expected_call_args_list)
    assert mock_emit.call_args_list == expected_call_args_list

    assert_no_logging_messages_or_tracebacks(
        caplog,
        result,
    )

    # The checkpoint should really be gone afterwards.
    result = runner.invoke(
        cli,
        "--v3-api checkpoint list",  # F541 fix: no placeholders, f-prefix dropped
        catch_exceptions=False,
    )
    stdout = result.stdout
    assert result.exit_code == 0
    assert "No Checkpoints found." in stdout
def test_log_current_versions(self, mock_log):
    """Version info is logged only at debug level, with the OS line
    formatted per platform (linux / macOS / Windows)."""
    self.subject(["streamlink", "--loglevel", "info"])
    self.assertEqual(mock_log.debug.mock_calls, [],
                     "Doesn't log anything if not debug logging")

    # Linux: OS line comes from platform.platform().
    with patch("sys.platform", "linux"), \
         patch("platform.platform", Mock(return_value="linux")):
        self.subject(["streamlink", "--loglevel", "debug"])
        self.assertEqual(mock_log.debug.mock_calls[:4], [
            call("OS: linux"),
            call("Python: python"),
            call("Streamlink: streamlink"),
            call("Requests(requests), Socks(socks), Websocket(websocket)")
        ])
        mock_log.debug.reset_mock()

    # macOS: OS line is built from platform.mac_ver().
    with patch("sys.platform", "darwin"), \
         patch("platform.mac_ver", Mock(return_value=["0.0.0"])):
        self.subject(["streamlink", "--loglevel", "debug"])
        self.assertEqual(mock_log.debug.mock_calls[:4], [
            call("OS: macOS 0.0.0"),
            call("Python: python"),
            call("Streamlink: streamlink"),
            call("Requests(requests), Socks(socks), Websocket(websocket)")
        ])
        mock_log.debug.reset_mock()

    # Windows: OS line combines platform.system() and platform.release().
    with patch("sys.platform", "win32"), \
         patch("platform.system", Mock(return_value="Windows")), \
         patch("platform.release", Mock(return_value="0.0.0")):
        self.subject(["streamlink", "--loglevel", "debug"])
        self.assertEqual(mock_log.debug.mock_calls[:4], [
            call("OS: Windows 0.0.0"),
            call("Python: python"),
            call("Streamlink: streamlink"),
            call("Requests(requests), Socks(socks), Websocket(websocket)")
        ])
        mock_log.debug.reset_mock()
def test_calls_authenticate_with_uid_from_get_request(self, mock_auth):
    """Visiting the login URL should pass the token to auth.authenticate."""
    self.client.get("/accounts/login?token=abc123")
    actual = mock_auth.authenticate.call_args
    self.assertEqual(actual, call("abc123"))
def test_includeme():
    """Assert the exact set AND order of routes registered by includeme()."""
    config = Mock(spec_set=["add_route"])

    includeme(config)

    # This may look like a ridiculous test, but the cost of keeping it
    # up-to-date is hopefully pretty low (run the tests with -vv, copy the new
    # expected value, strip out any Unicode prefixes) and it serves as a check
    # to ensure that any changes made to the routes were intended.
    calls = [
        call("index", "/"),
        call("robots", "/robots.txt"),
        call("via_redirect", "/via"),
        call("login", "/login"),
        call("logout", "/logout"),
        call("signup", "/signup"),
        call("activate", "/activate/{id}/{code}"),
        call("forgot_password", "/forgot-password"),
        call("account_reset", "/account/reset"),
        call("account_reset_with_code", "/account/reset/{code}"),
        call("account", "/account/settings"),
        call("account_profile", "/account/profile"),
        call("account_notifications", "/account/settings/notifications"),
        call("account_developer", "/account/developer"),
        call("claim_account_legacy", "/claim_account/{token}"),
        call("dismiss_sidebar_tutorial", "/app/dismiss_sidebar_tutorial"),
        call("activity.search", "/search"),
        call(
            "activity.user_search",
            "/users/{username}",
            factory="h.traversal:UserRoot",
            traverse="/{username}",
        ),
        call("admin.index", "/admin/"),
        call("admin.admins", "/admin/admins"),
        call("admin.badge", "/admin/badge"),
        call("admin.features", "/admin/features"),
        call("admin.cohorts", "/admin/features/cohorts"),
        call("admin.cohorts_edit", "/admin/features/cohorts/{id}"),
        call("admin.groups", "/admin/groups"),
        call("admin.groups_create", "/admin/groups/new"),
        call(
            "admin.groups_delete",
            "/admin/groups/delete/{id}",
            factory="h.traversal.GroupRoot",
            traverse="/{id}",
        ),
        call(
            "admin.groups_edit",
            "/admin/groups/{id}",
            factory="h.traversal.GroupRoot",
            traverse="/{id}",
        ),
        call("admin.mailer", "/admin/mailer"),
        call("admin.mailer_test", "/admin/mailer/test"),
        call("admin.nipsa", "/admin/nipsa"),
        call("admin.oauthclients", "/admin/oauthclients"),
        call("admin.oauthclients_create", "/admin/oauthclients/new"),
        call(
            "admin.oauthclients_edit",
            "/admin/oauthclients/{id}",
            factory="h.traversal.AuthClientRoot",
            traverse="/{id}",
        ),
        call("admin.organizations", "/admin/organizations"),
        call("admin.organizations_create", "/admin/organizations/new"),
        call(
            "admin.organizations_delete",
            "/admin/organizations/delete/{pubid}",
            factory="h.traversal.OrganizationRoot",
            traverse="/{pubid}",
        ),
        call(
            "admin.organizations_edit",
            "/admin/organizations/{pubid}",
            factory="h.traversal.OrganizationRoot",
            traverse="/{pubid}",
        ),
        call("admin.staff", "/admin/staff"),
        call("admin.users", "/admin/users"),
        call("admin.users_activate", "/admin/users/activate"),
        call("admin.users_delete", "/admin/users/delete"),
        call("admin.users_rename", "/admin/users/rename"),
        call("admin.search", "/admin/search"),
        call(
            "annotation",
            "/a/{id}",
            factory="h.traversal:AnnotationRoot",
            traverse="/{id}",
        ),
        call("stream", "/stream"),
        call("stream.user_query", "/u/{user}"),
        call("stream.tag_query", "/t/{tag}"),
        call("assets", "/assets/*subpath"),
        call("api.index", "/api/"),
        call("api.links", "/api/links"),
        call(
            "api.annotations", "/api/annotations", factory="h.traversal:AnnotationRoot"
        ),
        call(
            "api.annotation",
            "/api/annotations/{id:[A-Za-z0-9_-]{20,22}}",
            factory="h.traversal:AnnotationRoot",
            traverse="/{id}",
        ),
        call(
            "api.annotation_flag",
            "/api/annotations/{id:[A-Za-z0-9_-]{20,22}}/flag",
            factory="h.traversal:AnnotationRoot",
            traverse="/{id}",
        ),
        call(
            "api.annotation_hide",
            "/api/annotations/{id:[A-Za-z0-9_-]{20,22}}/hide",
            factory="h.traversal:AnnotationRoot",
            traverse="/{id}",
        ),
        call(
            "api.annotation.jsonld",
            "/api/annotations/{id:[A-Za-z0-9_-]{20,22}}.jsonld",
            factory="h.traversal:AnnotationRoot",
            traverse="/{id}",
        ),
        call(
            "api.bulk",
            "/api/bulk",
            request_method="POST",
            factory="h.traversal.BulkAPIRoot",
        ),
        call("api.groups", "/api/groups", factory="h.traversal.GroupRoot"),
        call(
            "api.group_upsert",
            "/api/groups/{id}",
            request_method="PUT",
            factory="h.traversal.GroupUpsertRoot",
            traverse="/{id}",
        ),
        call(
            "api.group",
            "/api/groups/{id}",
            request_method=("GET", "PATCH"),
            factory="h.traversal.GroupRoot",
            traverse="/{id}",
        ),
        call("api.profile", "/api/profile", factory="h.traversal.ProfileRoot"),
        call("api.profile_groups", "/api/profile/groups"),
        call("api.debug_token", "/api/debug-token"),
        call(
            "api.group_members",
            "/api/groups/{pubid}/members",
            factory="h.traversal.GroupRoot",
            traverse="/{pubid}",
        ),
        call(
            "api.group_member",
            "/api/groups/{pubid}/members/{userid}",
            factory="h.traversal.GroupRoot",
            traverse="/{pubid}",
        ),
        call("api.search", "/api/search"),
        call("api.users", "/api/users", factory="h.traversal.UserRoot"),
        call(
            "api.user_read",
            "/api/users/{userid}",
            request_method="GET",
            factory="h.traversal.UserUserIDRoot",
            traverse="/{userid}",
        ),
        call(
            "api.user",
            "/api/users/{username}",
            factory="h.traversal.UserRoot",
            traverse="/{username}",
        ),
        call("badge", "/api/badge"),
        call("token", "/api/token"),
        call("oauth_authorize", "/oauth/authorize"),
        call("oauth_revoke", "/oauth/revoke"),
        call("sidebar_app", "/app.html"),
        call("notebook_app", "/notebook"),
        call("embed", "/embed.js"),
        call("stream_atom", "/stream.atom"),
        call("stream_rss", "/stream.rss"),
        call(
            "organization_logo",
            "/organizations/{pubid}/logo",
            factory="h.traversal.OrganizationLogoRoot",
            traverse="/{pubid}",
        ),
        call("group_create", "/groups/new"),
        call(
            "group_edit",
            "/groups/{pubid}/edit",
            factory="h.traversal.GroupRoot",
            traverse="/{pubid}",
        ),
        call(
            "group_read",
            "/groups/{pubid}/{slug:[^/]*}",
            factory="h.traversal.GroupRoot",
            traverse="/{pubid}",
        ),
        call(
            "group_read_noslug",
            "/groups/{pubid}",
            factory="h.traversal.GroupRoot",
            traverse="/{pubid}",
        ),
        call("help", "/docs/help"),
        call("onboarding", "/welcome/"),
        call("custom_onboarding", "/welcome/{slug}"),
        call("unsubscribe", "/notification/unsubscribe/{token}"),
        call("status", "/_status"),
        call("about", "/about/", static=True),
        call("bioscience", "/bioscience/", static=True),
        call("blog", "/blog/", static=True),
        call(
            "chrome-extension",
            "https://chrome.google.com/webstore/detail/bjfhmglciegochdpefhhlphglcehbmek",
            static=True,
        ),
        call("contact", "/contact/", static=True),
        call("contribute", "/contribute/", static=True),
        call("education", "/education/", static=True),
        call("for-publishers", "/for-publishers/", static=True),
        call("fund", "/fund/", static=True),
        call("help-center", "/help/", static=True),
        call("hypothesis-github", "https://github.com/hypothesis", static=True),
        call("hypothesis-twitter", "https://twitter.com/hypothes_is", static=True),
        call("jobs", "/jobs/", static=True),
        call("press", "/press/", static=True),
        call("privacy", "/privacy/", static=True),
        call("roadmap", "/roadmap/", static=True),
        call("team", "/team/", static=True),
        call("terms-of-service", "/terms-of-service/", static=True),
        call(
            "wordpress-plugin", "https://wordpress.org/plugins/hypothesis/", static=True
        ),
    ]

    # Test each one one at a time to make it a bit easier to spot which one
    # isn't in the list
    for single_call in calls:
        assert single_call in config.add_route.mock_calls

    # Then we can assert the order here
    assert config.add_route.mock_calls == calls
def test_validate_and_inject_namespace__injection_lookups(
    self, mock_field, mock_sobject
):
    """With inject_namespaces=True, a lookup declared as Lookup__c should
    be matched against the namespaced ns__Lookup__c field and validation
    should succeed.
    """
    # NOTE(review): the YAML literal's original indentation was lost in
    # formatting; reconstructed with standard nesting — confirm it still
    # parses to the intended mapping step.
    ms = parse_from_yaml(
        StringIO(
            """Insert Accounts:
                sf_object: Account
                table: Account
                fields:
                    - Name
                lookups:
                    Lookup__c:
                        table: Stuff"""
        )
    )["Insert Accounts"]
    org_config = mock.Mock()
    org_config.salesforce_client.describe.return_value = {
        "sobjects": [{"name": "Account", "createable": True}]
    }
    # The org exposes only the namespaced variant of the lookup field.
    org_config.salesforce_client.Account.describe.return_value = {
        "fields": [
            {"name": "Name", "createable": True},
            {"name": "ns__Lookup__c", "updateable": False, "createable": True},
        ]
    }

    assert ms.validate_and_inject_namespace(
        org_config, "ns", DataOperationType.INSERT, inject_namespaces=True
    )

    ms._validate_sobject.assert_called_once_with(
        CaseInsensitiveDict({"Account": {"name": "Account", "createable": True}}),
        mock.ANY,  # local function def
        DataOperationType.INSERT,
    )

    # Field dict is validated twice: once for plain fields, once for lookups.
    ms._validate_field_dict.assert_has_calls(
        [
            mock.call(
                {
                    "Name": {"name": "Name", "createable": True},
                    "ns__Lookup__c": {
                        "name": "ns__Lookup__c",
                        "updateable": False,
                        "createable": True,
                    },
                },
                ms.fields,
                mock.ANY,  # local function def.
                mock.ANY,  # local function def.
                False,
                DataOperationType.INSERT,
            ),
            mock.call(
                {
                    "Name": {"name": "Name", "createable": True},
                    "ns__Lookup__c": {
                        "name": "ns__Lookup__c",
                        "updateable": False,
                        "createable": True,
                    },
                },
                ms.lookups,
                mock.ANY,  # local function def.
                mock.ANY,  # local function def.
                False,
                DataOperationType.INSERT,
            ),
        ]
    )
def test_hide_multiple_plots_calls_hide_in_model(self):
    """Hiding with two plots selected should call hide_plot once per plot,
    in selection order.

    Uses assertEqual: assertEquals is a deprecated alias (removed in
    Python 3.12) and the sibling show test already uses assertEqual.
    """
    self.view.get_all_selected_plot_numbers = mock.Mock(return_value=[1, 2])
    self.presenter.hide_selected_plots()
    self.assertEqual(self.model.hide_plot.mock_calls[0], mock.call(1))
    self.assertEqual(self.model.hide_plot.mock_calls[1], mock.call(2))
def test_log_root_warning(self, mock_log):
    """Running as root should emit exactly one info-level warning."""
    self.subject(["streamlink"])
    expected_calls = [call("streamlink is running as root! Be careful!")]
    self.assertEqual(mock_log.info.mock_calls, expected_calls)
def test_show_multiple_plots_shows_them(self):
    """Showing with two plots selected should forward each plot number to
    the model, in selection order."""
    self.view.get_all_selected_plot_numbers = mock.Mock(return_value=[1, 2])
    self.presenter.show_multiple_selected()
    for index, plot_number in enumerate([1, 2]):
        self.assertEqual(self.model.show_plot.mock_calls[index],
                         mock.call(plot_number))
def convert_list_to_calls(self, list_to_convert):
    """Return a list of mock.call objects, one wrapping each item.

    Convenience for building the expected-calls argument of
    ``assert_has_calls``.
    """
    # Comprehension instead of a manual append loop (PERF401).
    return [mock.call(item) for item in list_to_convert]