async def test_load_hassio(hass):
    """Test that we load Hass.io component."""
    # Without HASSIO in the environment no extra component is selected.
    with patch.dict(os.environ, {}, clear=True):
        assert bootstrap._get_components(hass, {}) == set()

    # Presence of the HASSIO env var pulls in the 'hassio' component.
    with patch.dict(os.environ, {'HASSIO': '1'}):
        assert bootstrap._get_components(hass, {}) == {'hassio'}
def test_search_tags_silent_forced(app, settings, tracks, tracks_volume):
    """Silent forced/hidden tag settings filter the search results."""
    # Baseline: no silent tag filters configured, all tracks visible.
    assert settings['karakara.search.tag.silent_forced'] == []
    assert settings['karakara.search.tag.silent_hidden'] == []
    data = app.get('/search_list/.json').json['data']
    assert len(data['trackids']) == 19

    # Test silent_forced - restrict down to 'category:anime' tracks
    # - t1 and t2 are the only two tracks tagged as anime
    with patch.dict(settings, {'karakara.search.tag.silent_forced': ['category:anime']}):
        assert settings['karakara.search.tag.silent_forced'] == ['category:anime']
        data = app.get('/search_list/.json').json['data']
        assert len(data['trackids']) == 2
        assert 't1' in data['trackids']

    # Test silent_hidden - hide tracks with tag 'category:anime'
    # - t1 and t2 are the only two tracks tagged as anime; they should be
    #   hidden in the response
    with patch.dict(settings, {'karakara.search.tag.silent_hidden': ['category:anime']}):
        assert settings['karakara.search.tag.silent_hidden'] == ['category:anime']
        data = app.get('/search_list/.json').json['data']
        assert len(data['trackids']) == 17
        assert 't1' not in data['trackids']

    # patch.dict restores the original (empty) settings on exit.
    assert not settings['karakara.search.tag.silent_forced']
    assert not settings['karakara.search.tag.silent_hidden']
def test_two_step_flow(hass, client):
    """Test we can finish a two step flow."""
    set_component(
        hass, 'test',
        MockModule('test', async_setup_entry=mock_coro_func(True)))

    class TestFlow(core_ce.ConfigFlow):
        VERSION = 1

        @asyncio.coroutine
        def async_step_user(self, user_input=None):
            # First step shows a form whose submission goes to 'account'.
            return self.async_show_form(
                step_id='account',
                data_schema=vol.Schema({
                    'user_title': str
                }))

        @asyncio.coroutine
        def async_step_account(self, user_input=None):
            # Second step creates the entry from the submitted title.
            return self.async_create_entry(
                title=user_input['user_title'],
                data={'secret': 'account_token'}
            )

    # Step 1: start the flow and check the serialized form description.
    with patch.dict(HANDLERS, {'test': TestFlow}):
        resp = yield from client.post('/api/config/config_entries/flow',
                                      json={'handler': 'test'})
        assert resp.status == 200
        data = yield from resp.json()
        flow_id = data.pop('flow_id')
        assert data == {
            'type': 'form',
            'handler': 'test',
            'step_id': 'account',
            'data_schema': [
                {
                    'name': 'user_title',
                    'type': 'string'
                }
            ],
            'description_placeholders': None,
            'errors': None
        }

    # Step 2: submit the form and check the created entry payload.
    with patch.dict(HANDLERS, {'test': TestFlow}):
        resp = yield from client.post(
            '/api/config/config_entries/flow/{}'.format(flow_id),
            json={'user_title': 'user-title'})
        assert resp.status == 200
        data = yield from resp.json()
        data.pop('flow_id')
        assert data == {
            'handler': 'test',
            'type': 'create_entry',
            'title': 'user-title',
            'version': 1,
            'description': None,
            'description_placeholders': None,
        }
def test_dict_context_manager(self): foo = {} with patch.dict(foo, {'a': 'b'}): self.assertEqual(foo, {'a': 'b'}) self.assertEqual(foo, {}) with self.assertRaises(NameError), patch.dict(foo, {'a': 'b'}): self.assertEqual(foo, {'a': 'b'}) raise NameError('Konrad') self.assertEqual(foo, {})
def test_cookie_secret_env(tmpdir):
    """JPY_COOKIE_SECRET env var is validated and used as the cookie secret."""
    hub = MockHub(cookie_secret_file=str(tmpdir.join('cookie_secret')))

    # Non-hex values are rejected.
    with patch.dict(os.environ, {'JPY_COOKIE_SECRET': 'not hex'}):
        with pytest.raises(ValueError):
            hub.init_secrets()

    # Valid hex is decoded; nothing is persisted to the secret file.
    with patch.dict(os.environ, {'JPY_COOKIE_SECRET': 'abc123'}):
        hub.init_secrets()
        assert hub.cookie_secret == binascii.a2b_hex('abc123')
    assert not os.path.exists(hub.cookie_secret_file)
def test_setup_hassio_no_additional_data(hass, aioclient_mock):
    """Test setup with API push default data."""
    with patch.dict(os.environ, MOCK_ENVIRON), \
            patch.dict(os.environ, {'HASSIO_TOKEN': "123456"}):
        result = yield from async_setup_component(hass, 'hassio', {
            'hassio': {},
        })
        assert result

    # NOTE(review): 3 API calls are expected during setup — the exact
    # endpoints are defined by the hassio component, not visible here.
    assert aioclient_mock.call_count == 3
    # The token from the environment must be sent as the API key header.
    assert aioclient_mock.mock_calls[-1][3]['X-HASSIO-KEY'] == "123456"
def hassio_env():
    """Fixture to inject hassio env."""
    # Fake a supervisor environment: address + token env vars, a healthy
    # is_connected ping, and a failing get_homeassistant_info call.
    with patch.dict(os.environ, {'HASSIO': "127.0.0.1"}), \
            patch('homeassistant.components.hassio.HassIO.is_connected',
                  Mock(return_value=mock_coro(
                      {"result": "ok", "data": {}}))), \
            patch.dict(os.environ, {'HASSIO_TOKEN': "123456"}), \
            patch('homeassistant.components.hassio.HassIO.'
                  'get_homeassistant_info',
                  Mock(side_effect=HassioAPIError())):
        yield
def test_dict_context_manager(self): foo = {} with patch.dict(foo, {"a": "b"}): self.assertEqual(foo, {"a": "b"}) self.assertEqual(foo, {}) with self.assertRaises(NameError): with patch.dict(foo, {"a": "b"}): self.assertEqual(foo, {"a": "b"}) raise NameError("Konrad") self.assertEqual(foo, {})
def test_wrapper_workspace(check_call, makedirs):
    """Workspace dir is derived from env vars and the --project/--job flag."""
    # Project scope: workspace keyed only on PROJECT_NAME.
    with patch.dict(os.environ, {'PROJECT_NAME': 'unittestproject', 'JOB_NAME': 'unittestjob'}):
        sanctify.wrapper_workspace(['--project', '--', 'job.sh'])
    workspace = os.path.expanduser('~/.sanctify/workspace/project/unittestproject')
    eq_(workspace, makedirs.call_args[0][0])
    eq_(['job.sh'], check_call.call_args[0][0])
    eq_(workspace, check_call.call_args[1]['cwd'])

    # Job scope: workspace keyed on PROJECT_NAME and JOB_NAME.
    with patch.dict(os.environ, {'PROJECT_NAME': 'unittestproject', 'JOB_NAME': 'unittestjob'}):
        sanctify.wrapper_workspace(['--job', '--', 'job.sh'])
    eq_(os.path.expanduser('~/.sanctify/workspace/job/unittestproject/unittestjob'),
        makedirs.call_args[0][0])
    eq_(['job.sh'], check_call.call_args[0][0])
def test_cookie_secret_env(tmpdir, request):
    """JPY_COOKIE_SECRET env var is validated and used as the cookie secret."""
    kwargs = {'cookie_secret_file': str(tmpdir.join('cookie_secret'))}
    # The test module may opt in to internal SSL; honour its flag.
    ssl_enabled = getattr(request.module, "ssl_enabled", False)
    if ssl_enabled:
        kwargs['internal_certs_location'] = str(tmpdir)
    hub = MockHub(**kwargs)

    # Non-hex values are rejected.
    with patch.dict(os.environ, {'JPY_COOKIE_SECRET': 'not hex'}):
        with pytest.raises(ValueError):
            hub.init_secrets()

    # Valid hex is decoded; nothing is persisted to the secret file.
    with patch.dict(os.environ, {'JPY_COOKIE_SECRET': 'abc123'}):
        hub.init_secrets()
        assert hub.cookie_secret == binascii.a2b_hex('abc123')
    assert not os.path.exists(hub.cookie_secret_file)
def test_autocomplete(self):
    """Shell auto-completion suggests command groups and subcommands."""
    class TestCommand(Cli):
        def command_avocado(self, kwarg):
            pass

    # Completing after the bare program name offers the command group.
    argv = 'manage.py'
    test_cmd = TestCommand()
    with patch.dict('os.environ', {'IKTOMI_AUTO_COMPLETE': '1',
                                   'COMP_WORDS': argv,
                                   'COMP_CWORD': '1'
                                   }):
        out = get_io()
        with patch.object(sys, 'stdout', out):
            # Completion machinery exits the process when done.
            with self.assertRaises(SystemExit):
                manage(dict(fruit=test_cmd), argv.split())
        self.assertEqual(u'fruit fruit:', out.getvalue())

    # A partial group name still completes to the full group.
    argv = 'manage.py fr'
    test_cmd = TestCommand()
    with patch.dict('os.environ', {'IKTOMI_AUTO_COMPLETE': '1',
                                   'COMP_WORDS': argv,
                                   'COMP_CWORD': '1'
                                   }):
        out = get_io()
        with patch.object(sys, 'stdout', out):
            with self.assertRaises(SystemExit):
                manage(dict(fruit=test_cmd), argv.split())
        self.assertEqual(u'fruit fruit:', out.getvalue())

    # After 'group:' the subcommands are offered.
    argv = 'manage.py fruit:'
    test_cmd = TestCommand()
    with patch.dict('os.environ', {'IKTOMI_AUTO_COMPLETE': '1',
                                   'COMP_WORDS': argv.replace(":", " : "),
                                   'COMP_CWORD': '2'
                                   }):
        out = get_io()
        with patch.object(sys, 'stdout', out):
            with self.assertRaises(SystemExit):
                manage(dict(fruit=test_cmd), argv.split())
        self.assertEqual(u'avocado', out.getvalue())

    # A partial subcommand also completes.
    argv = 'manage.py fruit:av'
    test_cmd = TestCommand()
    with patch.dict('os.environ', {'IKTOMI_AUTO_COMPLETE': '1',
                                   'COMP_WORDS': argv.replace(":", " : "),
                                   'COMP_CWORD': '3'
                                   }):
        out = get_io()
        with patch.object(sys, 'stdout', out):
            with self.assertRaises(SystemExit):
                manage(dict(fruit=test_cmd), argv.split())
        self.assertEqual(u'avocado', out.getvalue())
def setUp(self):
    """
    Patch the DBus module
    :return:
    """
    self.dbus_mock = MagicMock()
    self.dbus_exception_mock = MagicMock()
    self.dbus_service_mock = MagicMock()
    self.mainloop_mock = MagicMock()
    self.gobject_mock = MagicMock()

    # Replace the real DBus/GLib modules with mocks before importing the
    # module under test, so its import-time DBus access is intercepted.
    modules = {
        'dbus': self.dbus_mock,
        'dbus.exceptions': self.dbus_exception_mock,
        'dbus.service': self.dbus_service_mock,
        'dbus.mainloop.glib': self.mainloop_mock,
        'gi.repository': self.gobject_mock,
    }

    self.dbus_mock.Interface.return_value.GetManagedObjects.return_value = tests.obj_data.full_ubits
    self.dbus_mock.Interface.return_value.Get = mock_get
    self.dbus_mock.Interface.return_value.Set = mock_set
    self.dbus_mock.SystemBus = MagicMock()

    self.module_patcher = patch.dict('sys.modules', modules)
    self.module_patcher.start()
    from bluezero import eddystone_beacon
    self.module_under_test = eddystone_beacon
def test_type_covers(type_name, entity_id, state, attrs):
    """Test if cover types are associated correctly."""
    mock_type = Mock()
    # Register the mock under the parametrized type name and check that
    # building an accessory for the entity dispatches to it.
    with patch.dict(TYPES, {type_name: mock_type}):
        entity_state = State(entity_id, state, attrs)
        get_accessory(None, None, entity_state, 2, {})
        assert mock_type.called
def test_detectHit_allParameters(self):
    """detectHit forwards every parameter to the camera's rayCast call."""
    bge = Mock()
    sourcePosition = "sp"
    targetPosition = "tp"
    distance = "dist"
    filterProperty = "fp"
    normalDirection = "nd"
    excludeUnfiltered = False
    hitObject = "ho"
    hitPosition = "hp"
    hitNormal = "hn"
    # rayCast returns (object, position, normal).
    mock = MagicMock(
        return_value=[hitObject, hitPosition, hitNormal])
    bge.logic.getCurrentScene().active_camera.rayCast = mock

    # The bge module only exists inside Blender — inject the mock.
    with patch.dict('sys.modules', {'bge': bge}):
        from mbge import physics
        hit = physics.detectHit(sourcePosition, targetPosition, distance,
                                property=filterProperty,
                                normalDirection=normalDirection,
                                xray=excludeUnfiltered)
        self.assertEqual(hit.object, hitObject)
        self.assertEqual(hit.position, hitPosition)
        self.assertEqual(hit.normal, hitNormal)
        bge.logic.getCurrentScene().active_camera.rayCast.assert_called_once_with(
            sourcePosition, targetPosition, distance, filterProperty,
            normalDirection, excludeUnfiltered, 0)
def test_maxFrames_get(self):
    """The maxFrames property proxies bge.logic.getMaxPhysicsFrame()."""
    bge = Mock()
    bge.logic.getMaxPhysicsFrame = Mock(return_value=5)
    # Inject the fake bge module before importing the wrapper.
    with patch.dict('sys.modules', {'bge': bge}):
        from mbge import physics
        self.assertEqual(physics.maxFrames, 5)
def test_saving_and_loading(hass):
    """Test that we're saving and loading correctly."""
    class TestFlow(data_entry_flow.FlowHandler):
        VERSION = 5

        @asyncio.coroutine
        def async_step_init(self, user_input=None):
            return self.async_create_entry(
                title='Test Title',
                data={
                    'token': 'abcd'
                }
            )

    with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
        yield from hass.config_entries.flow.async_init('test')

    class Test2Flow(data_entry_flow.FlowHandler):
        VERSION = 3

        @asyncio.coroutine
        def async_step_init(self, user_input=None):
            return self.async_create_entry(
                title='Test 2 Title',
                data={
                    'username': '******'
                }
            )

    json_path = 'homeassistant.util.json.open'

    # Create a second entry with SAVE_DELAY zeroed so the save is scheduled
    # immediately, and capture what gets written to disk.
    with patch('homeassistant.config_entries.HANDLERS.get',
               return_value=Test2Flow), \
            patch.object(config_entries, 'SAVE_DELAY', 0):
        yield from hass.config_entries.flow.async_init('test')

        with patch(json_path, mock_open(), create=True) as mock_write:
            # To trigger the call_later
            yield from asyncio.sleep(0, loop=hass.loop)
            # To execute the save
            yield from hass.async_block_till_done()

    # Mock open calls are: open file, context enter, write, context leave
    written = mock_write.mock_calls[2][1][0]

    # Now load written data in new config manager
    manager = config_entries.ConfigEntries(hass, {})

    with patch('os.path.isfile', return_value=True), \
            patch(json_path, mock_open(read_data=written), create=True):
        yield from manager.async_load()

    # Ensure same order
    for orig, loaded in zip(hass.config_entries.async_entries(),
                            manager.async_entries()):
        assert orig.version == loaded.version
        assert orig.domain == loaded.domain
        assert orig.title == loaded.title
        assert orig.data == loaded.data
        assert orig.source == loaded.source
def test_add_entry_calls_setup_entry(hass, manager):
    """Test we call setup_config_entry."""
    mock_setup_entry = MagicMock(return_value=mock_coro(True))

    loader.set_component(
        'comp',
        MockModule('comp', async_setup_entry=mock_setup_entry))

    class TestFlow(data_entry_flow.FlowHandler):
        VERSION = 1

        @asyncio.coroutine
        def async_step_init(self, user_input=None):
            return self.async_create_entry(
                title='title',
                data={
                    'token': 'supersecret'
                })

    # The unrelated 'beer' handler must not interfere with 'comp'.
    with patch.dict(config_entries.HANDLERS, {'comp': TestFlow, 'beer': 5}):
        yield from manager.flow.async_init('comp')
        yield from hass.async_block_till_done()

    assert len(mock_setup_entry.mock_calls) == 1
    p_hass, p_entry = mock_setup_entry.mock_calls[0][1]

    assert p_hass is hass
    assert p_entry.data == {
        'token': 'supersecret'
    }
def test_connect_with_pkey(self, mock_serialization):
    """connect() loads the PEM private key and hands it to the connector."""
    mock_snowflake = Mock(name='mock_snowflake')
    mock_connector = mock_snowflake.connector
    mock_pkey = mock_serialization.load_pem_private_key.return_value = Mock(name='pkey')

    # need to patch this here so it can be imported in the function scope
    with patch.dict('sys.modules', snowflake=mock_snowflake):
        mock_connector.connect.return_value = 'OK'
        snowflake = SnowflakeDatabase(user='******',
                                      private_key_data='abcdefg',
                                      private_key_password='******',
                                      account='test_account',
                                      database='test_database')
        result = snowflake.connect()

    with self.subTest('returns connection'):
        self.assertEqual('OK', result)
    with self.subTest('connects with credentials'):
        self.assertEqual(
            mock_serialization.load_pem_private_key.call_count, 0) if False else None
        mock_serialization.load_pem_private_key.assert_called_once_with(b'abcdefg', b'1234', backend=ANY)
    # NOTE(review): this subTest label duplicates the previous one —
    # probably meant something like 'connects with private key'.
    with self.subTest('connects with credentials'):
        mock_connector.connect.assert_called_once_with(user='******',
                                                       password=None,
                                                       account='test_account',
                                                       database='test_database',
                                                       private_key=mock_pkey.private_bytes.return_value,
                                                       region=None,
                                                       warehouse=None)
def test_comunity_processmedia_logs(app, settings):
    """Processmedia log endpoint requires login and filters by log level."""
    # Anonymous access is forbidden.
    response = app.get('/comunity/processmedia_log', expect_errors=True)
    assert response.status_code == 403
    login(app)

    multi_mock_open = MultiMockOpen()
    multi_mock_open.add_handler(
        'processmedia.log',
        """
2000-01-01 00:00:00,000 - __main__ - INFO - Info test
2001-01-01 00:00:00,000 - __main__ - WARNING - Warning test
2002-01-01 00:00:00,000 - __main__ - ERROR - Error test
        """
    )
    with patch.dict(settings, {'static.processmedia2.log': 'processmedia.log'}):
        # rrrrrrr - kind of a hack using ComunityTrack._open .. but it works ..
        with patch.object(ComunityTrack, '_open', multi_mock_open.open):
            # Only WARNING and ERROR lines should come back.
            response = app.get('/comunity/processmedia_log?levels=WARNING,ERROR', expect_errors=True)
            assert 'Info test' not in response.text
            assert 'Warning test' in response.text
            assert 'Error test' in response.text

    logout(app)
def test_constructor_loads_info_from_constant():
    """Test non-dev mode loads info from SERVERS constant."""
    hass = MagicMock(data={})
    with patch.dict(cloud.SERVERS, {
        'beer': {
            'cognito_client_id': 'test-cognito_client_id',
            'user_pool_id': 'test-user_pool_id',
            'region': 'test-region',
            'relayer': 'test-relayer',
            'google_actions_sync_url': 'test-google_actions_sync_url',
        }
    }), patch('homeassistant.components.cloud.Cloud._fetch_jwt_keyset',
              return_value=mock_coro(True)):
        result = yield from cloud.async_setup(hass, {
            'cloud': {cloud.CONF_MODE: 'beer'}
        })
        assert result

    # Every field of the selected server definition is copied onto Cloud.
    cl = hass.data['cloud']
    assert cl.mode == 'beer'
    assert cl.cognito_client_id == 'test-cognito_client_id'
    assert cl.user_pool_id == 'test-user_pool_id'
    assert cl.region == 'test-region'
    assert cl.relayer == 'test-relayer'
    assert cl.google_actions_sync_url == 'test-google_actions_sync_url'
def test_env_token_badfile(self):
    """
    If I try to reference a bad secretfile in the environment
    it should complain, even if a valid secretfile is specified
    in the config file. This should apply to the token secret too.
    """
    with patch.dict('os.environ', {'authtkt_secretfile': "NOT_A_REAL_FILE_12345"}):
        self.assertRaises(FileNotFoundError, get_app, test_ini)
def test_agent_auth_unset(self):
    """Test connecting with no local keys and no ssh-agent configured"""
    # Empty SSH_AUTH_SOCK disables the agent; HOME=xxx hides local keys.
    with patch.dict(os.environ, HOME='xxx', SSH_AUTH_SOCK=''):
        with self.assertRaises(asyncssh.DisconnectError):
            yield from self.connect(username='******',
                                    known_hosts='.ssh/known_hosts')
def test_utcnow(self, mock_is_safe):
    """Test utcnow method."""
    now = dt_util.utcnow()
    # Pin the template environment's utcnow() so rendering is deterministic.
    with patch.dict(template.ENV.globals, {'utcnow': lambda: now}):
        assert now.isoformat() == \
            template.Template('{{ utcnow().isoformat() }}',
                              self.hass).render()
async def test_get_progress_flow_unauth(hass, client, hass_admin_user):
    """Test we cannot query the API for the result of a flow when unauthorized."""
    class TestFlow(core_ce.ConfigFlow):
        async def async_step_user(self, user_input=None):
            schema = OrderedDict()
            schema[vol.Required('username')] = str
            schema[vol.Required('password')] = str
            return self.async_show_form(
                step_id='user',
                data_schema=schema,
                errors={
                    'username': '******'
                }
            )

    with patch.dict(HANDLERS, {'test': TestFlow}):
        resp = await client.post('/api/config/config_entries/flow',
                                 json={'handler': 'test'})
        assert resp.status == 200
        data = await resp.json()

    # Strip the user's groups so they lose admin access.
    hass_admin_user.groups = []

    resp2 = await client.get(
        '/api/config/config_entries/flow/{}'.format(data['flow_id']))
    assert resp2.status == 401
def test_agent_auth_failure(self):
    """Test failure connecting with ssh-agent authentication"""
    # HOME=xxx hides local keys; agent_path points at a non-existent agent.
    with patch.dict(os.environ, HOME='xxx'):
        with self.assertRaises(asyncssh.DisconnectError):
            yield from self.connect(username='******', agent_path='xxx',
                                    known_hosts='.ssh/known_hosts')
def test_get_progress_index(hass, client):
    """Test querying for the flows that are in progress."""
    class TestFlow(core_ce.ConfigFlow):
        VERSION = 5

        @asyncio.coroutine
        def async_step_hassio(self, info):
            # Delegate straight to the account step.
            return (yield from self.async_step_account())

        @asyncio.coroutine
        def async_step_account(self, user_input=None):
            # Leave the flow pending by showing a form.
            return self.async_show_form(
                step_id='account',
            )

    with patch.dict(HANDLERS, {'test': TestFlow}):
        form = yield from hass.config_entries.flow.async_init(
            'test', context={'source': 'hassio'})

    resp = yield from client.get('/api/config/config_entries/flow')
    assert resp.status == 200
    data = yield from resp.json()
    assert data == [
        {
            'flow_id': form['flow_id'],
            'handler': 'test',
            'context': {'source': 'hassio'}
        }
    ]
def _init_server(self):
    "Start the notebook server in a separate process"
    self.server_command = command = [
        sys.executable,
        '-m', 'notebook',
        '--no-browser',
        '--notebook-dir', self.nbdir.name,
        '--NotebookApp.base_url=%s' % self.base_url,
    ]
    # ipc doesn't work on Windows, and darwin has crazy-long temp paths,
    # which run afoul of ipc's maximum path length.
    if sys.platform.startswith('linux'):
        command.append('--KernelManager.transport=ipc')
    self.stream_capturer = c = StreamCapturer()
    c.start()

    env = os.environ.copy()
    env.update(self.env)
    if self.engine == 'phantomjs':
        env['IPYTHON_ALLOW_DRAFT_WEBSOCKETS_FOR_PHANTOMJS'] = '1'
    self.server = subprocess.Popen(
        command,
        stdout=c.writefd,
        stderr=subprocess.STDOUT,
        cwd=self.nbdir.name,
        env=env,
    )
    # Resolve the runtime dir as the server will see it (HOME overridden),
    # so we can find the nbserver-<pid>.json info file it writes.
    with patch.dict('os.environ', {'HOME': self.home.name}):
        runtime_dir = jupyter_runtime_dir()
    self.server_info_file = os.path.join(
        runtime_dir,
        'nbserver-%i.json' % self.server.pid
    )
    self._wait_for_server()
def test_create_account(hass, client):
    """Test a flow that creates an account."""
    set_component(
        hass, 'test',
        MockModule('test', async_setup_entry=mock_coro_func(True)))

    class TestFlow(core_ce.ConfigFlow):
        VERSION = 1

        @asyncio.coroutine
        def async_step_user(self, user_input=None):
            # Create the entry immediately, without showing a form.
            return self.async_create_entry(
                title='Test Entry',
                data={'secret': 'account_token'}
            )

    with patch.dict(HANDLERS, {'test': TestFlow}):
        resp = yield from client.post('/api/config/config_entries/flow',
                                      json={'handler': 'test'})
        assert resp.status == 200
        data = yield from resp.json()
        data.pop('flow_id')
        assert data == {
            'handler': 'test',
            'title': 'Test Entry',
            'type': 'create_entry',
            'version': 1,
            'description': None,
            'description_placeholders': None,
        }
def test_media_url(self): filename = 'test' # automatic version is set on 15mins granularity. mins_granularity = int(int(time.strftime('%M')) / 4) * 4 time_id = '%s%s' % (time.strftime('%Y%m%d%H%m'), mins_granularity) media_id = self.amazon.media_id(filename) self.assertEqual('%s/%s' % (time_id, filename), media_id) self.assertEqual( self.amazon.url_for_media(media_id), 'https://acname.s3-us-east-1.amazonaws.com/%s' % media_id ) sub = 'test-sub' settings = { 'AMAZON_S3_SUBFOLDER': sub, 'MEDIA_PREFIX': 'https://acname.s3-us-east-1.amazonaws.com/' + sub } with patch.dict(self.app.config, settings): media_id = self.amazon.media_id(filename) self.assertEqual('%s/%s' % (time_id, filename), media_id) path = '%s/%s' % (sub, media_id) self.assertEqual( self.amazon.url_for_media(media_id), 'https://acname.s3-us-east-1.amazonaws.com/%s' % path ) with patch.object(self.amazon, 'client') as s3: self.amazon.get(media_id) self.assertTrue(s3.get_object.called) self.assertEqual( s3.get_object.call_args[1], dict(Bucket='acname', Key=path) )
def test_get_progress_flow(hass, client):
    """Test we can query the API for same result as we get from init a flow."""
    class TestFlow(core_ce.ConfigFlow):
        @asyncio.coroutine
        def async_step_user(self, user_input=None):
            schema = OrderedDict()
            schema[vol.Required('username')] = str
            schema[vol.Required('password')] = str
            return self.async_show_form(
                step_id='user',
                data_schema=schema,
                errors={
                    'username': '******'
                }
            )

    with patch.dict(HANDLERS, {'test': TestFlow}):
        resp = yield from client.post('/api/config/config_entries/flow',
                                      json={'handler': 'test'})
        assert resp.status == 200
        data = yield from resp.json()

    # Fetching the flow by id must return the same payload as the init call.
    resp2 = yield from client.get(
        '/api/config/config_entries/flow/{}'.format(data['flow_id']))
    assert resp2.status == 200
    data2 = yield from resp2.json()
    assert data == data2
def test_runtime_dir_env():
    """JUPYTER_RUNTIME_DIR overrides the computed runtime directory."""
    rtd_env = 'runtime-dir'
    with patch.dict('os.environ', {'JUPYTER_RUNTIME_DIR': rtd_env}):
        runtime = jupyter_runtime_dir()
        assert runtime == rtd_env
def test_jupyter_config_path_prefer_env():
    """JUPYTER_PREFER_ENV_PATH puts the env config path before the user one."""
    with patch.dict('os.environ', {'JUPYTER_PREFER_ENV_PATH': 'true'}):
        path = jupyter_config_path()
        assert path[0] == paths.ENV_CONFIG_PATH[0]
        assert path[1] == jupyter_config_dir()
def test_data_dir_env():
    """JUPYTER_DATA_DIR overrides the computed data directory."""
    data_env = 'runtime-dir'
    with patch.dict('os.environ', {'JUPYTER_DATA_DIR': data_env}):
        data = jupyter_data_dir()
        assert data == data_env
def test_raw_input_list_cmd1(self, ignored):
    # The '\l' list command is dispatched with its default 'global' scope.
    mocked_cmd = Mock()
    with patch.dict(self.pymp.commands, {'\l': mocked_cmd}):
        self.pymp.raw_input('>>> ')
        mocked_cmd.assert_called_once_with('global')
def test_searchEnv(self):
    """searchEnv reports matching NAME=value pairs from the patched env."""
    with patch.dict(os.environ, self.env):
        self.assertEqual(bash('searchEnv BAR RRRR'), 'BARRRRR=bazzzzz')
def test_dataloader_np_import_error(self):
    """DataLoader workers still start when numpy is unavailable."""
    # Mapping a module name to None in sys.modules makes `import numpy`
    # raise ImportError, simulating a numpy-less environment.
    with patch.dict('sys.modules', {'numpy': None}):
        loader = DataLoader([0, 1, 2], num_workers=2)
        iterator = iter(loader)
        self.assertIsInstance(iterator, _MultiProcessingDataLoaderIter)
async def test_options_flow_with_invalid_data(hass, client):
    """Test an options flow with invalid_data."""
    mock_integration(
        hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True))
    )

    class TestFlow(core_ce.ConfigFlow):
        @staticmethod
        @callback
        def async_get_options_flow(config_entry):
            class OptionsFlowHandler(data_entry_flow.FlowHandler):
                async def async_step_init(self, user_input=None):
                    # The multi_select only allows 'valid'; the default
                    # deliberately includes an out-of-range choice.
                    return self.async_show_form(
                        step_id="finish",
                        data_schema=vol.Schema(
                            {
                                vol.Required(
                                    "choices", default=["invalid", "valid"]
                                ): cv.multi_select({"valid": "Valid"})
                            }
                        ),
                    )

                async def async_step_finish(self, user_input=None):
                    return self.async_create_entry(
                        title="Enable disable", data=user_input
                    )

            return OptionsFlowHandler()

    MockConfigEntry(
        domain="test",
        entry_id="test1",
        source="bla",
    ).add_to_hass(hass)
    entry = hass.config_entries.async_entries()[0]

    # Start the options flow and check the serialized form.
    with patch.dict(HANDLERS, {"test": TestFlow}):
        url = "/api/config/config_entries/options/flow"
        resp = await client.post(url, json={"handler": entry.entry_id})

        assert resp.status == HTTPStatus.OK
        data = await resp.json()
        flow_id = data.pop("flow_id")
        assert data == {
            "type": "form",
            "handler": "test1",
            "step_id": "finish",
            "data_schema": [
                {
                    "default": ["invalid", "valid"],
                    "name": "choices",
                    "options": {"valid": "Valid"},
                    "required": True,
                    "type": "multi_select",
                }
            ],
            "description_placeholders": None,
            "errors": None,
            "last_step": None,
        }

    # Submitting an out-of-range choice must be rejected as malformed.
    with patch.dict(HANDLERS, {"test": TestFlow}):
        resp = await client.post(
            f"/api/config/config_entries/options/flow/{flow_id}",
            json={"choices": ["valid", "invalid"]},
        )
        assert resp.status == HTTPStatus.BAD_REQUEST
        data = await resp.json()
        assert data == {
            "message": "User input malformed: invalid is not a valid option for "
            "dictionary value @ data['choices']"
        }
from unittest import TestCase
from unittest.mock import MagicMock, call, patch

# The desk module reads the sector configuration at import time, so the
# config must be patched before importing Desk.
with patch.dict(
    "light_emitting_desk.utils.conf",
    {"sectors": {"back-of-desk": [[0, 47]], "monitor": [[53, 60], [48, 52]]}},
    clear=True,
):
    from light_emitting_desk.desk import Desk


class TestDesk(TestCase):
    """Test the Desk."""

    @patch.dict(
        "light_emitting_desk.utils.conf",
        {"sectors": {"back-of-desk": [[0, 47]], "monitor": [[53, 60], [48, 52]]}},
        clear=True,
    )
    def test_constructor(self):
        """Test it gets the right data."""
        # Ranges expand to index lists; descending ranges reverse.
        desk = Desk({"back-of-desk": [[0, 7]], "monitor": [[13, 11], [8, 10]]})
        self.assertEqual(
            desk.sectors, [[0, 1, 2, 3, 4, 5, 6, 7], [13, 12, 11], [8, 9, 10]]
        )
        self.assertEqual(desk.indeces, [0, 1, 2, 3, 4, 5, 6, 7, 13, 12, 11, 8, 9, 10])

    def test_setting_lights(self):
        """Test we can set a light to a colour."""
        # NOTE(review): the remainder of this test is not visible in this
        # excerpt — only the Desk construction survives here.
        desk = Desk({"foo": [[0, 19]]})
def test_Platform_android(self):
    """ANDROID_ARGUMENT in the environment marks the platform as android."""
    with patch.dict('os.environ', {'ANDROID_ARGUMENT': ''}):
        pf = _get_platform()
        # assertEqual gives a clearer failure message than
        # assertTrue(pf == ...) and matches the sibling platform tests.
        self.assertEqual(pf, 'android')
    # patch.dict removes the temporary variable on exit.
    self.assertNotIn('ANDROID_ARGUMENT', os.environ)
"""auxiliary file to check if the exceptionhook is configured properly used in test_logging.py """ import os import sys from pathlib import Path from unittest.mock import patch plugin_path = Path(__file__ + "/../../..") sys.path.append(str(plugin_path.resolve())) with patch.dict(os.environ): # logs must be shown for the test to work! if 'KRATOS_SALOME_PLUGIN_DISABLE_LOGGING' in os.environ: del os.environ['KRATOS_SALOME_PLUGIN_DISABLE_LOGGING'] if 'SALOME_ROOT_DIR' in os.environ: del os.environ['SALOME_ROOT_DIR'] import kratos_salome_plugin raise Exception("provocing error") # this should not be executed as execution should stop after the first exception! raise RuntimeError("This should not show up in the log!")
def test_searchEnvValues(self):
    """searchEnv.Values reports only the value of a matching env entry."""
    with patch.dict(os.environ, self.env):
        self.assertEqual(bash('searchEnv.Values BAR RRRR'), 'bazzzzz')
def test_searchEnvKeys(self):
    """searchEnv.Keys reports only the name of a matching env entry."""
    with patch.dict(os.environ, self.env):
        self.assertEqual(bash('searchEnv.Keys BAR RRRR'), 'BARRRRR')
def test_Platform_ios(self):
    """KIVY_BUILD=ios selects the 'ios' platform."""
    with patch.dict('os.environ', {'KIVY_BUILD': 'ios'}):
        pf = _get_platform()
        self.assertEqual(pf, 'ios')
    # patch.dict removes the temporary variable on exit.
    self.assertNotIn('KIVY_BUILD', os.environ)
def test_getEnv(self):
    """getEnv resolves a single variable from the patched environment."""
    with patch.dict(os.environ, self.env):
        self.assertEqual(bash('getEnv "FOOOOOO"'), 'barrrrr')
def test_Platform_android_with_p4a(self):
    """A python-for-android bootstrap env var selects 'android'."""
    with patch.dict('os.environ', {'P4A_BOOTSTRAP': 'sdl2'}):
        self.assertEqual(_get_platform(), 'android')
    # patch.dict removes the temporary variable on exit.
    self.assertNotIn('P4A_BOOTSTRAP', os.environ)
def test_Platform_android_with_android_argument(self):
    """ANDROID_ARGUMENT (even empty) selects the 'android' platform."""
    with patch.dict('os.environ', {'ANDROID_ARGUMENT': ''}):
        self.assertEqual(_get_platform(), 'android')
    # patch.dict removes the temporary variable on exit.
    self.assertNotIn('ANDROID_ARGUMENT', os.environ)
def mock_no_numpy():
    """Fixture: make `import numpy` fail while yielding scipy unchanged."""
    # None in sys.modules makes any `import numpy` raise ImportError.
    with patch.dict("sys.modules", {"numpy": None}):
        yield scipy
def test_Platform_android(self):
    """KIVY_BUILD=android selects the 'android' platform."""
    with patch.dict('os.environ', {'KIVY_BUILD': 'android'}):
        self.assertEqual(_get_platform(), 'android')
    # patch.dict removes the temporary variable on exit.
    self.assertNotIn('KIVY_BUILD', os.environ)
def test_aws_credentials_from_env_var(self, mock_boto3):
    """The AWS_REGION env var is picked up as the scaler's region."""
    with patch.dict(os.environ, {'AWS_REGION': 'us-west-1'}):
        aws_base_scaler = AwsBaseScaler(app_name, min_instances, max_instances)
        assert aws_base_scaler.aws_region == 'us-west-1'
def setup_method(self, method):
    """Build a DbQueryScaler against a fake VCAP_SERVICES binding."""
    vcap_string = json.dumps({'postgres': [{'credentials': {'uri': 'test-db-uri'}}]})
    # The scaler reads its DB URI from VCAP_SERVICES during construction.
    with patch.dict(os.environ, {'VCAP_SERVICES': vcap_string}):
        self.db_query_scaler = DbQueryScaler(app_name, min_instances, max_instances)
        self.db_query_scaler.query = 'foo'
#!/bin/env python import sys import unittest from unittest.mock import Mock from unittest.mock import patch from textwrap import dedent ats_mock = Mock() with patch.dict('sys.modules', {'ats': ats_mock}, autospec=True): import genie.parsergen from genie.parsergen import oper_fill from genie.parsergen import oper_check from genie.parsergen import oper_fill_tabular from genie.parsergen.examples.parsergen.pyAts import parsergen_demo_mkpg import xml.etree.ElementTree as ET from ats.topology import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError from genie.libs.parser.ios.show_interface import \ ShowIpInterfaceBriefPipeVlan,\ ShowInterfaces, ShowIpInterface,\ ShowIpv6Interface, \ ShowInterfacesAccounting, \ ShowIpInterfaceBriefPipeIp class test_show_interface_parsergen(unittest.TestCase):
def test_rule_with_no_language_in_exception_list(
        invalid_rules: RulesRepository):
    """Validate S501 metadata, then again with status forced to 'deprecated'."""
    s501 = invalid_rules.get_rule('S501')
    validate_rule_metadata(s501)
    # patch.dict also accepts a list of (key, value) pairs.
    with patch.dict(s501.generic_metadata, [('status', 'deprecated')]):
        validate_rule_metadata(s501)
async def test_two_step_options_flow(hass, client):
    """Test we can finish a two step options flow."""
    mock_integration(
        hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True))
    )

    class TestFlow(core_ce.ConfigFlow):
        @staticmethod
        @callback
        def async_get_options_flow(config_entry):
            class OptionsFlowHandler(data_entry_flow.FlowHandler):
                async def async_step_init(self, user_input=None):
                    # First step shows a form advancing to 'finish'.
                    return self.async_show_form(
                        step_id="finish", data_schema=vol.Schema({"enabled": bool})
                    )

                async def async_step_finish(self, user_input=None):
                    return self.async_create_entry(
                        title="Enable disable", data=user_input
                    )

            return OptionsFlowHandler()

    MockConfigEntry(
        domain="test",
        entry_id="test1",
        source="bla",
    ).add_to_hass(hass)
    entry = hass.config_entries.async_entries()[0]

    # Step 1: start the options flow and verify the serialized form.
    with patch.dict(HANDLERS, {"test": TestFlow}):
        url = "/api/config/config_entries/options/flow"
        resp = await client.post(url, json={"handler": entry.entry_id})

        assert resp.status == HTTPStatus.OK
        data = await resp.json()
        flow_id = data.pop("flow_id")
        assert data == {
            "type": "form",
            "handler": "test1",
            "step_id": "finish",
            "data_schema": [{"name": "enabled", "type": "boolean"}],
            "description_placeholders": None,
            "errors": None,
            "last_step": None,
        }

    # Step 2: submit the form and verify the created entry payload.
    with patch.dict(HANDLERS, {"test": TestFlow}):
        resp = await client.post(
            f"/api/config/config_entries/options/flow/{flow_id}",
            json={"enabled": True},
        )
        assert resp.status == HTTPStatus.OK
        data = await resp.json()
        data.pop("flow_id")
        assert data == {
            "handler": "test1",
            "type": "create_entry",
            "title": "Enable disable",
            "version": 1,
            "description": None,
            "description_placeholders": None,
        }
def patch_dict(self, *args, **kwargs):
    """Start a patch.dict and undo it automatically at test teardown."""
    dict_patcher = patch.dict(*args, **kwargs)
    # Register the undo before starting, so stop() runs even if the test
    # fails immediately after the patch is applied.
    self.addCleanup(dict_patcher.stop)
    return dict_patcher.start()
async def test_two_step_flow(hass, client, enable_custom_integrations):
    """Test we can finish a two step flow."""
    mock_integration(
        hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True))
    )
    mock_entity_platform(hass, "config_flow.test", None)

    class TestFlow(core_ce.ConfigFlow):
        VERSION = 1

        async def async_step_user(self, user_input=None):
            # First step shows a form whose submission goes to 'account'.
            return self.async_show_form(
                step_id="account", data_schema=vol.Schema({"user_title": str})
            )

        async def async_step_account(self, user_input=None):
            return self.async_create_entry(
                title=user_input["user_title"], data={"secret": "account_token"}
            )

    # Step 1: start the flow and verify the serialized form.
    with patch.dict(HANDLERS, {"test": TestFlow}):
        resp = await client.post(
            "/api/config/config_entries/flow", json={"handler": "test"}
        )
        assert resp.status == HTTPStatus.OK
        data = await resp.json()
        flow_id = data.pop("flow_id")
        assert data == {
            "type": "form",
            "handler": "test",
            "step_id": "account",
            "data_schema": [{"name": "user_title", "type": "string"}],
            "description_placeholders": None,
            "errors": None,
            "last_step": None,
        }

    # Step 2: submit the form; the entry must be created and loaded.
    with patch.dict(HANDLERS, {"test": TestFlow}):
        resp = await client.post(
            f"/api/config/config_entries/flow/{flow_id}",
            json={"user_title": "user-title"},
        )
        assert resp.status == HTTPStatus.OK

        entries = hass.config_entries.async_entries("test")
        assert len(entries) == 1

        data = await resp.json()
        data.pop("flow_id")
        assert data == {
            "handler": "test",
            "type": "create_entry",
            "title": "user-title",
            "version": 1,
            "result": {
                "disabled_by": None,
                "domain": "test",
                "entry_id": entries[0].entry_id,
                "source": core_ce.SOURCE_USER,
                "state": core_ce.ConfigEntryState.LOADED.value,
                "supports_options": False,
                "supports_remove_device": False,
                "supports_unload": False,
                "pref_disable_new_entities": False,
                "pref_disable_polling": False,
                "title": "user-title",
                "reason": None,
            },
            "description": None,
            "description_placeholders": None,
            "options": {},
        }
async def test_get_entries(hass, client):
    """Test get entries."""
    # Clear all registered handlers so only the ones below exist.
    with patch.dict(HANDLERS, clear=True):

        @HANDLERS.register("comp1")
        class Comp1ConfigFlow:
            """Config flow with options flow."""

            @staticmethod
            @callback
            def async_get_options_flow(config_entry):
                """Get options flow."""
                pass

            @classmethod
            @callback
            def async_supports_options_flow(cls, config_entry):
                """Return options flow support for this handler."""
                return True

        hass.helpers.config_entry_flow.register_discovery_flow(
            "comp2", "Comp 2", lambda: None
        )

        # Three entries covering: unloadable, setup-error and user-disabled.
        entry = MockConfigEntry(
            domain="comp1",
            title="Test 1",
            source="bla",
        )
        entry.supports_unload = True
        entry.add_to_hass(hass)
        MockConfigEntry(
            domain="comp2",
            title="Test 2",
            source="bla2",
            state=core_ce.ConfigEntryState.SETUP_ERROR,
            reason="Unsupported API",
        ).add_to_hass(hass)
        MockConfigEntry(
            domain="comp3",
            title="Test 3",
            source="bla3",
            disabled_by=core_ce.ConfigEntryDisabler.USER,
        ).add_to_hass(hass)

        resp = await client.get("/api/config/config_entries/entry")
        assert resp.status == HTTPStatus.OK
        data = await resp.json()
        # entry_id values are random; drop them before comparing.
        for entry in data:
            entry.pop("entry_id")
        assert data == [
            {
                "domain": "comp1",
                "title": "Test 1",
                "source": "bla",
                "state": core_ce.ConfigEntryState.NOT_LOADED.value,
                "supports_options": True,
                "supports_remove_device": False,
                "supports_unload": True,
                "pref_disable_new_entities": False,
                "pref_disable_polling": False,
                "disabled_by": None,
                "reason": None,
            },
            {
                "domain": "comp2",
                "title": "Test 2",
                "source": "bla2",
                "state": core_ce.ConfigEntryState.SETUP_ERROR.value,
                "supports_options": False,
                "supports_remove_device": False,
                "supports_unload": False,
                "pref_disable_new_entities": False,
                "pref_disable_polling": False,
                "disabled_by": None,
                "reason": "Unsupported API",
            },
            {
                "domain": "comp3",
                "title": "Test 3",
                "source": "bla3",
                "state": core_ce.ConfigEntryState.NOT_LOADED.value,
                "supports_options": False,
                "supports_remove_device": False,
                "supports_unload": False,
                "pref_disable_new_entities": False,
                "pref_disable_polling": False,
                "disabled_by": core_ce.ConfigEntryDisabler.USER,
                "reason": None,
            },
        ]
    jupyter_config_dir, jupyter_data_dir, jupyter_runtime_dir,
    jupyter_path, jupyter_config_path, ENV_JUPYTER_PATH,
    secure_write, is_hidden, is_file_hidden
)

from .mocking import darwin, windows, linux

pjoin = os.path.join

# Canned environment patchers shared by the path-resolution tests below.
# Each is a patch.dict that can be used as a decorator or context manager.
xdg_env = {
    'XDG_CONFIG_HOME': '/tmp/xdg/config',
    'XDG_DATA_HOME': '/tmp/xdg/data',
    'XDG_RUNTIME_DIR': '/tmp/xdg/runtime',
}
xdg = patch.dict('os.environ', xdg_env)
no_xdg = patch.dict('os.environ', {
    'XDG_CONFIG_HOME': '',
    'XDG_DATA_HOME': '',
    'XDG_RUNTIME_DIR': '',
})
appdata = patch.dict('os.environ', {'APPDATA': 'appdata'})

no_config_env = patch.dict('os.environ', {
    'JUPYTER_CONFIG_DIR': '',
    'JUPYTER_DATA_DIR': '',
    'JUPYTER_RUNTIME_DIR': '',
    'JUPYTER_PATH': '',
})
def cast_mock():
    """Mock pychromecast."""
    # Replace the pychromecast module so importing code gets a MagicMock.
    with patch.dict('sys.modules', {
        'pychromecast': MagicMock(),
    }):
        yield
def s3_client(self, monkeypatch):
    """Fixture: stub out boto3 and supply AWS secrets via prefect context."""
    with patch.dict("sys.modules", {"boto3": MagicMock()}):
        with prefect.context(secrets=dict(
                AWS_CREDENTIALS=dict(ACCESS_KEY=1, SECRET_ACCESS_KEY=42))):
            with set_temporary_config({"cloud.use_local_secrets": True}):
                yield
def test_raw_input_edit_cmd(self, ignored):
    # The '\e' edit command is dispatched with its default 'code' argument.
    mocked_cmd = Mock()
    with patch.dict(self.pymp.commands, {'\e': mocked_cmd}):
        self.pymp.raw_input('>>> ')
        mocked_cmd.assert_called_once_with('code')