async def test_update_stale(hass):
    """Test stalled update."""
    # Test platform scanner reports DEV1 as home before setup.
    scanner = get_component(hass, 'device_tracker.test').SCANNER
    scanner.reset()
    scanner.come_home('DEV1')

    # Two fixed instants: component setup time, then a scan one minute later.
    register_time = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC)
    scan_time = datetime(2015, 9, 15, 23, 1, tzinfo=dt_util.UTC)

    # Set up at register_time with consider_home=59s, so the 60s gap to
    # scan_time is enough for the device to go stale.
    with patch('homeassistant.components.device_tracker.dt_util.utcnow',
               return_value=register_time):
        with assert_setup_component(1, device_tracker.DOMAIN):
            assert await async_setup_component(hass, device_tracker.DOMAIN, {
                device_tracker.DOMAIN: {
                    CONF_PLATFORM: 'test',
                    device_tracker.CONF_CONSIDER_HOME: 59,
                }})
            await hass.async_block_till_done()

    assert STATE_HOME == \
        hass.states.get('device_tracker.dev1').state

    scanner.leave_home('DEV1')

    # Advance time past consider_home and trigger a rescan.
    with patch('homeassistant.components.device_tracker.dt_util.utcnow',
               return_value=scan_time):
        async_fire_time_changed(hass, scan_time)
        await hass.async_block_till_done()

    assert STATE_NOT_HOME == \
        hass.states.get('device_tracker.dev1').state
def mock_nessclient():
    """Mock the nessclient Client constructor.

    Replaces nessclient.Client with a Mock which always returns the same
    MagicMock() instance.
    """
    instance = MagicMock(MockClient())
    factory = MagicMock(return_value=instance)

    with MockDependency('nessclient'), \
            patch('nessclient.Client', new=factory, create=True), \
            patch('nessclient.ArmingState', new=MockArmingState):
        yield instance
async def testPOST(self):
    # Server reply: the bumped data version after a successful write.
    mocked_resp = {"new_data_version": 2}
    with asynctest.patch("aiohttp.request", fake_request(mocked_resp, 200)) as r:
        resp = await client.request("http://balrog.fake",
                                    "/api/scheduled_changes/1",
                                    method="POST",
                                    data={"when": 987654321},
                                    loop=self.loop)
        # POST must first fetch a CSRF token and echo it in the body.
        self.assertEqual(r.csrf_resp.headers, {"X-CSRF-Token": "foo"})
        self.assertEqual(json.loads(r.request_data),
                         {"csrf_token": "foo", "when": 987654321})
        self.assertEqual(mocked_resp, await resp.json())
def test_001b_request_no_json(self):
    # Fake aiohttp.request: validates the outgoing call, then returns a
    # response whose json() raises, forcing the fallback to text().
    @future_func
    def request(method, url, data, headers):
        eq_(method, 'get')
        eq_(url, 'url')
        eq_(data, '{"message": "text"}')
        eq_(headers, {
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        })
        m = Mock()
        m.status = 200

        @future_func
        def to_json():
            # Simulate a non-JSON body.
            raise ValueError
        m.json = to_json

        @future_func
        def to_text():
            return 'something'
        m.text = to_text
        return m

    with patch('aiohttp.request', request):
        response = self.loop.run_until_complete(
            self.bus._execute_request('url', 'get', {'message': 'text'})
        )
        # json is None when decoding fails; raw text is still available.
        eq_(response.status, 200)
        eq_(response.json, None)
        eq_(response.text().result(), 'something')
async def test_with_context_manager(self):
    """Verify cache_users_async emits a logging.debug call."""
    cache = {}
    client = asynctest.Mock(AsyncClient())
    with asynctest.patch("logging.debug") as debug_patch:
        await cache_users_async(client, cache)
    debug_patch.assert_called()
async def test_lights_on_when_sun_sets(hass, scanner):
    """Test lights go on when there is someone home and the sun sets."""
    # Freeze time at 01:02:03 UTC while the component is set up.
    test_time = datetime(2017, 4, 5, 1, 2, 3, tzinfo=dt_util.UTC)
    with patch('homeassistant.util.dt.utcnow', return_value=test_time):
        assert await async_setup_component(
            hass, device_sun_light_trigger.DOMAIN, {
                device_sun_light_trigger.DOMAIN: {}})

    # Start with the lights off.
    common_light.async_turn_off(hass)
    await hass.async_block_till_done()

    # Jump to 03:00 -- presumably past sunset for the fixture location
    # (TODO confirm) -- and fire the time-changed event.
    test_time = test_time.replace(hour=3)
    with patch('homeassistant.util.dt.utcnow', return_value=test_time):
        async_fire_time_changed(hass, test_time)
        await hass.async_block_till_done()

    assert light.is_on(hass)
async def test_loading_race_condition(hass):
    """Test only one storage load called when concurrent loading occurred ."""
    store = auth_store.AuthStore(hass)
    with asynctest.patch(
        'homeassistant.helpers.entity_registry.async_get_registry',
    ) as mock_ent_registry, asynctest.patch(
        'homeassistant.helpers.device_registry.async_get_registry',
    ) as mock_dev_registry, asynctest.patch(
        'homeassistant.helpers.storage.Store.async_load',
    ) as mock_load:
        # Kick off two concurrent loads of the same store.
        results = await asyncio.gather(
            store.async_get_users(),
            store.async_get_users(),
        )

        # Registries and backing storage must each be hit exactly once,
        # and both callers must see the same result.
        mock_ent_registry.assert_called_once_with(hass)
        mock_dev_registry.assert_called_once_with(hass)
        mock_load.assert_called_once_with()
        assert results[0] == results[1]
async def test_normal(self, tmpdir, data):
    """ReadFileStdin loads flows from rfile once running() fires."""
    rf = readfile.ReadFileStdin()
    with taddons.context(rf) as tctx:
        tf = tmpdir.join("tfile")
        with asynctest.patch('mitmproxy.master.Master.load_flow') as mck:
            tf.write(data.getvalue())
            tctx.configure(rf, rfile=str(tf))
            # Nothing is loaded before the addon starts running.
            assert not mck.awaited
            rf.running()
            # Yield once so the background load task can execute.
            await asyncio.sleep(0)
            assert mck.awaited
async def test_loading_race_condition(hass):
    """Test only one storage load called when concurrent loading occurred ."""
    with asynctest.patch(
        'homeassistant.helpers.entity_registry.EntityRegistry.async_load',
    ) as mock_load:
        # Request the registry twice, concurrently.
        results = await asyncio.gather(
            entity_registry.async_get_registry(hass),
            entity_registry.async_get_registry(hass),
        )

        # Storage was loaded only once; both callers share the instance.
        mock_load.assert_called_once_with()
        assert results[0] == results[1]
async def test_stdin(self, stdin, data, corrupt_data):
    """load_flows reads from the stdin buffer; corrupt input raises."""
    rf = readfile.ReadFileStdin()
    with taddons.context(rf):
        with asynctest.patch('mitmproxy.master.Master.load_flow') as mck:
            stdin.buffer = data
            assert not mck.awaited
            await rf.load_flows(stdin.buffer)
            assert mck.awaited

            # A corrupt stream must raise instead of loading silently.
            stdin.buffer = corrupt_data
            with pytest.raises(exceptions.FlowReadException):
                await rf.load_flows(stdin.buffer)
async def test_resume_data_save_loop(core, frequency):
    """
    Verifies save resume data is called, and called as often as specified
    in config.
    """
    # Run the core just long enough for int(run_time / frequency) saves.
    core_run_time = 0.61
    expected_runs = int(core_run_time / frequency)
    core.config['resume_data_save_frequency'] = frequency
    with patch('spritzle.daemon.resume_data.ResumeData.save_all') as mock_save:
        await core.start()
        await asyncio.sleep(core_run_time)
        # Allow a bit of slop so the test isn't so fragile
        assert expected_runs - 1 <= mock_save.call_count <= expected_runs + 1
        await core.stop()
async def testGET(self):
    # Canned server payload with two scheduled changes.
    mocked_resp = {
        "count": 2,
        "scheduled_changes": [
            {"sc_id": 1, "when": 123456789},
            {"sc_id": 2, "telemetry_product": "Firefox",
             "telemetry_channel": "release", "telemetry_uptake": 3000},
        ],
    }
    with asynctest.patch("aiohttp.request", fake_request(mocked_resp, 200)) as r:
        resp = await client.request("http://balrog.fake",
                                    "/api/scheduled_changes",
                                    loop=self.loop)
        # GET requests shouldn't retrieve a CSRF token
        self.assertEqual(r.csrf_resp, None)
        self.assertEqual(json.loads(r.request_data), {})
        self.assertEqual(mocked_resp, await resp.json())
async def test_user_flow(hass):
    """Test that config flow works."""
    flow = config_flow.ZhaFlowHandler()
    flow.hass = hass

    # Connection check fails -> flow reports a base error.
    with patch('homeassistant.components.zha.config_flow'
               '.check_zigpy_connection', return_value=False):
        result = await flow.async_step_user(
            user_input={'usb_path': '/dev/ttyUSB1', 'radio_type': 'ezsp'})

    assert result['errors'] == {'base': 'cannot_connect'}

    # Connection check succeeds -> entry created from the user input.
    with patch('homeassistant.components.zha.config_flow'
               '.check_zigpy_connection', return_value=True):
        result = await flow.async_step_user(
            user_input={'usb_path': '/dev/ttyUSB1', 'radio_type': 'ezsp'})

    assert result['type'] == 'create_entry'
    assert result['title'] == '/dev/ttyUSB1'
    assert result['data'] == {
        'usb_path': '/dev/ttyUSB1',
        'radio_type': 'ezsp'
    }
def test_001c_request_error(self):
    # Payload the bus is expected to publish when the HTTP call fails.
    error = {
        'endpoint': 'http://localhost:8080/None/api/url',
        'error': 'ClientOSError()',
        'data': {'message': 'text'}
    }
    with patch('aiohttp.request', side_effect=ClientOSError):
        with patch.object(self.bus, 'publish') as publish:
            exc = self.loop.run_until_complete(self.bus.request(
                None, 'url', 'get', data={'message': 'text'}
            ))
            publish.assert_called_once_with(error)
            # The exception object is returned, not raised.
            ok_(isinstance(exc, ClientOSError))
def test_001_copy_default(self):
    # Default conf file
    with open(self.default, 'w') as f:
        f.write('{"bus": {"jid": "test@localhost", "password": "******"}}')

    # Our conf file does not exist yet
    conf = os.path.join(self.dir.name, 'myconf.json')

    with patch('nyuki.config.DEFAULT_CONF_FILE', self.default):
        kwargs = {'config': conf}
        self.nyuki = Nyuki(**kwargs)

    # Check our conf is created from default
    with open(conf, 'r') as f:
        eq_(f.read(), '{"bus": {"jid": "test@localhost", "password": "******"}}')
async def test_lights_turn_on_when_coming_home_after_sun_set(hass, scanner):
    """Test lights turn on when coming home after sun set."""
    # 03:02 UTC -- presumably after sunset for the test location (verify).
    test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC)
    with patch('homeassistant.util.dt.utcnow', return_value=test_time):
        common_light.async_turn_off(hass)
        await hass.async_block_till_done()

        assert await async_setup_component(
            hass, device_sun_light_trigger.DOMAIN, {
                device_sun_light_trigger.DOMAIN: {}})

        # A tracked device arriving home should switch the lights on.
        hass.states.async_set(
            device_tracker.ENTITY_ID_FORMAT.format('device_2'), STATE_HOME)
        await hass.async_block_till_done()
        assert light.is_on(hass)
async def test_ensure_device_tracker_platform_validation(hass):
    """Test if platform validation was done."""
    async def mock_setup_scanner(hass, config, see, discovery_info=None):
        """Check that Qos was added by validation."""
        assert 'qos' in config

    with patch('homeassistant.components.mqtt.device_tracker.'
               'async_setup_scanner', autospec=True,
               side_effect=mock_setup_scanner) as mock_sp:
        dev_id = 'paulus'
        topic = '/location/paulus'
        assert await async_setup_component(hass, device_tracker.DOMAIN, {
            device_tracker.DOMAIN: {
                CONF_PLATFORM: 'mqtt',
                'devices': {dev_id: topic}
            }
        })
        # Scanner set up exactly once, with the validated (qos-added) config.
        assert mock_sp.call_count == 1
async def test_read(self, tmpdir, data, corrupt_data):
    """ReadFile loads flows from rfile; corrupt data is logged."""
    rf = readfile.ReadFile()
    with taddons.context(rf) as tctx:
        tf = tmpdir.join("tfile")

        with asynctest.patch('mitmproxy.master.Master.load_flow') as mck:
            tf.write(data.getvalue())
            tctx.configure(
                rf,
                rfile = str(tf),
                readfile_filter = ".*"
            )
            # No load before the addon starts running.
            assert not mck.awaited
            rf.running()
            # Yield once so the background load task can execute.
            await asyncio.sleep(0)
            assert mck.awaited

        # Corrupt file: running() should log an error instead of loading.
        tf.write(corrupt_data.getvalue())
        tctx.configure(rf, rfile=str(tf))
        rf.running()
        assert await tctx.master.await_log("corrupted")
def scanner(hass):
    """Initialize components."""
    # Test platform scanner with DEV1 marked as home.
    scanner = loader.get_component(
        hass, 'device_tracker.test').get_scanner(None, None)

    scanner.reset()
    scanner.come_home('DEV1')

    loader.get_component(hass, 'light.test').init()

    # Pre-seed known_devices.yaml with two tracked devices.
    with patch(
        'homeassistant.components.device_tracker.load_yaml_config_file',
        return_value={
            'device_1': {
                'hide_if_away': False,
                'mac': 'DEV1',
                'name': 'Unnamed Device',
                'picture': 'http://example.com/dev1.jpg',
                'track': True,
                'vendor': None
            },
            'device_2': {
                'hide_if_away': False,
                'mac': 'DEV2',
                'name': 'Unnamed Device',
                'picture': 'http://example.com/dev2.jpg',
                'track': True,
                'vendor': None}
        }):
        assert hass.loop.run_until_complete(async_setup_component(
            hass, device_tracker.DOMAIN, {
                device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}
            }))
        assert hass.loop.run_until_complete(async_setup_component(
            hass, light.DOMAIN, {
                light.DOMAIN: {CONF_PLATFORM: 'test'}
            }))

    return scanner
async def test_sending_mqtt_commands_and_optimistic(hass, mock_publish):
    """Test the sending MQTT commands in optimistic mode."""
    # Restore a previous 'on' state so the switch starts as ON.
    fake_state = ha.State('switch.test', 'on')

    with patch('homeassistant.helpers.restore_state.RestoreEntity'
               '.async_get_last_state',
               return_value=mock_coro(fake_state)):
        # No state_topic in the config -> the switch runs optimistically.
        assert await async_setup_component(hass, switch.DOMAIN, {
            switch.DOMAIN: {
                'platform': 'mqtt',
                'name': 'test',
                'command_topic': 'command-topic',
                'payload_on': 'beer on',
                'payload_off': 'beer off',
                'qos': '2'
            }
        })

    state = hass.states.get('switch.test')
    assert STATE_ON == state.state
    assert state.attributes.get(ATTR_ASSUMED_STATE)

    common.turn_on(hass, 'switch.test')
    await hass.async_block_till_done()

    # Payload published with qos=2, retain=False.
    mock_publish.async_publish.assert_called_once_with(
        'command-topic', 'beer on', 2, False)
    mock_publish.async_publish.reset_mock()
    state = hass.states.get('switch.test')
    assert STATE_ON == state.state

    common.turn_off(hass, 'switch.test')
    await hass.async_block_till_done()
    await hass.async_block_till_done()

    mock_publish.async_publish.assert_called_once_with(
        'command-topic', 'beer off', 2, False)
    # Optimistic: state flips without broker confirmation.
    state = hass.states.get('switch.test')
    assert STATE_OFF == state.state
def _test_method(self, method, params = None, result = None):
    """Call a client convenience method and verify it forwards to
    execute_rpc_method with the expected arguments and returns its value.
    """
    patcher = asynctest.patch('mymcadmin.rpc.RpcClient.execute_rpc_method')
    with patcher as execute_rpc_method:
        execute_rpc_method.return_value = result

        client = RpcClient(self.host, self.port)
        func = getattr(client, method)

        if params is None:
            response = func()
            execute_rpc_method.assert_called_with(method)
        else:
            response = func(**params)
            execute_rpc_method.assert_called_with(method, params)

    self.assertEqual(
        result,
        response,
        'Client did not return the expected result',
    )
def test_001a_request(self):
    # Fake aiohttp.request: validates the outgoing call and returns a
    # response whose json() yields a fixed payload.
    @future_func
    def request(method, url, data, headers):
        eq_(method, 'get')
        eq_(url, 'url')
        eq_(data, '{"message": "text"}')
        eq_(headers, {
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        })
        m = Mock()
        m.status = 200

        @future_func
        def to_json():
            return {'response': 'text'}
        m.json = to_json
        return m

    with patch('aiohttp.request', request):
        response = self.loop.run_until_complete(
            self.bus._execute_request('url', 'get', {'message': 'text'})
        )
        eq_(response.status, 200)
        eq_(response.json, {'response': 'text'})
async def test_batch_get_item(self):
    """
    TableConnection.batch_get_item
    """
    conn = TableConnection(self.test_table_name)

    # Ten items sharing a hash key, one per range key.
    items = [
        {"ForumName": "FooForum", "Subject": "thread-{0}".format(idx)}
        for idx in range(10)
    ]

    with patch(PATCH_METHOD) as req:
        req.return_value = DESCRIBE_TABLE_DATA
        await conn.describe_table()

    with patch(PATCH_METHOD) as req:
        req.return_value = {}
        await conn.batch_get_item(items)

        expected = {
            'ReturnConsumedCapacity': 'TOTAL',
            'RequestItems': {
                self.test_table_name: {
                    'Keys': [
                        {
                            'ForumName': {'S': 'FooForum'},
                            'Subject': {'S': 'thread-{0}'.format(idx)},
                        }
                        for idx in range(10)
                    ]
                }
            }
        }
        self.assertEqual(req.call_args[0][1], expected)
def volume_fixture():
    """Mock the device volume."""
    with patch("libsoundtouch.device.SoundTouchDevice.volume") as volume_mock:
        yield volume_mock
async def test_dump_data(hass):
    """Test that we cache data."""
    states = [
        State("input_boolean.b0", "on"),
        State("input_boolean.b1", "on"),
        State("input_boolean.b2", "on"),
    ]

    # b0: plain Entity (not RestoreEntity) -> must not be persisted.
    entity = Entity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b0"
    await entity.async_internal_added_to_hass()

    # b1: RestoreEntity registered with the helper -> must be persisted.
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b1"
    await entity.async_internal_added_to_hass()

    data = await RestoreStateData.async_get_instance(hass)
    now = dt_util.utcnow()
    # b4 gets an ancient timestamp so it counts as expired.
    data.last_states = {
        "input_boolean.b0": StoredState(State("input_boolean.b0", "off"), now),
        "input_boolean.b1": StoredState(State("input_boolean.b1", "off"), now),
        "input_boolean.b2": StoredState(State("input_boolean.b2", "off"), now),
        "input_boolean.b3": StoredState(State("input_boolean.b3", "off"), now),
        "input_boolean.b4": StoredState(
            State("input_boolean.b4", "off"),
            datetime(1985, 10, 26, 1, 22, tzinfo=dt_util.UTC),
        ),
    }

    with patch("homeassistant.helpers.restore_state.Store.async_save"
               ) as mock_write_data, patch.object(
                   hass.states, "async_all", return_value=states):
        await data.async_dump_states()

    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]

    # b0 should not be written, since it didn't extend RestoreEntity
    # b1 should be written, since it is present in the current run
    # b2 should not be written, since it is not registered with the helper
    # b3 should be written, since it is still not expired
    # b4 should not be written, since it is now expired
    assert len(written_states) == 2
    assert written_states[0]["state"]["entity_id"] == "input_boolean.b1"
    assert written_states[0]["state"]["state"] == "on"
    assert written_states[1]["state"]["entity_id"] == "input_boolean.b3"
    assert written_states[1]["state"]["state"] == "off"

    # Test that removed entities are not persisted
    await entity.async_remove()

    with patch("homeassistant.helpers.restore_state.Store.async_save"
               ) as mock_write_data, patch.object(
                   hass.states, "async_all", return_value=states):
        await data.async_dump_states()

    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]
    assert len(written_states) == 1
    assert written_states[0]["state"]["entity_id"] == "input_boolean.b3"
    assert written_states[0]["state"]["state"] == "off"
async def test_update_item(self):
    """
    TableConnection.update_item
    """
    conn = TableConnection(self.test_table_name)
    with patch(PATCH_METHOD) as req:
        req.return_value = DESCRIBE_TABLE_DATA
        await conn.describe_table()

    attr_updates = {
        'Subject': {
            'Value': 'foo-subject',
            'Action': 'PUT'
        },
    }

    # Expression-based update via actions=[...].
    with patch(PATCH_METHOD) as req:
        req.return_value = HttpOK(), {}
        await conn.update_item(
            'foo-key',
            actions=[Path('Subject').set('foo-subject')],
            range_key='foo-range-key',
        )
        params = {
            'Key': {
                'ForumName': {
                    'S': 'foo-key'
                },
                'Subject': {
                    'S': 'foo-range-key'
                }
            },
            'UpdateExpression': 'SET #0 = :0',
            'ExpressionAttributeNames': {
                '#0': 'Subject'
            },
            'ExpressionAttributeValues': {
                ':0': {
                    'S': 'foo-subject'
                }
            },
            'ReturnConsumedCapacity': 'TOTAL',
            'TableName': 'ci-table'
        }
        self.assertEqual(req.call_args[0][1], params)

    # Legacy attribute_updates form must produce the identical request.
    with patch(PATCH_METHOD) as req:
        req.return_value = HttpOK(), {}
        await conn.update_item(
            'foo-key',
            attribute_updates=attr_updates,
            range_key='foo-range-key',
        )
        params = {
            'Key': {
                'ForumName': {
                    'S': 'foo-key'
                },
                'Subject': {
                    'S': 'foo-range-key'
                }
            },
            'UpdateExpression': 'SET #0 = :0',
            'ExpressionAttributeNames': {
                '#0': 'Subject'
            },
            'ExpressionAttributeValues': {
                ':0': {
                    'S': 'foo-subject'
                }
            },
            'ReturnConsumedCapacity': 'TOTAL',
            'TableName': 'ci-table'
        }
        self.assertEqual(req.call_args[0][1], params)
def decorator(self, mocker, mock_cache):
    """Yield a cached() decorator wired to the mocked cache backend."""
    patched = patch("aiocache.decorators._get_cache", return_value=mock_cache)
    with patched:
        yield cached()
async def test_lights_turn_on_when_coming_home_after_sun_set_person(hass, scanner):
    """Test lights turn on when coming home after sun set."""
    device_1 = DT_ENTITY_ID_FORMAT.format("device_1")
    device_2 = DT_ENTITY_ID_FORMAT.format("device_2")

    # 03:02 UTC -- presumably after sunset for the test location (verify).
    test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC)
    with patch("homeassistant.util.dt.utcnow", return_value=test_time):
        await common_light.async_turn_off(hass)
        hass.states.async_set(device_1, STATE_NOT_HOME)
        hass.states.async_set(device_2, STATE_NOT_HOME)
        await hass.async_block_till_done()

        assert all(
            not light.is_on(hass, ent_id)
            for ent_id in hass.states.async_entity_ids("light")
        )
        assert hass.states.get(device_1).state == "not_home"
        assert hass.states.get(device_2).state == "not_home"

        # Trigger tracks only device_1, via person.me wrapped in a group.
        assert await async_setup_component(
            hass,
            "person",
            {"person": [{"id": "me", "name": "Me",
                         "device_trackers": [device_1]}]},
        )
        await group.Group.async_create_group(hass, "person_me", ["person.me"])
        assert await async_setup_component(
            hass,
            device_sun_light_trigger.DOMAIN,
            {device_sun_light_trigger.DOMAIN: {
                "device_group": "group.person_me"}},
        )

        assert all(
            not light.is_on(hass, ent_id)
            for ent_id in hass.states.async_entity_ids("light")
        )
        assert hass.states.get(device_1).state == "not_home"
        assert hass.states.get(device_2).state == "not_home"
        assert hass.states.get("person.me").state == "not_home"

        # Unrelated device has no impact
        hass.states.async_set(device_2, STATE_HOME)
        await hass.async_block_till_done()

        assert all(
            not light.is_on(hass, ent_id)
            for ent_id in hass.states.async_entity_ids("light")
        )
        assert hass.states.get(device_1).state == "not_home"
        assert hass.states.get(device_2).state == "home"
        assert hass.states.get("person.me").state == "not_home"

        # person home switches on
        hass.states.async_set(device_1, STATE_HOME)
        await hass.async_block_till_done()
        await hass.async_block_till_done()

        assert all(
            light.is_on(hass, ent_id)
            for ent_id in hass.states.async_entity_ids("light")
        )
        assert hass.states.get(device_1).state == "home"
        assert hass.states.get(device_2).state == "home"
        assert hass.states.get("person.me").state == "home"
async def test_get_controller_verify_ssl_false(hass):
    """Successful call with verify ssl set to false."""
    data = {**CONTROLLER_DATA, CONF_VERIFY_SSL: False}
    with patch("aiounifi.Controller.login", return_value=Mock()):
        assert await unifi.controller.get_controller(hass, **data)
async def test_get_controller_login_failed(hass):
    """Check that get_controller can handle a failed login."""
    with patch("aiounifi.Controller.login",
               side_effect=aiounifi.Unauthorized):
        with pytest.raises(unifi.errors.AuthenticationRequired):
            await unifi.controller.get_controller(hass, **CONTROLLER_DATA)
def save_history_songs_mock():
    """Patch mosbot.command.save_history_songs and yield the mock."""
    patcher = am.patch('mosbot.command.save_history_songs')
    with patcher as mock:
        yield mock
def mock_datetime(self):
    """Pin datetime.now() inside the instance view to a fixed instant."""
    frozen = datetime.datetime(2019, 6, 18, 22, 14, 0)
    with asynctest.patch(
        "paasta_tools.api.views.instance.datetime", autospec=True
    ) as dt_mock:
        dt_mock.datetime.now.return_value = frozen
        yield
def save_bot_data_mock():
    """Patch mosbot.command.save_bot_data and yield the mock."""
    patcher = am.patch('mosbot.command.save_bot_data')
    with patcher as mock:
        yield mock
async def test_shipToDataDog_basic(capfd):
    """Cover the early-exit paths: no metrics, empty payload, DD error."""
    with patch('datadog.api.Metric.send') as mocked_get:
        # No metrics supplied at all: nothing is shipped.
        response = await datadog.shipToDataDog('yes', 'on')
        assert mocked_get.call_count == 0  # ship method isn't called
        assert response is None

        # Shared rack block used by the node payloads below.
        rack = {
            "name": "ap-southeast-2a",
            "dataCentre": {
                "name": "AP_SOUTHEAST_2",
                "provider": "AWS_VPC",
                "customDCName": "KAFKA_VPC_DEVELOPMENT"
            },
            "providerAccount": {
                "name": "Lendi AWS Account",
                "provider": "AWS_VPC"
            }
        }

        # Simple metrics test - Payload empty from Instaclustr.
        metrics = [{
            "id": "00000000-0000-0000-0000-000000000001",
            "payload": [],
            "publicIp": "",
            "privateIp": "10.0.0.2",
            "rack": rack
        }]
        response = await datadog.shipToDataDog(
            'my_test_cluster', 'on', ic_tags=[], metrics=metrics)
        captured = capfd.readouterr()
        assert mocked_get.call_count == 0  # ship method isn't called
        assert ('Empty list from the instaclustr API for the cluster: '
                'my_test_cluster') in captured.out

        # Simple metrics test - DD Response != 'ok'
        mocked_get.return_value = {"status": 'Not_OK'}
        metrics = [{
            "id": "00000000-0000-0000-0000-000000000001",
            "payload": [
                {
                    "metric": "slaConsumerRecordsProcessed",
                    "type": "count",
                    "unit": "1",
                    "values": [{"value": "30.0",
                                "time": "2019-09-03T00:48:05.000Z"}]
                },
                {
                    "metric": "cpuUtilization",
                    "type": "percentage",
                    "unit": "1",
                    "values": [{"value": "1.61892901618929",
                                "time": "2019-09-03T00:48:05.000Z"}]
                }
            ],
            "publicIp": "",
            "privateIp": "10.0.0.2",
            "rack": rack
        }]
        response = await datadog.shipToDataDog(
            'yes', 'on', ic_tags=[], metrics=metrics)
        captured = capfd.readouterr()

        # Both shipped series carry the same node/rack tag set.
        node_tags = [
            "ic_node_id:00000000-0000-0000-0000-000000000001",
            "ic_cluster_id:yes",
            "ic_private_ip:10.0.0.2",
            "ic_rack_name:ap-southeast-2a",
            "ic_data_centre_custom_name:KAFKA_VPC_DEVELOPMENT",
            "ic_data_centre_name:AP_SOUTHEAST_2",
            "ic_data_centre_provider:AWS_VPC",
            "ic_provider_account_name:Lendi AWS Account",
            "ic_provider_account_provider:AWS_VPC",
            "region:ap-southeast-2",
            "availability_zone:ap-southeast-2a"
        ]
        mocked_get.assert_called_once_with([
            {
                "metric": "on.slaConsumerRecordsProcessed.count",
                "points": [(1567471685, 30.0)],
                "tags": node_tags
            },
            {
                "metric": "on.cpuUtilization.percentage",
                "points": [(1567471685, 1.61892901618929)],
                "tags": node_tags
            }
        ])
        assert 'Error sending metrics to DataDog: ' in captured.out
        assert response is None
async def test_shipToDataDog_topics(capfd):
    """Topic/consumer metrics: with and without a clientID tag."""
    with patch('datadog.api.Metric.send') as mocked_get:
        # Per-client consumer metrics (clientID present).
        metrics = [{
            "consumerGroup": "group-20",
            "topic": "test1",
            "clientID": "client-2",
            "payload": [
                {"metric": "consumerLag", "type": "count",
                 "unit": "messages",
                 "values": [{"value": "30.0",
                             "time": "2019-09-17T11:38:59.000Z"}]},
                {"metric": "consumerCount", "type": "count",
                 "unit": "consumers",
                 "values": [{"value": "1.0",
                             "time": "2019-09-17T11:38:59.000Z"}]}
            ]
        }]
        mocked_get.return_value = {"status": 'ok'}
        response = await datadog.shipToDataDog(
            'yes', 'on', ic_tags=[], metrics=metrics)

        client_tags = [
            "ic_cluster_id:yes",
            "topic:test1",
            "consumerGroup:group-20",
            "clientID:client-2"
        ]
        mocked_get.assert_called_once_with([
            {"metric": "on.consumerLag.count",
             "points": [(1568720339, 30.0)],
             "tags": client_tags},
            {"metric": "on.consumerCount.count",
             "points": [(1568720339, 1.0)],
             "tags": client_tags}
        ])
        assert response == 'ok'

        mocked_get.reset_mock()

        # Per-group metrics (no clientID).
        metrics = [{
            "consumerGroup": "group-20",
            "topic": "test1",
            "payload": [
                {"metric": "consumerGroupLag", "type": "count",
                 "unit": "messages",
                 "values": [{"value": "30.0",
                             "time": "2019-09-17T11:52:45.000Z"}]},
                {"metric": "clientCount", "type": "count",
                 "unit": "clients",
                 "values": [{"value": "1.0",
                             "time": "2019-09-17T11:52:45.000Z"}]}
            ]
        }]
        response = await datadog.shipToDataDog(
            'yes', 'on', ic_tags=[], metrics=metrics)

        group_tags = [
            "ic_cluster_id:yes",
            "topic:test1",
            "consumerGroup:group-20"
        ]
        mocked_get.assert_called_once_with([
            {"metric": "on.consumerGroupLag.count",
             "points": [(1568721165, 30.0)],
             "tags": group_tags},
            {"metric": "on.clientCount.count",
             "points": [(1568721165, 1.0)],
             "tags": group_tags}
        ])
async def _do_method(self, version = None, forge = None,
                     forge_installer = None, forge_args = None):
    """Shared driver for server_create RPC tests (vanilla and Forge)."""
    server_id = 'testification'
    server_path = os.path.join(self.root, server_id)

    with unittest.mock.patch('os.path.exists') as exists, \
            unittest.mock.patch('mymcadmin.server.Server') as server, \
            unittest.mock.patch('os.mkdir') as mkdir, \
            asynctest.patch('asyncio.create_subprocess_exec') as subproc:
        # Server directory does not exist yet.
        exists.return_value = False

        server.download_server_jar.return_value = 'minecraft-{}.jar'.format(
            version or 'latest',
        )

        mock_proc = asynctest.Mock(spec = asyncio.subprocess.Process)

        # The Server class mock doubles as its own instance.
        server.return_value = server
        server.settings = {}
        server.start = asynctest.CoroutineMock()
        server.start.return_value = mock_proc

        if forge_installer:
            installer_path = os.path.join(
                server_path,
                'forge-{}-latest-installer.jar'.format(version),
            )
            forge_path = os.path.join(
                server_path,
                'forge-{}-latest-universal.jar'.format(version),
            )
            forge_installer.return_value = (installer_path, forge_path)

            # The subprocess mock doubles as its own process handle.
            subproc.return_value = subproc

        if forge_args is None:
            forge_args = []

        result = await self.manager.rpc_command_server_create(
            server_id = server_id,
            version = version,
            forge = forge,
        )

        self.assertEqual(
            server_id,
            result,
            'JSON RPC resonse did not match expected',
        )

        mkdir.assert_called_with(server_path)
        server.download_server_jar.assert_called_with(
            version,
            path = server_path,
        )
        server.assert_called_with(server_path)
        mock_proc.wait.assert_called_with()
        server.agree_to_eula.assert_called_with(
            path = server_path,
        )

        if forge_installer:
            forge_installer.assert_called_with(
                version,
                *forge_args,
                path = server_path,
            )
            # Installer is run headless in the server directory.
            subproc.assert_called_with(
                server.java,
                '-jar',
                installer_path,
                '--installServer',
                cwd = server_path,
                stdin = asyncio.subprocess.PIPE,
                stdout = asyncio.subprocess.PIPE,
                stderr = asyncio.subprocess.PIPE,
            )
            subproc.wait.assert_called_with()

            self.assertDictEqual(
                {'jar': os.path.basename(forge_path)},
                server.settings,
                'Settings were not updated',
            )
            server.save_settings.assert_called_with()
async def test_get_controller_controller_unavailable(hass):
    """Check that get_controller can handle controller being unavailable."""
    with patch("aiounifi.Controller.login",
               side_effect=aiounifi.RequestError):
        with pytest.raises(unifi.errors.CannotConnect):
            await unifi.controller.get_controller(hass, **CONTROLLER_DATA)
async def test_shipToDataDog_complex(capfd):
    """Happy path for node metrics, then a send() exception."""
    with patch('datadog.api.Metric.send') as mocked_get:
        # Simple metrics test - DD Response == 'ok
        metrics = [{
            "id": "00000000-0000-0000-0000-000000000001",
            "payload": [
                {
                    "metric": "slaConsumerRecordsProcessed",
                    "type": "count",
                    "unit": "1",
                    "values": [{"value": "30.0",
                                "time": "2019-09-03T00:48:05.000Z"}]
                },
                {
                    "metric": "cpuUtilization",
                    "type": "percentage",
                    "unit": "1",
                    "values": [{"value": "1.61892901618929",
                                "time": "2019-09-03T00:48:05.000Z"}]
                }
            ],
            "publicIp": "",
            "privateIp": "10.0.0.2",
            "rack": {
                "name": "ap-southeast-2a",
                "dataCentre": {
                    "name": "AP_SOUTHEAST_2",
                    "provider": "AWS_VPC",
                    "customDCName": "KAFKA_VPC_DEVELOPMENT"
                },
                "providerAccount": {
                    "name": "Lendi AWS Account",
                    "provider": "AWS_VPC"
                }
            }
        }]
        mocked_get.return_value = {"status": 'ok'}
        response = await datadog.shipToDataDog(
            'yes', 'on', ic_tags=[], metrics=metrics)

        # Both series carry the identical node/rack tag set.
        node_tags = [
            "ic_node_id:00000000-0000-0000-0000-000000000001",
            "ic_cluster_id:yes",
            "ic_private_ip:10.0.0.2",
            "ic_rack_name:ap-southeast-2a",
            "ic_data_centre_custom_name:KAFKA_VPC_DEVELOPMENT",
            "ic_data_centre_name:AP_SOUTHEAST_2",
            "ic_data_centre_provider:AWS_VPC",
            "ic_provider_account_name:Lendi AWS Account",
            "ic_provider_account_provider:AWS_VPC",
            "region:ap-southeast-2",
            "availability_zone:ap-southeast-2a"
        ]
        mocked_get.assert_called_once_with([
            {
                "metric": "on.slaConsumerRecordsProcessed.count",
                "points": [(1567471685, 30.0)],
                "tags": node_tags
            },
            {
                "metric": "on.cpuUtilization.percentage",
                "points": [(1567471685, 1.61892901618929)],
                "tags": node_tags
            }
        ])
        assert response == 'ok'

        # Throw exception from the api.Metric.send method
        mocked_get.side_effect = Exception(
            'api.Metric.send failed to send to DD')
        response = await datadog.shipToDataDog(
            'yes', 'on', ic_tags=[], metrics=metrics)
        captured = capfd.readouterr()
        assert mocked_get.call_count == 2
        assert 'Could not send metrics to DataDog' in captured.out
async def test_get_controller_unknown_error(hass):
    """Check that get_controller can handle unknown errors."""
    with patch("aiounifi.Controller.login",
               side_effect=aiounifi.AiounifiException):
        with pytest.raises(unifi.errors.AuthenticationRequired):
            await unifi.controller.get_controller(hass, **CONTROLLER_DATA)
def decorator(self, mocker, mock_cache):
    """Yield a multi_cached decorator wired to the mocked cache backend."""
    patched = patch("aiocache.decorators._get_cache", return_value=mock_cache)
    with patched:
        yield multi_cached(keys_from_attr="keys")
async def setup_unifi_integration(
    hass,
    config=ENTRY_CONFIG,
    options=ENTRY_OPTIONS,
    sites=SITES,
    clients_response=None,
    devices_response=None,
    clients_all_response=None,
    wlans_response=None,
    known_wireless_clients=None,
    controllers=None,
):
    """Create the UniFi controller.

    Sets up the unifi integration with a mocked aiounifi Controller and
    returns the controller object (or None if setup did not register one).
    The canned `*_response` payloads are served once each for the matching
    REST path; every request made is recorded on `controller.mock_requests`.
    """
    configuration = {}
    if controllers:
        configuration = {unifi.DOMAIN: {unifi.CONF_CONTROLLERS: controllers}}

    assert await async_setup_component(hass, unifi.DOMAIN, configuration)

    config_entry = MockConfigEntry(
        domain=unifi.DOMAIN,
        data=deepcopy(config),
        options=deepcopy(options),
        entry_id=1,
    )
    config_entry.add_to_hass(hass)

    if known_wireless_clients:
        hass.data[UNIFI_WIRELESS_CLIENTS].update_data(
            known_wireless_clients, config_entry)

    # One deque per endpoint: each queued payload is consumed exactly once.
    mock_client_responses = deque()
    if clients_response:
        mock_client_responses.append(clients_response)

    mock_device_responses = deque()
    if devices_response:
        mock_device_responses.append(devices_response)

    mock_client_all_responses = deque()
    if clients_all_response:
        mock_client_all_responses.append(clients_all_response)

    mock_wlans_responses = deque()
    if wlans_response:
        mock_wlans_responses.append(wlans_response)

    mock_requests = []

    async def mock_request(self, method, path, json=None):
        # Record the request so tests can assert on traffic afterwards,
        # then serve the next canned payload for the matching path.
        mock_requests.append({"method": method, "path": path, "json": json})

        if path == "s/{site}/stat/sta" and mock_client_responses:
            return mock_client_responses.popleft()
        if path == "s/{site}/stat/device" and mock_device_responses:
            return mock_device_responses.popleft()
        if path == "s/{site}/rest/user" and mock_client_all_responses:
            return mock_client_all_responses.popleft()
        if path == "s/{site}/rest/wlanconf" and mock_wlans_responses:
            return mock_wlans_responses.popleft()
        return {}

    # "aiounifi.Controller.start_websocket", return_value=True
    with patch("aiounifi.Controller.login", return_value=True), patch(
            "aiounifi.Controller.sites", return_value=sites), patch(
            "aiounifi.Controller.request", new=mock_request), patch.object(
            aiounifi.websocket.WSClient, "start", return_value=True):
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

    controller_id = unifi.get_controller_id_from_config_entry(config_entry)
    if controller_id not in hass.data[unifi.DOMAIN]:
        # Setup failed (e.g. authentication error) — nothing registered.
        return None
    controller = hass.data[unifi.DOMAIN][controller_id]

    # Expose the mocks so tests can queue more responses / inspect calls.
    controller.mock_client_responses = mock_client_responses
    controller.mock_device_responses = mock_device_responses
    controller.mock_client_all_responses = mock_client_all_responses
    controller.mock_wlans_responses = mock_wlans_responses
    controller.mock_requests = mock_requests

    return controller
async def test_form_import(hass):
    """Test we get the form with import source."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    mocked_elk = mock_elk(invalid_auth=False)
    # Patch the Elk client and both setup hooks so no real device is needed.
    with patch(
        "homeassistant.components.elkm1.config_flow.elkm1.Elk",
        return_value=mocked_elk,
    ), patch(
        "homeassistant.components.elkm1.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.elkm1.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data={
                "host": "elks://1.2.3.4",
                "username": "******",
                "password": "******",
                "temperature_unit": "C",
                "auto_configure": False,
                "keypad": {
                    "enabled": True,
                    "exclude": [],
                    "include": [[1, 1], [2, 2], [3, 3]],
                },
                "output": {"enabled": False, "exclude": [], "include": []},
                "counter": {"enabled": False, "exclude": [], "include": []},
                "plc": {"enabled": False, "exclude": [], "include": []},
                "prefix": "ohana",
                "setting": {"enabled": False, "exclude": [], "include": []},
                "area": {"enabled": False, "exclude": [], "include": []},
                "task": {"enabled": False, "exclude": [], "include": []},
                "thermostat": {"enabled": False, "exclude": [], "include": []},
                "zone": {
                    "enabled": True,
                    "exclude": [[15, 15], [28, 208]],
                    "include": [],
                },
            },
        )

        # The import must create an entry carrying the full imported config.
        assert result["type"] == "create_entry"
        assert result["title"] == "ohana"
        assert result["data"] == {
            "auto_configure": False,
            "host": "elks://1.2.3.4",
            "keypad": {
                "enabled": True,
                "exclude": [],
                "include": [[1, 1], [2, 2], [3, 3]],
            },
            "output": {"enabled": False, "exclude": [], "include": []},
            "password": "******",
            "plc": {"enabled": False, "exclude": [], "include": []},
            "prefix": "ohana",
            "setting": {"enabled": False, "exclude": [], "include": []},
            "area": {"enabled": False, "exclude": [], "include": []},
            "counter": {"enabled": False, "exclude": [], "include": []},
            "task": {"enabled": False, "exclude": [], "include": []},
            "temperature_unit": "C",
            "thermostat": {"enabled": False, "exclude": [], "include": []},
            "username": "******",
            "zone": {
                "enabled": True,
                "exclude": [[15, 15], [28, 208]],
                "include": [],
            },
        }
        await hass.async_block_till_done()
        # Both setup hooks should have been invoked exactly once.
        assert len(mock_setup.mock_calls) == 1
        assert len(mock_setup_entry.mock_calls) == 1
async def test_dump_data(hass):
    """Test that we cache data."""
    states = [
        State('input_boolean.b0', 'on'),
        State('input_boolean.b1', 'on'),
        State('input_boolean.b2', 'on'),
    ]

    # b0 is a plain Entity (not restorable), b1 a RestoreEntity.
    entity = Entity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b0'
    await entity.async_internal_added_to_hass()

    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = 'input_boolean.b1'
    await entity.async_internal_added_to_hass()

    data = await RestoreStateData.async_get_instance(hass)
    now = dt_util.utcnow()
    # b4 is given an ancient timestamp so it counts as expired.
    data.last_states = {
        'input_boolean.b0': StoredState(State('input_boolean.b0', 'off'), now),
        'input_boolean.b1': StoredState(State('input_boolean.b1', 'off'), now),
        'input_boolean.b2': StoredState(State('input_boolean.b2', 'off'), now),
        'input_boolean.b3': StoredState(State('input_boolean.b3', 'off'), now),
        'input_boolean.b4': StoredState(
            State('input_boolean.b4', 'off'),
            datetime(1985, 10, 26, 1, 22, tzinfo=dt_util.UTC)),
    }

    with patch('homeassistant.helpers.restore_state.Store.async_save'
               ) as mock_write_data, patch.object(
                   hass.states, 'async_all', return_value=states):
        await data.async_dump_states()

    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]

    # b0 should not be written, since it didn't extend RestoreEntity
    # b1 should be written, since it is present in the current run
    # b2 should not be written, since it is not registered with the helper
    # b3 should be written, since it is still not expired
    # b4 should not be written, since it is now expired
    assert len(written_states) == 2
    assert written_states[0]['state']['entity_id'] == 'input_boolean.b1'
    assert written_states[0]['state']['state'] == 'on'
    assert written_states[1]['state']['entity_id'] == 'input_boolean.b3'
    assert written_states[1]['state']['state'] == 'off'

    # Test that removed entities are not persisted
    await entity.async_remove()

    with patch('homeassistant.helpers.restore_state.Store.async_save'
               ) as mock_write_data, patch.object(
                   hass.states, 'async_all', return_value=states):
        await data.async_dump_states()

    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]
    assert len(written_states) == 1
    assert written_states[0]['state']['entity_id'] == 'input_boolean.b3'
    assert written_states[0]['state']['state'] == 'off'
async def test_sensors(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test the creation and values of the WLED sensors."""
    entry = await init_integration(hass, aioclient_mock, skip_setup=True)
    registry = await hass.helpers.entity_registry.async_get_registry()

    # Pre-create registry entries for disabled by default sensors
    registry.async_get_or_create(
        SENSOR_DOMAIN,
        DOMAIN,
        "aabbccddeeff_uptime",
        suggested_object_id="wled_rgb_light_uptime",
        disabled_by=None,
    )
    registry.async_get_or_create(
        SENSOR_DOMAIN,
        DOMAIN,
        "aabbccddeeff_free_heap",
        suggested_object_id="wled_rgb_light_free_memory",
        disabled_by=None,
    )

    # Setup
    # Freeze time so the uptime sensor value is deterministic.
    test_time = datetime(2019, 11, 11, 9, 10, 32, tzinfo=dt_util.UTC)
    with patch(
        "homeassistant.components.wled.sensor.utcnow", return_value=test_time
    ):
        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()

    state = hass.states.get("sensor.wled_rgb_light_estimated_current")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:power"
    assert state.attributes.get(ATTR_LED_COUNT) == 30
    assert state.attributes.get(ATTR_MAX_POWER) == 850
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == CURRENT_MA
    assert state.state == "470"

    entry = registry.async_get("sensor.wled_rgb_light_estimated_current")
    assert entry
    assert entry.unique_id == "aabbccddeeff_estimated_current"

    state = hass.states.get("sensor.wled_rgb_light_uptime")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:clock-outline"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None
    assert state.state == "2019-11-11T09:10:00+00:00"

    entry = registry.async_get("sensor.wled_rgb_light_uptime")
    assert entry
    assert entry.unique_id == "aabbccddeeff_uptime"

    state = hass.states.get("sensor.wled_rgb_light_free_memory")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:memory"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == DATA_BYTES
    assert state.state == "14600"

    entry = registry.async_get("sensor.wled_rgb_light_free_memory")
    assert entry
    assert entry.unique_id == "aabbccddeeff_free_heap"
def driver():
    """Patch AccessoryDriver without zeroconf or HAPServer."""
    # Stub out the network-facing pieces so the driver can be built offline.
    with patch("pyhap.accessory_driver.HAPServer"):
        with patch("pyhap.accessory_driver.Zeroconf"):
            with patch("pyhap.accessory_driver.AccessoryDriver.persist"):
                yield AccessoryDriver()
async def test_put_item(self):
    """ TableConnection.put_item """
    conn = TableConnection(self.test_table_name)
    # Prime the connection with the table schema first.
    with patch(PATCH_METHOD) as req:
        req.return_value = DESCRIBE_TABLE_DATA
        await conn.describe_table()

    # Basic put: hash key, range key and one attribute.
    with patch(PATCH_METHOD) as req:
        req.return_value = {}
        await conn.put_item(
            'foo-key',
            range_key='foo-range-key',
            attributes={'ForumName': 'foo-value'})
        params = {
            'ReturnConsumedCapacity': 'TOTAL',
            'TableName': self.test_table_name,
            'Item': {
                'ForumName': {
                    'S': 'foo-value'
                },
                'Subject': {
                    'S': 'foo-range-key'
                }
            }
        }
        self.assertEqual(req.call_args[0][1], params)

    # Same request again; key order in params dict is irrelevant.
    with patch(PATCH_METHOD) as req:
        req.return_value = {}
        await conn.put_item(
            'foo-key',
            range_key='foo-range-key',
            attributes={'ForumName': 'foo-value'})
        params = {
            'ReturnConsumedCapacity': 'TOTAL',
            'Item': {
                'ForumName': {
                    'S': 'foo-value'
                },
                'Subject': {
                    'S': 'foo-range-key'
                }
            },
            'TableName': self.test_table_name
        }
        self.assertEqual(req.call_args[0][1], params)

    # Conditional put via a Path condition expression.
    with patch(PATCH_METHOD) as req:
        req.return_value = HttpOK(), {}
        await conn.put_item(
            'foo-key',
            range_key='foo-range-key',
            attributes={'ForumName': 'foo-value'},
            condition=Path('ForumName').does_not_exist())
        params = {
            'ReturnConsumedCapacity': 'TOTAL',
            'Item': {
                'ForumName': {
                    'S': 'foo-value'
                },
                'Subject': {
                    'S': 'foo-range-key'
                }
            },
            'TableName': self.test_table_name,
            'ConditionExpression': 'attribute_not_exists (#0)',
            'ExpressionAttributeNames': {
                '#0': 'ForumName'
            }
        }
        self.assertEqual(req.call_args[0][1], params)

    # Legacy `expected` form translates to the same condition expression.
    with patch(PATCH_METHOD) as req:
        req.return_value = HttpOK(), {}
        await conn.put_item(
            'foo-key',
            range_key='foo-range-key',
            attributes={'ForumName': 'foo-value'},
            conditional_operator='and',
            expected={'ForumName': {
                'Exists': False
            }})
        params = {
            'ReturnConsumedCapacity': 'TOTAL',
            'Item': {
                'ForumName': {
                    'S': 'foo-value'
                },
                'Subject': {
                    'S': 'foo-range-key'
                }
            },
            'TableName': self.test_table_name,
            'ConditionExpression': 'attribute_not_exists (#0)',
            'ExpressionAttributeNames': {
                '#0': 'ForumName'
            }
        }
        self.assertEqual(req.call_args[0][1], params)
async def test_create_schedule_request(
    create_schedule_response: MagicMock,
) -> None:
    """Unit test-cases for /switcher/create_schedule request.

    Args:
        create_schedule_response: fixture of mocked
            ``SwitcherV2CreateScheduleResponseMSG`` object.

    """
    with patch(
        "request_handlers.SwitcherV2Api.create_schedule",
        return_value=create_schedule_response,
    ):
        async with ClientSession() as session:
            selected_test_day = WEEKDAY_TUP[get_next_weekday()]

            # Valid schedule data -> success.
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "20",
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_HOURS: "20",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 200
                body = await response.json()
                assert body[consts.KEY_SUCCESSFUL]

            # start_hours out of range (24).
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "24",
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_HOURS: "20",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (bs4scrap.text ==
                        "Error: Unknown start_hours, accepts 0 to 23.")

            # start_minutes out of range (60).
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "23",
                        consts.PARAM_START_MINUTES: "60",
                        consts.PARAM_STOP_HOURS: "20",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (bs4scrap.text ==
                        "Error: Unknown start_minutes, accepts 0 to 59.")

            # stop_hours out of range (24).
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "23",
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_HOURS: "24",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (bs4scrap.text ==
                        "Error: Unknown stop_hours, accepts 0 to 23.")

            # stop_minutes out of range (60).
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "23",
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_HOURS: "20",
                        consts.PARAM_STOP_MINUTES: "60",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (bs4scrap.text ==
                        "Error: Unknown stop_minutes, accepts 0 to 59.")

            # Missing start_hours argument.
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_HOURS: "20",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (
                    bs4scrap.text == "Error: Argument start_hours is missing.")

            # Missing start_minutes argument.
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "23",
                        consts.PARAM_STOP_HOURS: "20",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (bs4scrap.text ==
                        "Error: Argument start_minutes is missing.")

            # Missing stop_hours argument.
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "23",
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (
                    bs4scrap.text == "Error: Argument stop_hours is missing.")

            # Missing stop_minutes argument.
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "23",
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_HOURS: "20",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (
                    bs4scrap.text == "Error: Argument stop_minutes is missing.")

            # Unrecognized day name.
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: ["Fakeday"],
                        consts.PARAM_START_HOURS: "20",
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_HOURS: "20",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (
                    bs4scrap.text ==
                    "Error: Unrecognized day requests, check documentation.")

            # No json body at all.
            async with session.put(URL_CREATE_SCHEDULE) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert bs4scrap.text == "Error: Json body is missing."

            # A STATE message type from the api means the request failed.
            create_schedule_response.msg_type = ResponseMessageType.STATE
            async with session.put(
                URL_CREATE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_DAYS: [selected_test_day],
                        consts.PARAM_START_HOURS: "20",
                        consts.PARAM_START_MINUTES: "0",
                        consts.PARAM_STOP_HOURS: "20",
                        consts.PARAM_STOP_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 200
                body = await response.json()
                assert not body[consts.KEY_SUCCESSFUL]
                assert consts.KEY_MESSAGE in body
def status_fixture():
    """Mock the device status."""
    # side_effect (not return_value) so each call builds a fresh mock status.
    with patch(
        "libsoundtouch.device.SoundTouchDevice.status",
        side_effect=MockStatusPlaying,
    ) as status:
        yield status
async def test_enable_schedule_request(
    disable_enable_schedule_response: MagicMock,
) -> None:
    """Unit test-cases for /switcher/enable_schedule request.

    Args:
        disable_enable_schedule_response: fixture of mocked
            ``SwitcherV2DisableEnableScheduleResponseMSG`` object.

    """
    with patch(
        "request_handlers.SwitcherV2Api.disable_enable_schedule",
        return_value=disable_enable_schedule_response,
    ):
        async with ClientSession() as session:
            # Schedule data passed as a query parameter.
            async with session.patch(
                URL_ENABLE_SCHEDULE,
                params={
                    consts.PARAM_SCHEDULE_DATA: consts.DUMMY_SCHEDULE_DATA
                },
            ) as response:
                assert response.status == 200
                body = await response.json()
                assert body[consts.KEY_SUCCESSFUL]

            # Schedule data passed in the json body works the same.
            async with session.patch(
                URL_ENABLE_SCHEDULE,
                **{
                    "json": {
                        consts.PARAM_SCHEDULE_DATA: consts.DUMMY_SCHEDULE_DATA
                    }
                },
            ) as response:
                assert response.status == 200
                body = await response.json()
                assert body[consts.KEY_SUCCESSFUL]

            # Schedule data of the wrong length is rejected.
            async with session.patch(
                URL_ENABLE_SCHEDULE,
                params={consts.PARAM_SCHEDULE_DATA: "not_24_len"},
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (bs4scrap.text ==
                        "Error: Argument schedule_data is length is no 24.")

            # Missing schedule data is rejected.
            async with session.patch(URL_ENABLE_SCHEDULE) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (bs4scrap.text ==
                        "Error: Argument schedule_data is missing.")

            # A STATE message type from the api means the request failed.
            disable_enable_schedule_response.msg_type = (
                ResponseMessageType.STATE)
            async with session.patch(
                URL_ENABLE_SCHEDULE,
                params={
                    consts.PARAM_SCHEDULE_DATA: consts.DUMMY_SCHEDULE_DATA
                },
            ) as response:
                assert response.status == 200
                body = await response.json()
                assert not body[consts.KEY_SUCCESSFUL]
                assert consts.KEY_MESSAGE in body
async def test_004_on_register_callback(self):
    """_on_register should send exactly one Iq stanza with no arguments."""
    iq_send = CoroutineMock()
    with patch('slixmpp.stanza.Iq.send', new=iq_send):
        await self.bus._on_register(None)
    iq_send.assert_called_once_with()
async def test_set_auto_shutdown_request(
    set_auto_shutdown_response: MagicMock,
) -> None:
    """Unit test-cases for /switcher/set_auto_shutdown request.

    Args:
        set_auto_shutdown_response: fixture of mocked
            ``SwitcherV2SetAutoOffResponseMSG`` object.

    """
    with patch(
        "request_handlers.SwitcherV2Api.set_auto_shutdown",
        return_value=set_auto_shutdown_response,
    ):
        async with ClientSession() as session:
            # Hours/minutes passed as query parameters.
            async with session.post(
                URL_SET_AUTO_SHUTDOWN,
                params={
                    consts.PARAM_HOURS: "1",
                    consts.PARAM_MINUTES: "30"
                },
            ) as response:
                assert response.status == 200
                body = await response.json()
                assert body[consts.KEY_SUCCESSFUL]

            # Hours/minutes in the json body work the same.
            async with session.post(
                URL_SET_AUTO_SHUTDOWN,
                **{
                    "json": {
                        consts.PARAM_HOURS: "1",
                        consts.PARAM_MINUTES: "30",
                    }
                },
            ) as response:
                assert response.status == 200
                body = await response.json()
                assert body[consts.KEY_SUCCESSFUL]

            # 3h01m exceeds the 1-3 hour window.
            async with session.post(
                URL_SET_AUTO_SHUTDOWN,
                params={
                    consts.PARAM_HOURS: "3",
                    consts.PARAM_MINUTES: "1"
                },
            ) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (
                    bs4scrap.text ==
                    "Error: Auto shutdown can be set between 1 and 3 hours.")

            # Missing arguments are rejected.
            async with session.post(URL_SET_AUTO_SHUTDOWN) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (
                    bs4scrap.text ==
                    "Error: One of the arguments hours or minutes is missing."  # noqa: E501
                )

            # A STATE message type from the api means the request failed.
            set_auto_shutdown_response.msg_type = ResponseMessageType.STATE
            async with session.post(
                URL_SET_AUTO_SHUTDOWN,
                params={
                    consts.PARAM_HOURS: "1",
                    consts.PARAM_MINUTES: "30"
                },
            ) as response:
                assert response.status == 200
                body = await response.json()
                assert not body[consts.KEY_SUCCESSFUL]
                assert consts.KEY_MESSAGE in body
async def test_see_passive_zone_state(hass):
    """Test that the device tracker sets gps for passive trackers."""
    register_time = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC)
    scan_time = datetime(2015, 9, 15, 23, 1, tzinfo=dt_util.UTC)

    # Create a zone whose coordinates should be attached to home devices.
    with assert_setup_component(1, zone.DOMAIN):
        zone_info = {
            'name': 'Home',
            'latitude': 1,
            'longitude': 2,
            'radius': 250,
            'passive': False
        }
        await async_setup_component(hass, zone.DOMAIN, {
            'zone': zone_info
        })

    scanner = get_component(hass, 'device_tracker.test').SCANNER
    scanner.reset()
    scanner.come_home('dev1')

    with patch('homeassistant.components.device_tracker.dt_util.utcnow',
               return_value=register_time):
        with assert_setup_component(1, device_tracker.DOMAIN):
            assert await async_setup_component(hass, device_tracker.DOMAIN, {
                device_tracker.DOMAIN: {
                    CONF_PLATFORM: 'test',
                    device_tracker.CONF_CONSIDER_HOME: 59,
                }})
            await hass.async_block_till_done()

    # While home, the tracker carries the zone's gps coordinates.
    state = hass.states.get('device_tracker.dev1')
    attrs = state.attributes
    assert STATE_HOME == state.state
    assert state.object_id == 'dev1'
    assert state.name == 'dev1'
    assert attrs.get('friendly_name') == 'dev1'
    assert attrs.get('latitude') == 1
    assert attrs.get('longitude') == 2
    assert attrs.get('gps_accuracy') == 0
    assert attrs.get('source_type') == \
        device_tracker.SOURCE_TYPE_ROUTER

    scanner.leave_home('dev1')

    with patch('homeassistant.components.device_tracker.dt_util.utcnow',
               return_value=scan_time):
        async_fire_time_changed(hass, scan_time)
        await hass.async_block_till_done()

    # After leaving, gps attributes are cleared.
    state = hass.states.get('device_tracker.dev1')
    attrs = state.attributes
    assert STATE_NOT_HOME == state.state
    assert state.object_id == 'dev1'
    assert state.name == 'dev1'
    assert attrs.get('friendly_name') == 'dev1'
    assert attrs.get('latitude') is None
    assert attrs.get('longitude') is None
    assert attrs.get('gps_accuracy') is None
    assert attrs.get('source_type') == \
        device_tracker.SOURCE_TYPE_ROUTER
async def test_turn_on_request(control_response: MagicMock) -> None:
    """Unit test-cases for /switcher/turn_on request.

    Args:
        control_response: fixture of mocked
            ``SwitcherV2ControlResponseMSG`` object.

    """
    with patch(
        "request_handlers.SwitcherV2Api.control_device",
        return_value=control_response,
    ):
        async with ClientSession() as session:
            # Plain turn-on succeeds.
            async with session.post(URL_TURN_ON) as response:
                assert response.status == 200
                body = await response.json()
                assert body[consts.KEY_SUCCESSFUL]

            # Turn-on with a timer as query parameter.
            async with session.post(
                    URL_TURN_ON,
                    params={consts.PARAM_MINUTES: 30}) as response:
                assert response.status == 200
                body = await response.json()
                assert body[consts.KEY_SUCCESSFUL]

            # Turn-on with a timer in the json body.
            async with session.post(URL_TURN_ON, **{"json": {
                    consts.PARAM_MINUTES: 30
            }}) as response:
                assert response.status == 200
                body = await response.json()
                assert body[consts.KEY_SUCCESSFUL]

            # Timer above the 1-180 minute limit is rejected.
            async with session.post(
                    URL_TURN_ON,
                    params={consts.PARAM_MINUTES: 181}) as response:
                assert response.status == 400
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert (bs4scrap.text ==
                        "Error: Can only accept timer for 1 to 180 minutes.")

            # A STATE message type from the api means the request failed.
            control_response.msg_type = ResponseMessageType.STATE
            async with session.post(URL_TURN_ON) as response:
                assert response.status == 200
                body = await response.json()
                assert not body[consts.KEY_SUCCESSFUL]
                assert consts.KEY_MESSAGE in body

            # An unsuccessful CONTROL response is reported as a failure.
            control_response.successful = False
            control_response.msg_type = ResponseMessageType.CONTROL
            async with session.post(URL_TURN_ON) as response:
                assert response.status == 200
                body = await response.json()
                assert not body[consts.KEY_SUCCESSFUL]

            # Non-integer minutes raise server-side (query param).
            async with session.post(
                    URL_TURN_ON,
                    params={consts.PARAM_MINUTES: "noint"}) as response:
                assert response.status == 500
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert bs4scrap.h1.text == "Internal Server Error"

            # Non-integer minutes raise server-side (json body).
            async with session.post(
                    URL_TURN_ON, **{"json": {
                        consts.PARAM_MINUTES: "noint"
                    }}) as response:
                assert response.status == 500
                body = await response.text()
                bs4scrap = BeautifulSoup(body, "html.parser")
                assert bs4scrap.h1.text == "Internal Server Error"
async def test_get_controller(hass):
    """Successful call."""
    login_patch = patch("aiounifi.Controller.login", return_value=Mock())
    with login_patch:
        controller = await unifi.controller.get_controller(
            hass, **CONTROLLER_DATA)
    assert controller
def load_bot_data_mock():
    """Yield a mock standing in for mosbot.command.load_bot_data."""
    patcher = am.patch('mosbot.command.load_bot_data')
    with patcher as mocked:
        yield mocked