async def test_get_action_capabilities_meter_triggers(
    hass: HomeAssistant,
    client: Client,
    aeon_smart_switch_6: Node,
    integration: ConfigEntry,
) -> None:
    """Test we get the expected action capabilities for meter triggers."""
    registry = device_registry.async_get(hass)
    device_entry = registry.async_get_device(
        {get_device_id(client, aeon_smart_switch_6)}
    )
    assert device_entry

    # The reset_meter action should expose an optional string "value" field.
    action_config = {
        "platform": "device",
        "domain": DOMAIN,
        "device_id": device_entry.id,
        "entity_id": "sensor.meter",
        "type": "reset_meter",
    }
    capabilities = await device_action.async_get_action_capabilities(
        hass, action_config
    )
    assert capabilities
    assert "extra_fields" in capabilities

    serialized = voluptuous_serialize.convert(
        capabilities["extra_fields"], custom_serializer=cv.custom_serializer
    )
    assert serialized == [{"type": "string", "name": "value", "optional": True}]
async def test_removed_device(hass, client, multiple_devices, integration):
    """Test that the device registry gets updated when a device gets removed."""
    nodes = multiple_devices

    # Verify how many nodes are available
    assert len(client.driver.controller.nodes) == 2

    # Make sure there are the same number of devices
    dev_reg = dr.async_get(hass)
    device_entries = dr.async_entries_for_config_entry(dev_reg, integration.entry_id)
    assert len(device_entries) == 2

    # Check how many entities there are
    ent_reg = er.async_get(hass)
    entity_entries = er.async_entries_for_config_entry(ent_reg, integration.entry_id)
    assert len(entity_entries) == 24

    # Remove a node (node ID 13) and reload the entry
    old_node = nodes.pop(13)
    await hass.config_entries.async_reload(integration.entry_id)
    await hass.async_block_till_done()

    # Assert that the node and all of its entities were removed from the device and
    # entity registry
    device_entries = dr.async_entries_for_config_entry(dev_reg, integration.entry_id)
    assert len(device_entries) == 1
    entity_entries = er.async_entries_for_config_entry(ent_reg, integration.entry_id)
    assert len(entity_entries) == 15
    assert dev_reg.async_get_device({get_device_id(client, old_node)}) is None
async def test_humanifying_zwave_js_value_notification_event(
    hass, client, lock_schlage_be469, integration
):
    """Test humanifying Z-Wave JS value notification events."""
    dev_reg = dr.async_get(hass)
    device = dev_reg.async_get_device(
        identifiers={get_device_id(client.driver, lock_schlage_be469)}
    )
    assert device

    # Logbook requires the recorder component to be marked as set up.
    hass.config.components.add("recorder")
    assert await async_setup_component(hass, "logbook", {})

    events = mock_humanify(
        hass,
        [
            MockRow(
                ZWAVE_JS_VALUE_NOTIFICATION_EVENT,
                {
                    "device_id": device.id,
                    "command_class": CommandClass.SCENE_ACTIVATION.value,
                    "command_class_name": "Scene Activation",
                    "label": "Scene ID",
                    "value": "001",
                },
            ),
        ],
    )

    # The humanified entry should carry the device name, domain, and a
    # human-readable message describing the CC event.
    assert events[0]["name"] == "Touchscreen Deadbolt"
    assert events[0]["domain"] == "zwave_js"
    assert (
        events[0]["message"]
        == "fired Scene Activation CC 'value notification' event for 'Scene ID': '001'"
    )
async def test_skip_old_entity_migration_for_multiple(
    hass, hank_binary_switch_state, client, integration
):
    """Test that multiple entities of the same value but on a different endpoint get skipped."""
    node = Node(client, copy.deepcopy(hank_binary_switch_state))
    driver = client.driver
    assert driver
    ent_reg = er.async_get(hass)
    dev_reg = dr.async_get(hass)
    device = dev_reg.async_get_or_create(
        config_entry_id=integration.entry_id,
        identifiers={get_device_id(driver, node)},
        manufacturer=hank_binary_switch_state["deviceConfig"]["manufacturer"],
        model=hank_binary_switch_state["deviceConfig"]["label"],
    )

    SENSOR_NAME = "sensor.smart_plug_with_two_usb_ports_value_electric_consumed"
    entity_name = SENSOR_NAME.split(".")[1]

    # Create two entity entries using different endpoints
    old_unique_id_1 = f"{driver.controller.home_id}.32-50-1-value-66049"
    entity_entry = ent_reg.async_get_or_create(
        "sensor",
        DOMAIN,
        old_unique_id_1,
        suggested_object_id=f"{entity_name}_1",
        config_entry=integration,
        original_name=f"{entity_name}_1",
        device_id=device.id,
    )
    assert entity_entry.entity_id == f"{SENSOR_NAME}_1"
    assert entity_entry.unique_id == old_unique_id_1

    # Second entry for the same value on yet another endpoint
    old_unique_id_2 = f"{driver.controller.home_id}.32-50-2-value-66049"
    entity_entry = ent_reg.async_get_or_create(
        "sensor",
        DOMAIN,
        old_unique_id_2,
        suggested_object_id=f"{entity_name}_2",
        config_entry=integration,
        original_name=f"{entity_name}_2",
        device_id=device.id,
    )
    assert entity_entry.entity_id == f"{SENSOR_NAME}_2"
    assert entity_entry.unique_id == old_unique_id_2

    # Add a ready node, unique ID should be migrated
    event = {"node": node}
    driver.controller.emit("node added", event)
    await hass.async_block_till_done()

    # Check that new RegistryEntry is created using new unique ID format
    entity_entry = ent_reg.async_get(SENSOR_NAME)
    new_unique_id = f"{driver.controller.home_id}.32-50-0-value-66049"
    assert entity_entry.unique_id == new_unique_id

    # Check that the old entities stuck around because we skipped the migration step
    assert ent_reg.async_get_entity_id("sensor", DOMAIN, old_unique_id_1)
    assert ent_reg.async_get_entity_id("sensor", DOMAIN, old_unique_id_2)
async def test_removed_device(
    hass, client, climate_radio_thermostat_ct100_plus, lock_schlage_be469, integration
):
    """Test that the device registry gets updated when a device gets removed."""
    driver = client.driver
    assert driver

    # Verify how many nodes are available
    assert len(driver.controller.nodes) == 2

    # Make sure there are the same number of devices
    dev_reg = dr.async_get(hass)
    device_entries = dr.async_entries_for_config_entry(dev_reg, integration.entry_id)
    assert len(device_entries) == 2

    # Check how many entities there are
    ent_reg = er.async_get(hass)
    entity_entries = er.async_entries_for_config_entry(ent_reg, integration.entry_id)
    assert len(entity_entries) == 29

    # Remove a node (node ID 13) and reload the entry
    old_node = driver.controller.nodes.pop(13)
    await hass.config_entries.async_reload(integration.entry_id)
    await hass.async_block_till_done()

    # Assert that the node and all of its entities were removed from the device and
    # entity registry
    device_entries = dr.async_entries_for_config_entry(dev_reg, integration.entry_id)
    assert len(device_entries) == 1
    entity_entries = er.async_entries_for_config_entry(ent_reg, integration.entry_id)
    assert len(entity_entries) == 17
    assert dev_reg.async_get_device({get_device_id(driver, old_node)}) is None
async def test_device_diagnostics(
    hass,
    client,
    multisensor_6,
    integration,
    hass_client,
    version_state,
):
    """Test the device level diagnostics data dump."""
    dev_reg = async_get_dev_reg(hass)
    device = dev_reg.async_get_device({get_device_id(client.driver, multisensor_6)})
    assert device

    # Update a value and ensure it is reflected in the node state
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": multisensor_6.node_id,
            "args": {
                "commandClassName": "Multilevel Sensor",
                "commandClass": 49,
                "endpoint": 0,
                "property": PROPERTY_ULTRAVIOLET,
                "newValue": 1,
                "prevValue": 0,
                "propertyName": PROPERTY_ULTRAVIOLET,
            },
        },
    )
    multisensor_6.receive_event(event)

    diagnostics_data = await get_diagnostics_for_device(
        hass, hass_client, integration, device
    )
    assert diagnostics_data["versionInfo"] == {
        "driverVersion": version_state["driverVersion"],
        "serverVersion": version_state["serverVersion"],
        "minSchemaVersion": 0,
        "maxSchemaVersion": 0,
    }

    # Assert that we only have the entities that were discovered for this device
    # Entities that are created outside of discovery (e.g. node status sensor and
    # ping button) should not be in dump.
    assert len(diagnostics_data["entities"]) == len(
        list(async_discover_node_values(multisensor_6, device, {device.id: set()}))
    )

    # The dumped state must reflect the received event, plus zeroed statistics.
    assert diagnostics_data["state"] == {
        **multisensor_6.data,
        "statistics": {
            "commandsDroppedRX": 0,
            "commandsDroppedTX": 0,
            "commandsRX": 0,
            "commandsTX": 0,
            "timeoutResponse": 0,
        },
    }
async def test_device_diagnostics(
    hass,
    client,
    multisensor_6,
    integration,
    hass_client,
    version_state,
):
    """Test the device level diagnostics data dump."""
    dev_reg = async_get(hass)
    device = dev_reg.async_get_device({get_device_id(client, multisensor_6)})
    assert device

    # Update a value and ensure it is reflected in the node state
    value_id = get_value_id(
        multisensor_6, CommandClass.SENSOR_MULTILEVEL, PROPERTY_ULTRAVIOLET
    )
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": multisensor_6.node_id,
            "args": {
                "commandClassName": "Multilevel Sensor",
                "commandClass": 49,
                "endpoint": 0,
                "property": PROPERTY_ULTRAVIOLET,
                "newValue": 1,
                "prevValue": 0,
                "propertyName": PROPERTY_ULTRAVIOLET,
            },
        },
    )
    multisensor_6.receive_event(event)

    diagnostics_data = await get_diagnostics_for_device(
        hass, hass_client, integration, device
    )
    assert diagnostics_data["versionInfo"] == {
        "driverVersion": version_state["driverVersion"],
        "serverVersion": version_state["serverVersion"],
        "minSchemaVersion": 0,
        "maxSchemaVersion": 0,
    }

    # Assert that the data returned doesn't match the stale node state data
    assert diagnostics_data["state"] != multisensor_6.data

    # Replace data for the value we updated and assert the new node data is the same
    # as what's returned
    updated_node_data = multisensor_6.data.copy()
    for idx, value in enumerate(updated_node_data["values"]):
        if _get_value_id_from_dict(multisensor_6, value) == value_id:
            updated_node_data["values"][idx] = multisensor_6.values[
                value_id
            ].data.copy()
    assert diagnostics_data["state"] == updated_node_data
async def test_get_actions(
    hass: HomeAssistant,
    client: Client,
    lock_schlage_be469: Node,
    integration: ConfigEntry,
) -> None:
    """Test we get the expected actions from a zwave_js node."""
    node = lock_schlage_be469
    dev_reg = device_registry.async_get(hass)
    device = dev_reg.async_get_device({get_device_id(client, node)})
    assert device

    # Lock-specific actions plus the generic node actions must all be present.
    expected_actions = [
        {
            "domain": DOMAIN,
            "type": "clear_lock_usercode",
            "device_id": device.id,
            "entity_id": "lock.touchscreen_deadbolt",
        },
        {
            "domain": DOMAIN,
            "type": "set_lock_usercode",
            "device_id": device.id,
            "entity_id": "lock.touchscreen_deadbolt",
        },
        {
            "domain": DOMAIN,
            "type": "refresh_value",
            "device_id": device.id,
            "entity_id": "lock.touchscreen_deadbolt",
        },
        {
            "domain": DOMAIN,
            "type": "set_value",
            "device_id": device.id,
        },
        {
            "domain": DOMAIN,
            "type": "ping",
            "device_id": device.id,
        },
        {
            "domain": DOMAIN,
            "type": "set_config_parameter",
            "device_id": device.id,
            "parameter": 3,
            "bitmask": None,
            "subtype": f"{node.node_id}-112-0-3 (Beeper)",
        },
    ]
    actions = await async_get_device_automations(
        hass, DeviceAutomationType.ACTION, device.id
    )
    # Subset check: other actions may exist beyond the expected ones.
    for action in expected_actions:
        assert action in actions
async def test_get_actions_meter(
    hass: HomeAssistant,
    client: Client,
    aeon_smart_switch_6: Node,
    integration: ConfigEntry,
) -> None:
    """Test we get the expected meter actions from a zwave_js node."""
    registry = device_registry.async_get(hass)
    device_entry = registry.async_get_device(
        {get_device_id(client, aeon_smart_switch_6)}
    )
    assert device_entry

    actions = await async_get_device_automations(hass, "action", device_entry.id)
    # At least one reset_meter action must be exposed for a metering node.
    assert any(action["type"] == "reset_meter" for action in actions)
async def test_old_entity_migration_notification_binary_sensor(
    hass, multisensor_6_state, client, integration
):
    """Test old entity on a different endpoint is migrated to a new one for a notification binary sensor."""
    node = Node(client, copy.deepcopy(multisensor_6_state))
    driver = client.driver
    assert driver
    ent_reg = er.async_get(hass)
    dev_reg = dr.async_get(hass)
    device = dev_reg.async_get_or_create(
        config_entry_id=integration.entry_id,
        identifiers={get_device_id(driver, node)},
        manufacturer=multisensor_6_state["deviceConfig"]["manufacturer"],
        model=multisensor_6_state["deviceConfig"]["label"],
    )

    entity_name = NOTIFICATION_MOTION_BINARY_SENSOR.split(".")[1]

    # Create entity RegistryEntry using old unique ID format
    old_unique_id = (
        f"{driver.controller.home_id}.52-113-1-Home Security-Motion sensor status.8"
    )
    entity_entry = ent_reg.async_get_or_create(
        "binary_sensor",
        DOMAIN,
        old_unique_id,
        suggested_object_id=entity_name,
        config_entry=integration,
        original_name=entity_name,
        device_id=device.id,
    )
    assert entity_entry.entity_id == NOTIFICATION_MOTION_BINARY_SENSOR
    assert entity_entry.unique_id == old_unique_id

    # Do this twice to make sure re-interview doesn't do anything weird
    for _ in range(0, 2):
        # Add a ready node, unique ID should be migrated
        event = {"node": node}
        driver.controller.emit("node added", event)
        await hass.async_block_till_done()

        # Check that new RegistryEntry is using new unique ID format
        entity_entry = ent_reg.async_get(NOTIFICATION_MOTION_BINARY_SENSOR)
        new_unique_id = (
            f"{driver.controller.home_id}.52-113-0-Home Security-Motion sensor status.8"
        )
        assert entity_entry.unique_id == new_unique_id
        # The old-format entry must be gone after migration.
        assert (
            ent_reg.async_get_entity_id("binary_sensor", DOMAIN, old_unique_id) is None
        )
async def test_old_entity_migration(
    hass, hank_binary_switch_state, client, integration
):
    """Test old entity on a different endpoint is migrated to a new one."""
    node = Node(client, copy.deepcopy(hank_binary_switch_state))
    driver = client.driver
    assert driver
    ent_reg = er.async_get(hass)
    dev_reg = dr.async_get(hass)
    device = dev_reg.async_get_or_create(
        config_entry_id=integration.entry_id,
        identifiers={get_device_id(driver, node)},
        manufacturer=hank_binary_switch_state["deviceConfig"]["manufacturer"],
        model=hank_binary_switch_state["deviceConfig"]["label"],
    )

    SENSOR_NAME = "sensor.smart_plug_with_two_usb_ports_value_electric_consumed"
    entity_name = SENSOR_NAME.split(".")[1]

    # Create entity RegistryEntry using fake endpoint
    old_unique_id = f"{driver.controller.home_id}.32-50-1-value-66049"
    entity_entry = ent_reg.async_get_or_create(
        "sensor",
        DOMAIN,
        old_unique_id,
        suggested_object_id=entity_name,
        config_entry=integration,
        original_name=entity_name,
        device_id=device.id,
    )
    assert entity_entry.entity_id == SENSOR_NAME
    assert entity_entry.unique_id == old_unique_id

    # Do this twice to make sure re-interview doesn't do anything weird
    # (loop index was unused; "_" makes that explicit)
    for _ in range(2):
        # Add a ready node, unique ID should be migrated
        event = {"node": node}
        driver.controller.emit("node added", event)
        await hass.async_block_till_done()

        # Check that new RegistryEntry is using new unique ID format.
        # Use the already-asserted ``driver`` alias consistently here
        # (``client.driver`` is the same object).
        entity_entry = ent_reg.async_get(SENSOR_NAME)
        new_unique_id = f"{driver.controller.home_id}.32-50-0-value-66049"
        assert entity_entry.unique_id == new_unique_id
        assert ent_reg.async_get_entity_id("sensor", DOMAIN, old_unique_id) is None
async def test_actions_multiple_calls(
    hass: HomeAssistant,
    client: Client,
    climate_radio_thermostat_ct100_plus: Node,
    integration: ConfigEntry,
) -> None:
    """Test actions can be called multiple times and still work."""
    node = climate_radio_thermostat_ct100_plus
    device_id = get_device_id(client, node)
    dev_reg = device_registry.async_get(hass)
    device = dev_reg.async_get_device({device_id})
    assert device
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "event",
                        "event_type": "test_event_refresh_value",
                    },
                    "action": {
                        "domain": DOMAIN,
                        "type": "refresh_value",
                        "device_id": device.id,
                        "entity_id": "climate.z_wave_thermostat",
                    },
                },
            ]
        },
    )

    # Trigger automation multiple times to confirm that it works each time
    for _ in range(5):
        # Re-patch per iteration so assert_called_once holds each time.
        with patch(
            "zwave_js_server.model.node.Node.async_poll_value"
        ) as mock_call:
            hass.bus.async_fire("test_event_refresh_value")
            await hass.async_block_till_done()
            mock_call.assert_called_once()
            args = mock_call.call_args_list[0][0]
            assert len(args) == 1
            assert args[0].value_id == "13-64-1-mode"
async def test_reset_meter_action(
    hass: HomeAssistant,
    client: Client,
    aeon_smart_switch_6: Node,
    integration: ConfigEntry,
) -> None:
    """Test reset_meter action."""
    node = aeon_smart_switch_6
    device_id = get_device_id(client, node)
    dev_reg = device_registry.async_get(hass)
    device = dev_reg.async_get_device({device_id})
    assert device
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "event",
                        "event_type": "test_event_reset_meter",
                    },
                    "action": {
                        "domain": DOMAIN,
                        "type": "reset_meter",
                        "device_id": device.id,
                        "entity_id": "sensor.smart_switch_6_electric_consumed_kwh",
                    },
                },
            ]
        },
    )

    # The action should invoke the Meter CC "reset" API on the endpoint.
    with patch(
        "zwave_js_server.model.endpoint.Endpoint.async_invoke_cc_api"
    ) as mock_call:
        hass.bus.async_fire("test_event_reset_meter")
        await hass.async_block_till_done()
        mock_call.assert_called_once()
        args = mock_call.call_args_list[0][0]
        assert len(args) == 2
        assert args[0] == CommandClass.METER
        assert args[1] == "reset"
async def test_device_diagnostics(
    hass,
    client,
    multisensor_6,
    integration,
    hass_client,
    version_state,
):
    """Test the device level diagnostics data dump."""
    dev_reg = async_get(hass)
    device = dev_reg.async_get_device({get_device_id(client, multisensor_6)})
    assert device

    # Update a value and ensure it is reflected in the node state
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": multisensor_6.node_id,
            "args": {
                "commandClassName": "Multilevel Sensor",
                "commandClass": 49,
                "endpoint": 0,
                "property": PROPERTY_ULTRAVIOLET,
                "newValue": 1,
                "prevValue": 0,
                "propertyName": PROPERTY_ULTRAVIOLET,
            },
        },
    )
    multisensor_6.receive_event(event)

    diagnostics_data = await get_diagnostics_for_device(
        hass, hass_client, integration, device
    )
    assert diagnostics_data["versionInfo"] == {
        "driverVersion": version_state["driverVersion"],
        "serverVersion": version_state["serverVersion"],
        "minSchemaVersion": 0,
        "maxSchemaVersion": 0,
    }
    # The dump should contain the node state including the updated value.
    assert diagnostics_data["state"] == multisensor_6.data
async def test_unavailable_entity_actions(
    hass: HomeAssistant,
    client: Client,
    lock_schlage_be469: Node,
    integration: ConfigEntry,
) -> None:
    """Test unavailable entities are not included in actions list."""
    unavailable_entity_id = (
        "binary_sensor.touchscreen_deadbolt_home_security_intrusion"
    )
    # Force the entity into the unavailable state before fetching actions.
    hass.states.async_set(unavailable_entity_id, STATE_UNAVAILABLE, force_update=True)
    await hass.async_block_till_done()

    registry = device_registry.async_get(hass)
    device_entry = registry.async_get_device(
        {get_device_id(client, lock_schlage_be469)}
    )
    assert device_entry

    actions = await async_get_device_automations(hass, "action", device_entry.id)
    # No returned action may target the unavailable entity.
    for action in actions:
        assert action.get("entity_id") != unavailable_entity_id
async def test_different_endpoint_migration_status_sensor(
    hass, hank_binary_switch_state, client, integration
):
    """Test that the different endpoint migration logic skips over the status sensor."""
    node = Node(client, copy.deepcopy(hank_binary_switch_state))
    ent_reg = er.async_get(hass)
    dev_reg = dr.async_get(hass)
    device = dev_reg.async_get_or_create(
        config_entry_id=integration.entry_id,
        identifiers={get_device_id(client, node)},
        manufacturer=hank_binary_switch_state["deviceConfig"]["manufacturer"],
        model=hank_binary_switch_state["deviceConfig"]["label"],
    )

    SENSOR_NAME = "sensor.smart_plug_with_two_usb_ports_status_sensor"
    entity_name = SENSOR_NAME.split(".")[1]

    # Create entity RegistryEntry using fake endpoint
    old_unique_id = f"{client.driver.controller.home_id}.32.node_status"
    entity_entry = ent_reg.async_get_or_create(
        "sensor",
        DOMAIN,
        old_unique_id,
        suggested_object_id=entity_name,
        config_entry=integration,
        original_name=entity_name,
        device_id=device.id,
    )
    assert entity_entry.entity_id == SENSOR_NAME
    assert entity_entry.unique_id == old_unique_id

    # Do this twice to make sure re-interview doesn't do anything weird
    # (loop index was unused; "_" makes that explicit)
    for _ in range(2):
        # Add a ready node, unique ID should be migrated
        event = {"node": node}
        client.driver.controller.emit("node added", event)
        await hass.async_block_till_done()

        # Check that the RegistryEntry is using the same unique ID
        entity_entry = ent_reg.async_get(SENSOR_NAME)
        assert entity_entry.unique_id == old_unique_id
ATTR_ENDPOINT: endpoint, ATTR_PROPERTY_KEY: value.property_key, ATTR_PROPERTY_KEY_NAME: value.property_key_name, ATTR_PREVIOUS_VALUE: prev_value, ATTR_PREVIOUS_VALUE_RAW: prev_value_raw, ATTR_CURRENT_VALUE: curr_value, ATTR_CURRENT_VALUE_RAW: curr_value_raw, "description": f"Z-Wave value {value_id} updated on {device_name}", } hass.async_run_hass_job(job, {"trigger": payload}) for node in nodes: driver = node.client.driver assert driver is not None # The node comes from the driver. device_identifier = get_device_id(driver, node) device = dev_reg.async_get_device({device_identifier}) assert device value_id = get_value_id(node, command_class, property_, endpoint, property_key) value = node.values[value_id] # We need to store the current value and device for the callback unsubs.append( node.on( "value updated", functools.partial(async_on_value_updated, value, device), )) @callback def async_remove() -> None: """Remove state listeners async."""
async def test_humanifying_zwave_js_notification_event(
    hass, client, lock_schlage_be469, integration
):
    """Test humanifying Z-Wave JS notification events."""
    dev_reg = dr.async_get(hass)
    device = dev_reg.async_get_device(
        identifiers={get_device_id(client.driver, lock_schlage_be469)}
    )
    assert device

    # Logbook requires the recorder component to be marked as set up.
    hass.config.components.add("recorder")
    assert await async_setup_component(hass, "logbook", {})

    # One row per notification CC variant the logbook knows how to describe.
    events = mock_humanify(
        hass,
        [
            MockRow(
                ZWAVE_JS_NOTIFICATION_EVENT,
                {
                    "device_id": device.id,
                    "command_class": CommandClass.NOTIFICATION.value,
                    "command_class_name": "Notification",
                    "label": "label",
                    "event_label": "event_label",
                },
            ),
            MockRow(
                ZWAVE_JS_NOTIFICATION_EVENT,
                {
                    "device_id": device.id,
                    "command_class": CommandClass.ENTRY_CONTROL.value,
                    "command_class_name": "Entry Control",
                    "event_type": 1,
                    "data_type": 2,
                },
            ),
            MockRow(
                ZWAVE_JS_NOTIFICATION_EVENT,
                {
                    "device_id": device.id,
                    "command_class": CommandClass.SWITCH_MULTILEVEL.value,
                    "command_class_name": "Multilevel Switch",
                    "event_type": 1,
                    "direction": "up",
                },
            ),
            MockRow(
                ZWAVE_JS_NOTIFICATION_EVENT,
                {
                    "device_id": device.id,
                    "command_class": CommandClass.POWERLEVEL.value,
                    "command_class_name": "Powerlevel",
                },
            ),
        ],
    )

    assert events[0]["name"] == "Touchscreen Deadbolt"
    assert events[0]["domain"] == "zwave_js"
    assert (
        events[0]["message"]
        == "fired Notification CC 'notification' event 'label': 'event_label'"
    )

    assert events[1]["name"] == "Touchscreen Deadbolt"
    assert events[1]["domain"] == "zwave_js"
    assert (
        events[1]["message"]
        == "fired Entry Control CC 'notification' event for event type '1' with data type '2'"
    )

    assert events[2]["name"] == "Touchscreen Deadbolt"
    assert events[2]["domain"] == "zwave_js"
    assert (
        events[2]["message"]
        == "fired Multilevel Switch CC 'notification' event for event type '1': 'up'"
    )

    assert events[3]["name"] == "Touchscreen Deadbolt"
    assert events[3]["domain"] == "zwave_js"
    assert events[3]["message"] == "fired Powerlevel CC 'notification' event"
async def test_actions(
    hass: HomeAssistant,
    client: Client,
    climate_radio_thermostat_ct100_plus: Node,
    integration: ConfigEntry,
) -> None:
    """Test actions."""
    node = climate_radio_thermostat_ct100_plus
    device_id = get_device_id(client, node)
    dev_reg = device_registry.async_get(hass)
    device = dev_reg.async_get_device({device_id})
    assert device
    # Four automations, one per action type under test.
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "event",
                        "event_type": "test_event_refresh_value",
                    },
                    "action": {
                        "domain": DOMAIN,
                        "type": "refresh_value",
                        "device_id": device.id,
                        "entity_id": "climate.z_wave_thermostat",
                    },
                },
                {
                    "trigger": {
                        "platform": "event",
                        "event_type": "test_event_ping",
                    },
                    "action": {
                        "domain": DOMAIN,
                        "type": "ping",
                        "device_id": device.id,
                    },
                },
                {
                    "trigger": {
                        "platform": "event",
                        "event_type": "test_event_set_value",
                    },
                    "action": {
                        "domain": DOMAIN,
                        "type": "set_value",
                        "device_id": device.id,
                        "command_class": 112,
                        "property": 1,
                        "value": 1,
                    },
                },
                {
                    "trigger": {
                        "platform": "event",
                        "event_type": "test_event_set_config_parameter",
                    },
                    "action": {
                        "domain": DOMAIN,
                        "type": "set_config_parameter",
                        "device_id": device.id,
                        "parameter": 1,
                        "bitmask": None,
                        "subtype": "2-112-0-3 (Beeper)",
                        "value": 1,
                    },
                },
            ]
        },
    )

    with patch("zwave_js_server.model.node.Node.async_poll_value") as mock_call:
        hass.bus.async_fire("test_event_refresh_value")
        await hass.async_block_till_done()
        mock_call.assert_called_once()
        args = mock_call.call_args_list[0][0]
        assert len(args) == 1
        assert args[0].value_id == "13-64-1-mode"

    with patch("zwave_js_server.model.node.Node.async_ping") as mock_call:
        hass.bus.async_fire("test_event_ping")
        await hass.async_block_till_done()
        mock_call.assert_called_once()
        args = mock_call.call_args_list[0][0]
        assert len(args) == 0

    with patch("zwave_js_server.model.node.Node.async_set_value") as mock_call:
        hass.bus.async_fire("test_event_set_value")
        await hass.async_block_till_done()
        mock_call.assert_called_once()
        args = mock_call.call_args_list[0][0]
        assert len(args) == 2
        assert args[0] == "13-112-0-1"
        assert args[1] == 1

    with patch(
        "homeassistant.components.zwave_js.services.async_set_config_parameter"
    ) as mock_call:
        hass.bus.async_fire("test_event_set_config_parameter")
        await hass.async_block_till_done()
        mock_call.assert_called_once()
        args = mock_call.call_args_list[0][0]
        assert len(args) == 3
        assert args[0].node_id == 13
        assert args[1] == 1
        assert args[2] == 1
async def async_attach_trigger(
    hass: HomeAssistant,
    config: ConfigType,
    action: AutomationActionType,
    automation_info: AutomationTriggerInfo,
    *,
    platform_type: str = PLATFORM_TYPE,
) -> CALLBACK_TYPE:
    """Listen for state changes based on configuration."""
    dev_reg = dr.async_get(hass)
    nodes = async_get_nodes_from_targets(hass, config, dev_reg=dev_reg)
    if config[ATTR_EVENT_SOURCE] == "node" and not nodes:
        raise ValueError(
            f"No nodes found for given {ATTR_DEVICE_ID}s or {ATTR_ENTITY_ID}s."
        )

    event_source = config[ATTR_EVENT_SOURCE]
    event_name = config[ATTR_EVENT]
    event_data_filter = config.get(ATTR_EVENT_DATA, {})

    unsubs = []
    job = HassJob(action)

    trigger_data = automation_info["trigger_data"]

    @callback
    def async_on_event(event_data: dict, device: dr.DeviceEntry | None = None) -> None:
        """Handle event."""
        # Every key in the filter must match; otherwise ignore the event.
        for key, val in event_data_filter.items():
            if key not in event_data:
                return
            if (
                config[ATTR_PARTIAL_DICT_MATCH]
                and isinstance(event_data[key], dict)
                and isinstance(event_data_filter[key], dict)
            ):
                # Partial match: only the filter's keys inside the nested
                # dict need to match; other keys are ignored.
                for key2, val2 in event_data_filter[key].items():
                    if key2 not in event_data[key] or event_data[key][key2] != val2:
                        return
                continue
            if event_data[key] != val:
                return

        payload = {
            **trigger_data,
            CONF_PLATFORM: platform_type,
            ATTR_EVENT_SOURCE: event_source,
            ATTR_EVENT: event_name,
            ATTR_EVENT_DATA: event_data,
        }

        primary_desc = f"Z-Wave JS '{event_source}' event '{event_name}' was emitted"

        if device:
            device_name = device.name_by_user or device.name
            payload[ATTR_DEVICE_ID] = device.id
            home_and_node_id = get_home_and_node_id_from_device_entry(device)
            assert home_and_node_id
            payload[ATTR_NODE_ID] = home_and_node_id[1]
            payload["description"] = f"{primary_desc} on {device_name}"
        else:
            payload["description"] = primary_desc

        payload[
            "description"
        ] = f"{payload['description']} with event data: {event_data}"

        hass.async_run_hass_job(job, {"trigger": payload})

    # No target nodes: listen on the driver or controller of the configured
    # config entry instead.
    if not nodes:
        entry_id = config[ATTR_CONFIG_ENTRY_ID]
        client: Client = hass.data[DOMAIN][entry_id][DATA_CLIENT]
        assert client.driver
        if event_source == "controller":
            unsubs.append(client.driver.controller.on(event_name, async_on_event))
        else:
            unsubs.append(client.driver.on(event_name, async_on_event))

    for node in nodes:
        driver = node.client.driver
        assert driver is not None  # The node comes from the driver.
        device_identifier = get_device_id(driver, node)
        device = dev_reg.async_get_device({device_identifier})
        assert device
        # We need to store the device for the callback
        unsubs.append(
            node.on(event_name, functools.partial(async_on_event, device=device))
        )

    @callback
    def async_remove() -> None:
        """Remove state listeners async."""
        for unsub in unsubs:
            unsub()
        unsubs.clear()

    return async_remove
async def test_multicast_set_value(
    hass,
    client,
    climate_danfoss_lc_13,
    climate_eurotronic_spirit_z,
    integration,
):
    """Test multicast_set_value service."""
    # Test successful multicast call
    await hass.services.async_call(
        DOMAIN,
        SERVICE_MULTICAST_SET_VALUE,
        {
            ATTR_ENTITY_ID: [
                CLIMATE_DANFOSS_LC13_ENTITY,
                CLIMATE_EUROTRONICS_SPIRIT_Z_ENTITY,
            ],
            ATTR_COMMAND_CLASS: 67,
            ATTR_PROPERTY: "setpoint",
            ATTR_PROPERTY_KEY: 1,
            ATTR_VALUE: 2,
        },
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "multicast_group.set_value"
    assert args["nodeIDs"] == [
        climate_eurotronic_spirit_z.node_id,
        climate_danfoss_lc_13.node_id,
    ]
    assert args["valueId"] == {
        "commandClass": 67,
        "property": "setpoint",
        "propertyKey": 1,
    }
    assert args["value"] == 2

    client.async_send_command.reset_mock()

    # Test successful multicast call with hex value
    await hass.services.async_call(
        DOMAIN,
        SERVICE_MULTICAST_SET_VALUE,
        {
            ATTR_ENTITY_ID: [
                CLIMATE_DANFOSS_LC13_ENTITY,
                CLIMATE_EUROTRONICS_SPIRIT_Z_ENTITY,
            ],
            ATTR_COMMAND_CLASS: 67,
            ATTR_PROPERTY: "setpoint",
            ATTR_PROPERTY_KEY: 1,
            ATTR_VALUE: "0x2",
        },
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "multicast_group.set_value"
    assert args["nodeIDs"] == [
        climate_eurotronic_spirit_z.node_id,
        climate_danfoss_lc_13.node_id,
    ]
    assert args["valueId"] == {
        "commandClass": 67,
        "property": "setpoint",
        "propertyKey": 1,
    }
    # Hex string "0x2" is parsed to the integer 2.
    assert args["value"] == 2

    client.async_send_command.reset_mock()

    # Test using area ID
    dev_reg = async_get_dev_reg(hass)
    device_eurotronic = dev_reg.async_get_device(
        {get_device_id(client, climate_eurotronic_spirit_z)}
    )
    assert device_eurotronic
    device_danfoss = dev_reg.async_get_device(
        {get_device_id(client, climate_danfoss_lc_13)}
    )
    assert device_danfoss
    area_reg = async_get_area_reg(hass)
    area = area_reg.async_get_or_create("test")
    dev_reg.async_update_device(device_eurotronic.id, area_id=area.id)
    dev_reg.async_update_device(device_danfoss.id, area_id=area.id)
    await hass.services.async_call(
        DOMAIN,
        SERVICE_MULTICAST_SET_VALUE,
        {
            ATTR_AREA_ID: area.id,
            ATTR_COMMAND_CLASS: 67,
            ATTR_PROPERTY: "setpoint",
            ATTR_PROPERTY_KEY: 1,
            ATTR_VALUE: "0x2",
        },
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "multicast_group.set_value"
    assert args["nodeIDs"] == [
        climate_eurotronic_spirit_z.node_id,
        climate_danfoss_lc_13.node_id,
    ]
    assert args["valueId"] == {
        "commandClass": 67,
        "property": "setpoint",
        "propertyKey": 1,
    }
    assert args["value"] == 2

    client.async_send_command.reset_mock()

    # Test groups get expanded for multicast call
    assert await async_setup_component(hass, "group", {})
    await Group.async_create_group(
        hass, "test", [CLIMATE_DANFOSS_LC13_ENTITY, CLIMATE_EUROTRONICS_SPIRIT_Z_ENTITY]
    )
    await hass.services.async_call(
        DOMAIN,
        SERVICE_MULTICAST_SET_VALUE,
        {
            ATTR_ENTITY_ID: "group.test",
            ATTR_COMMAND_CLASS: 67,
            ATTR_PROPERTY: "setpoint",
            ATTR_PROPERTY_KEY: 1,
            ATTR_VALUE: "0x2",
        },
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "multicast_group.set_value"
    assert args["nodeIDs"] == [
        climate_eurotronic_spirit_z.node_id,
        climate_danfoss_lc_13.node_id,
    ]
    assert args["valueId"] == {
        "commandClass": 67,
        "property": "setpoint",
        "propertyKey": 1,
    }
    assert args["value"] == 2

    client.async_send_command.reset_mock()

    # Test successful broadcast call
    await hass.services.async_call(
        DOMAIN,
        SERVICE_MULTICAST_SET_VALUE,
        {
            ATTR_BROADCAST: True,
            ATTR_COMMAND_CLASS: 67,
            ATTR_PROPERTY: "setpoint",
            ATTR_PROPERTY_KEY: 1,
            ATTR_VALUE: 2,
        },
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 1
    args = client.async_send_command.call_args[0][0]
    assert args["command"] == "broadcast_node.set_value"
    assert args["valueId"] == {
        "commandClass": 67,
        "property": "setpoint",
        "propertyKey": 1,
    }
    assert args["value"] == 2

    client.async_send_command.reset_mock()

    # Test sending one node without broadcast uses the node.set_value command instead
    await hass.services.async_call(
        DOMAIN,
        SERVICE_MULTICAST_SET_VALUE,
        {
            ATTR_ENTITY_ID: CLIMATE_DANFOSS_LC13_ENTITY,
            ATTR_COMMAND_CLASS: 67,
            ATTR_PROPERTY: "setpoint",
            ATTR_PROPERTY_KEY: 1,
            ATTR_VALUE: 2,
        },
        blocking=True,
    )

    assert len(client.async_send_command_no_wait.call_args_list) == 1
    args = client.async_send_command_no_wait.call_args[0][0]
    assert args["command"] == "node.set_value"

    client.async_send_command_no_wait.reset_mock()

    # Test no device, entity, or broadcast flag raises error
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_MULTICAST_SET_VALUE,
            {
                ATTR_COMMAND_CLASS: 67,
                ATTR_PROPERTY: "setpoint",
                ATTR_PROPERTY_KEY: 1,
                ATTR_VALUE: 2,
            },
            blocking=True,
        )

    # Test that when a command fails we raise an exception
    client.async_send_command.return_value = {"success": False}

    with pytest.raises(SetValueFailed):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_MULTICAST_SET_VALUE,
            {
                ATTR_ENTITY_ID: [
                    CLIMATE_DANFOSS_LC13_ENTITY,
                    CLIMATE_EUROTRONICS_SPIRIT_Z_ENTITY,
                ],
                ATTR_COMMAND_CLASS: 67,
                ATTR_PROPERTY: "setpoint",
                ATTR_PROPERTY_KEY: 1,
                ATTR_VALUE: 2,
            },
            blocking=True,
        )

    # Create a fake node with a different home ID from a real node and patch it into
    # return of helper function to check the validation for two nodes having different
    # home IDs
    diff_network_node = MagicMock()
    diff_network_node.client.driver.controller.home_id.return_value = "diff_home_id"

    with pytest.raises(vol.MultipleInvalid), patch(
        "homeassistant.components.zwave_js.helpers.async_get_node_from_device_id",
        side_effect=(climate_danfoss_lc_13, diff_network_node),
    ):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_MULTICAST_SET_VALUE,
            {
                ATTR_ENTITY_ID: [
                    CLIMATE_DANFOSS_LC13_ENTITY,
                ],
                ATTR_DEVICE_ID: "fake_device_id",
                ATTR_COMMAND_CLASS: 67,
                ATTR_PROPERTY: "setpoint",
                ATTR_PROPERTY_KEY: 1,
                ATTR_VALUE: 2,
            },
            blocking=True,
        )

    # Test that when there are multiple zwave_js config entries, service will fail
    # without devices or entities
    new_entry = MockConfigEntry(domain=DOMAIN)
    new_entry.add_to_hass(hass)
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_MULTICAST_SET_VALUE,
            {
                ATTR_BROADCAST: True,
                ATTR_COMMAND_CLASS: 67,
                ATTR_PROPERTY: "setpoint",
                ATTR_PROPERTY_KEY: 1,
                ATTR_VALUE: 2,
            },
            blocking=True,
        )
async def test_ping(
    hass,
    client,
    climate_danfoss_lc_13,
    climate_radio_thermostat_ct100_plus_different_endpoints,
    integration,
):
    """Test the ping service against entities, devices, areas, and groups."""
    dev_reg = async_get_dev_reg(hass)
    device_radio_thermostat = dev_reg.async_get_device(
        {
            get_device_id(
                client, climate_radio_thermostat_ct100_plus_different_endpoints
            )
        }
    )
    assert device_radio_thermostat
    device_danfoss = dev_reg.async_get_device(
        {get_device_id(client, climate_danfoss_lc_13)}
    )
    assert device_danfoss

    client.async_send_command.return_value = {"responded": True}

    def assert_pinged_both_nodes():
        """Assert exactly two node.ping commands were sent (thermostat, then Danfoss) and reset the mock."""
        calls = client.async_send_command.call_args_list
        assert len(calls) == 2
        first = calls[0][0][0]
        assert first["command"] == "node.ping"
        assert (
            first["nodeId"]
            == climate_radio_thermostat_ct100_plus_different_endpoints.node_id
        )
        second = calls[1][0][0]
        assert second["command"] == "node.ping"
        assert second["nodeId"] == climate_danfoss_lc_13.node_id
        client.async_send_command.reset_mock()

    # Ping via entity IDs
    await hass.services.async_call(
        DOMAIN,
        SERVICE_PING,
        {
            ATTR_ENTITY_ID: [
                CLIMATE_DANFOSS_LC13_ENTITY,
                CLIMATE_RADIO_THERMOSTAT_ENTITY,
            ],
        },
        blocking=True,
    )
    assert_pinged_both_nodes()

    # Ping via device IDs
    await hass.services.async_call(
        DOMAIN,
        SERVICE_PING,
        {
            ATTR_DEVICE_ID: [
                device_radio_thermostat.id,
                device_danfoss.id,
            ],
        },
        blocking=True,
    )
    assert_pinged_both_nodes()

    # Ping via an area containing both devices
    area_reg = async_get_area_reg(hass)
    area = area_reg.async_get_or_create("test")
    dev_reg.async_update_device(device_radio_thermostat.id, area_id=area.id)
    dev_reg.async_update_device(device_danfoss.id, area_id=area.id)
    await hass.services.async_call(
        DOMAIN,
        SERVICE_PING,
        {ATTR_AREA_ID: area.id},
        blocking=True,
    )
    assert_pinged_both_nodes()

    # Groups should be expanded into their member entities
    assert await async_setup_component(hass, "group", {})
    await Group.async_create_group(
        hass, "test", [CLIMATE_DANFOSS_LC13_ENTITY, CLIMATE_RADIO_THERMOSTAT_ENTITY]
    )
    await hass.services.async_call(
        DOMAIN,
        SERVICE_PING,
        {
            ATTR_ENTITY_ID: "group.test",
        },
        blocking=True,
    )
    assert_pinged_both_nodes()

    # Calling the service with no device or entity target is invalid
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_PING,
            {},
            blocking=True,
        )
async def test_device_diagnostics_missing_primary_value(
    hass,
    client,
    multisensor_6,
    integration,
    hass_client,
):
    """Test that the device diagnostics handles an entity with a missing primary value."""
    dev_reg = async_get_dev_reg(hass)
    device = dev_reg.async_get_device({get_device_id(client.driver, multisensor_6)})
    assert device

    entity_id = "sensor.multisensor_6_air_temperature"
    ent_reg = async_get_ent_reg(hass)
    entry = ent_reg.async_get(entity_id)

    def find_air_entity(data):
        """Return the diagnostics entry for the air temperature entity."""
        return next(
            entity for entity in data["entities"] if entity["entity_id"] == entity_id
        )

    # The entity's primary value should initially appear in the diagnostics.
    diagnostics_data = await get_diagnostics_for_device(
        hass, hass_client, integration, device
    )

    value = multisensor_6.values.get(get_value_id_from_unique_id(entry.unique_id))
    assert value

    air_entity = find_air_entity(diagnostics_data)
    assert air_entity["primary_value"] == {
        "command_class": value.command_class,
        "command_class_name": value.command_class_name,
        "endpoint": value.endpoint,
        "property": value.property_,
        "property_name": value.property_name,
        "property_key": value.property_key,
        "property_key_name": value.property_key_name,
    }

    # Simulate the driver removing the entity's primary value from the node.
    multisensor_6.receive_event(
        Event(
            type="value removed",
            data={
                "source": "node",
                "event": "value removed",
                "nodeId": multisensor_6.node_id,
                "args": {
                    "commandClassName": value.command_class_name,
                    "commandClass": value.command_class,
                    "endpoint": value.endpoint,
                    "property": value.property_,
                    "prevValue": 0,
                    "propertyName": value.property_name,
                },
            },
        )
    )

    diagnostics_data = await get_diagnostics_for_device(
        hass, hass_client, integration, device
    )
    air_entity = find_air_entity(diagnostics_data)
    assert air_entity["primary_value"] is None
async def test_lock_actions(
    hass: HomeAssistant,
    client: Client,
    lock_schlage_be469: Node,
    integration: ConfigEntry,
) -> None:
    """Test actions for locks."""
    node = lock_schlage_be469
    device_id = get_device_id(client, node)
    dev_reg = device_registry.async_get(hass)
    device = dev_reg.async_get_device({device_id})
    assert device

    # Build the two device-action payloads up front so the automation config
    # below stays readable.
    clear_action = {
        "domain": DOMAIN,
        "type": "clear_lock_usercode",
        "device_id": device.id,
        "entity_id": "lock.touchscreen_deadbolt",
        "code_slot": 1,
    }
    set_action = {
        "domain": DOMAIN,
        "type": "set_lock_usercode",
        "device_id": device.id,
        "entity_id": "lock.touchscreen_deadbolt",
        "code_slot": 1,
        "usercode": "1234",
    }

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "event",
                        "event_type": "test_event_clear_lock_usercode",
                    },
                    "action": clear_action,
                },
                {
                    "trigger": {
                        "platform": "event",
                        "event_type": "test_event_set_lock_usercode",
                    },
                    "action": set_action,
                },
            ]
        },
    )

    # Firing the clear event should call clear_usercode(node, code_slot).
    with patch("homeassistant.components.zwave_js.lock.clear_usercode") as mock_clear:
        hass.bus.async_fire("test_event_clear_lock_usercode")
        await hass.async_block_till_done()
        mock_clear.assert_called_once()
        clear_args = mock_clear.call_args_list[0][0]
        assert len(clear_args) == 2
        assert clear_args[0].node_id == node.node_id
        assert clear_args[1] == 1

    # Firing the set event should call set_usercode(node, code_slot, usercode).
    with patch("homeassistant.components.zwave_js.lock.set_usercode") as mock_set:
        hass.bus.async_fire("test_event_set_lock_usercode")
        await hass.async_block_till_done()
        mock_set.assert_called_once()
        set_args = mock_set.call_args_list[0][0]
        assert len(set_args) == 3
        assert set_args[0].node_id == node.node_id
        assert set_args[1] == 1
        assert set_args[2] == "1234"
async def async_attach_trigger(
    hass: HomeAssistant,
    config: ConfigType,
    action: Callable,
    automation_info: dict[str, Any],
    *,
    platform_type: str = PLATFORM_TYPE,
) -> CALLBACK_TYPE:
    """Listen for state changes based on configuration.

    Resolves the configured devices/entities to Z-Wave nodes, subscribes to
    "value updated" events on each node's matching value, and fires `action`
    with a trigger payload when the from/to filters match. Returns a callback
    that removes all subscriptions.
    """
    # Collect the set of nodes to watch from the configured device and
    # entity IDs. A set de-duplicates nodes targeted via both mechanisms.
    nodes: set[Node] = set()
    if ATTR_DEVICE_ID in config:
        nodes.update(
            {
                async_get_node_from_device_id(hass, device_id)
                for device_id in config.get(ATTR_DEVICE_ID, [])
            }
        )
    if ATTR_ENTITY_ID in config:
        nodes.update(
            {
                async_get_node_from_entity_id(hass, entity_id)
                for entity_id in config.get(ATTR_ENTITY_ID, [])
            }
        )

    from_value = config[ATTR_FROM]
    to_value = config[ATTR_TO]
    command_class = config[ATTR_COMMAND_CLASS]
    property_ = config[ATTR_PROPERTY]
    endpoint = config.get(ATTR_ENDPOINT)
    property_key = config.get(ATTR_PROPERTY_KEY)
    unsubs = []
    job = HassJob(action)

    trigger_data: dict = {}
    if automation_info:
        trigger_data = automation_info.get("trigger_data", {})

    @callback
    def async_on_value_updated(
        value: Value, device: dr.DeviceEntry, event: Event
    ) -> None:
        """Handle value update."""
        event_value: Value = event["value"]
        if event_value != value:
            return

        # Get previous value and its state value if it exists
        prev_value_raw = event["args"]["prevValue"]
        prev_value = value.metadata.states.get(str(prev_value_raw), prev_value_raw)
        # Get current value and its state value if it exists
        curr_value_raw = event["args"]["newValue"]
        curr_value = value.metadata.states.get(str(curr_value_raw), curr_value_raw)
        # Check from and to values against previous and current values respectively
        for value_to_eval, raw_value_to_eval, match in (
            (prev_value, prev_value_raw, from_value),
            (curr_value, curr_value_raw, to_value),
        ):
            if (
                match != MATCH_ALL
                and value_to_eval != match
                and not (
                    isinstance(match, list)
                    and (value_to_eval in match or raw_value_to_eval in match)
                )
                and raw_value_to_eval != match
            ):
                return

        device_name = device.name_by_user or device.name

        payload = {
            **trigger_data,
            CONF_PLATFORM: platform_type,
            ATTR_DEVICE_ID: device.id,
            ATTR_NODE_ID: value.node.node_id,
            ATTR_COMMAND_CLASS: value.command_class,
            ATTR_COMMAND_CLASS_NAME: value.command_class_name,
            ATTR_PROPERTY: value.property_,
            ATTR_PROPERTY_NAME: value.property_name,
            ATTR_ENDPOINT: endpoint,
            ATTR_PROPERTY_KEY: value.property_key,
            ATTR_PROPERTY_KEY_NAME: value.property_key_name,
            ATTR_PREVIOUS_VALUE: prev_value,
            ATTR_PREVIOUS_VALUE_RAW: prev_value_raw,
            ATTR_CURRENT_VALUE: curr_value,
            ATTR_CURRENT_VALUE_RAW: curr_value_raw,
            # BUGFIX: use the value's own ID here. The previous code
            # referenced `value_id`, a late-bound variable from the
            # `for node in nodes` loop below, so with multiple nodes every
            # callback described the value of the LAST node in the loop.
            "description": f"Z-Wave value {value.value_id} updated on {device_name}",
        }

        hass.async_run_hass_job(job, {"trigger": payload})

    dev_reg = dr.async_get(hass)
    for node in nodes:
        device_identifier = get_device_id(node.client, node)
        device = dev_reg.async_get_device({device_identifier})
        assert device
        value_id = get_value_id(node, command_class, property_, endpoint, property_key)
        value = node.values[value_id]
        # Bind this node's value and device into the callback via partial so
        # each subscription sees its own objects (not late-bound loop vars).
        unsubs.append(
            node.on(
                "value updated",
                functools.partial(async_on_value_updated, value, device),
            )
        )

    @callback
    def async_remove() -> None:
        """Remove state listeners async."""
        for unsub in unsubs:
            unsub()
        unsubs.clear()

    return async_remove