def test_simple_descriptor():
    """SimpleDescriptor and SizePrefixedSimpleDescriptor must frame identically."""

    def _fill(descriptor):
        # Populate a descriptor with the shared fixture values.
        descriptor.endpoint = zigpy_t.uint8_t(1)
        descriptor.profile = zigpy_t.uint16_t(260)
        descriptor.device_type = zigpy_t.uint16_t(257)
        descriptor.device_version = zigpy_t.uint8_t(0)
        descriptor.input_clusters = zigpy_t.LVList(t.uint16_t)(
            [0, 3, 4, 5, 6, 8, 2821, 1794]
        )
        descriptor.output_clusters = zigpy_t.LVList(t.uint16_t)([10, 25])
        return descriptor

    def _callback(descriptor):
        # Wrap a descriptor in a SimpleDescRsp callback for comparison.
        return c.ZDOCommands.SimpleDescRsp.Callback(
            Src=t.NWK(0x1234),
            Status=t.ZDOStatus.SUCCESS,
            NWK=t.NWK(0x1234),
            SimpleDescriptor=descriptor,
        )

    plain = _fill(zigpy.zdo.types.SimpleDescriptor())
    prefixed = _fill(zigpy.zdo.types.SizePrefixedSimpleDescriptor())

    assert _callback(plain).to_frame() == _callback(prefixed).to_frame()
def handle_simple_desc_req(self, addr, endpoint):
    """Reply to a Simple_Desc_req with a dummy descriptor that has no clusters."""
    descriptor = types.SizePrefixedSimpleDescriptor()
    descriptor.endpoint = endpoint
    descriptor.profile = t.uint16_t(0x0104)  # Home Automation
    descriptor.device_type = t.uint16_t(0x0005)  # Configuration Tool
    descriptor.device_version = t.uint8_t(0)
    descriptor.input_clusters = t.LVList(t.uint16_t)([])  # no input clusters
    descriptor.output_clusters = t.LVList(t.uint16_t)([])  # no output clusters
    self.reply(0x8004, 0, self._device.application.nwk, descriptor)
def serialize(self):
    """Serialize this attribute reporting configuration.

    Layout: direction, attribute id, then either a timeout (when
    receiving reports) or the datatype/min/max intervals (when sending).
    """
    r = ReportingDirection(self.direction).serialize()
    r += t.uint16_t(self.attrid).serialize()
    if self.direction == ReportingDirection.ReceiveReports:
        # Receiving reports: only the timeout period follows.
        r += t.uint16_t(self.timeout).serialize()
    else:
        r += t.uint8_t(self.datatype).serialize()
        r += t.uint16_t(self.min_interval).serialize()
        r += t.uint16_t(self.max_interval).serialize()
        datatype = DATA_TYPES.get(self.datatype, None)
        if datatype and datatype[2] is Analog:
            # The reportable-change field is only present for analog
            # datatypes; datatype[1] is the Python type used to encode it.
            datatype = datatype[1]
            r += datatype(self.reportable_change).serialize()
    return r
def serialize(self):
    """Serialize attribute id, status, and (on success only) the value."""
    out = t.uint16_t(self.attrid).serialize()
    out += t.uint8_t(self.status).serialize()
    if self.status != 0:
        # Failed reads carry no value payload.
        return out
    return out + self.value.serialize()
async def unbind_group(app, listener, ieee, cmd, data, service):
    """Unbind clusters 6/8/768 of the given device from a group id (hex string)."""
    from zigpy.zdo.types import MultiAddress
    from zigpy import types as t

    LOGGER.debug("running 'unbind group' command: %s", service)
    if ieee is None or not data:
        LOGGER.error("missing ieee")
        return
    src_dev = app.get_device(ieee=ieee)
    group_id = int(data, base=16)
    zdo = src_dev.zdo

    # Destination is a group address (address mode 1).
    dst_addr = MultiAddress()
    dst_addr.addrmode = t.uint8_t(1)
    dst_addr.nwk = t.uint16_t(group_id)

    for src_cluster in (6, 8, 768):
        # First non-ZDO endpoint exposing this cluster as an out-cluster.
        src_ep = next(
            (
                ep_id
                for ep_id, ep in src_dev.endpoints.items()
                if ep_id != 0 and src_cluster in ep.out_clusters
            ),
            None,
        )
        if not src_ep:
            LOGGER.debug(
                "0x%04x: skipping %s cluster as non present",
                src_dev.nwk,
                src_cluster,
            )
            continue
        LOGGER.debug(
            "0x%04x: unbinding %s, ep: %s, cluster: %s",
            src_dev.nwk,
            str(src_dev.ieee),
            src_ep,
            src_cluster,
        )
        res = await zdo.request(
            ZDOCmd.Unbind_req, src_dev.ieee, src_ep, src_cluster, dst_addr
        )
        LOGGER.debug(
            "0x%04x: unbinding group 0x%04x: %s", src_dev.nwk, group_id, res
        )
async def unbind_group(app, listener, ieee, cmd, data, service, params, event_data):
    """Unbind clusters 6/8/768 of the given device from a group.

    `data` is the group id (parsed by u.str2int).  Per-endpoint unbind
    results are accumulated into event_data["result"].
    """
    from zigpy.zdo.types import MultiAddress
    from zigpy import types as t

    LOGGER.debug("running 'unbind group' command: %s", service)
    if ieee is None:
        LOGGER.error("missing ieee")
        return
    if not data:
        LOGGER.error("missing data (destination ieee)")
        return
    src_dev = app.get_device(ieee=ieee)
    group_id = u.str2int(data)
    zdo = src_dev.zdo
    # OnOff (6), LevelControl (8) and 768 — standard ZCL cluster ids.
    src_cls = [6, 8, 768]

    # Destination is a group address (address mode 1).
    dst_addr = MultiAddress()
    dst_addr.addrmode = t.uint8_t(1)
    dst_addr.nwk = t.uint16_t(group_id)
    results = {}
    for src_cluster in src_cls:
        # Find the first non-ZDO endpoint exposing this out-cluster.
        src_ep = None
        for ep_id, ep in src_dev.endpoints.items():
            if ep_id == 0:
                continue
            if src_cluster in ep.out_clusters:
                src_ep = ep_id
                break
        if not src_ep:
            LOGGER.debug(
                "0x%04x: skipping %s cluster as non present",
                src_dev.nwk,
                src_cluster,
            )
            continue
        if src_ep not in results:
            results[src_ep] = []
        LOGGER.debug(
            "0x%04x: unbinding %s, ep: %s, cluster: %s",
            src_dev.nwk,
            str(src_dev.ieee),
            src_ep,
            src_cluster,
        )
        unbind_result = {"endpoint_id": src_ep, "cluster_id": src_cluster}
        res = await zdo.request(
            ZDOCmd.Unbind_req, src_dev.ieee, src_ep, src_cluster, dst_addr
        )
        unbind_result["result"] = res
        results[src_ep].append(unbind_result)
        LOGGER.debug(
            "0x%04x: unbinding group 0x%04x: %s", src_dev.nwk, group_id, res
        )
    event_data["result"] = results
def write_attributes(self, attributes, is_report=False):
    """Write (or report) a mapping of attribute id/name -> value.

    When is_report is True the records are sent as a Report Attributes
    reply (command 0x01); otherwise as a Write Attributes request (0x02).
    Unknown attribute ids and unconvertible values are logged and skipped.
    """
    args = []
    for attrid, value in attributes.items():
        if isinstance(attrid, str):
            # Allow attribute names; resolve to numeric id.
            attrid = self._attridx[attrid]
        if attrid not in self.attributes:
            self.error("%d is not a valid attribute id", attrid)
            continue
        if is_report:
            a = foundation.ReadAttributeRecord()
            a.status = 0
        else:
            a = foundation.Attribute()
        a.attrid = t.uint16_t(attrid)
        a.value = foundation.TypeValue()
        try:
            python_type = self.attributes[attrid][1]
            a.value.type = t.uint8_t(foundation.DATA_TYPE_IDX[python_type])
            a.value.value = python_type(value)
            args.append(a)
        except ValueError as e:
            # Value could not be coerced to the attribute's declared type.
            self.error(str(e))
    if is_report:
        schema = foundation.COMMANDS[0x01][1]
        return self.reply(0x01, schema, args)
    else:
        schema = foundation.COMMANDS[0x02][1]
        return self.request(True, 0x02, schema, args)
def __init__(self, command_id: t.uint16_t = 0x0000, tsn: t.uint8_t = 0) -> None:
    """Store the command id (as ZDOCmd when known) and the transaction seq number."""
    try:
        parsed = ZDOCmd(command_id)
    except ValueError:
        # Not a known ZDO command — keep the raw 16-bit value.
        parsed = t.uint16_t(command_id)
    self._command_id = parsed
    self._tsn = t.uint8_t(tsn)
def command_id(self, value: t.uint16_t) -> None:
    """Command ID setter."""
    try:
        self._command_id = ZDOCmd(value)
    except ValueError:
        # Unknown ZDO command — store the raw 16-bit value instead.
        self._command_id = t.uint16_t(value)
async def read_attributes_raw(self, attributes, manufacturer=None):
    """Send a ZCL Read Attributes (0x00) request for the given attribute ids."""
    schema = foundation.COMMANDS[0x00][1]
    attr_ids = [t.uint16_t(attr) for attr in attributes]
    return await self.request(
        True, 0x00, schema, attr_ids, manufacturer=manufacturer
    )
async def test_mija_battery(zigpy_device_from_quirk, voltage, bpr):
    """Test xiaomi battery voltage to % battery left."""
    header = b"\x1c4\x12\x02\n\x02\xffL\x06\x00\x10\x01!"
    trailer = b"!\xa8\x01$\x00\x00\x00\x00\x00!n\x00 P"
    payload = header + t.uint16_t(voltage).serialize() + trailer

    device = zigpy_device_from_quirk(zhaquirks.xiaomi.mija.motion.Motion)
    device.handle_message(0x260, 0x0000, 1, 1, payload)

    assert device.endpoints[1].power["battery_percentage_remaining"] == bpr
def _parse_packed(cls, packed: t.uint8_t) -> dict[str, typing.Any]:
    """Decode the packed neighbor bit-field via a throwaway deserialized record."""
    # Pad with zeros so the packed value lands at the field offset
    # expected by cls.deserialize (18 bytes before, 3 after).
    buffer = 18 * b"\x00" + t.uint16_t(packed).serialize() + 3 * b"\x00"
    neighbor, _ = cls.deserialize(buffer)
    return {
        "device_type": neighbor.device_type,
        "rx_on_when_idle": neighbor.rx_on_when_idle,
        "relationship": neighbor.relationship,
        "reserved1": neighbor.reserved1,
    }
def _custom_endpoint_init(self, node_config, *argv):
    """Set node_config based on the Lumi device_type.

    Fixes: removed the dead `config = {}` store that was immediately
    overwritten, and corrected the docstring typo ("obn" -> "on").
    Behavior is unchanged: node_config is updated in place with the
    reporting/cluster configuration for a binary sensor.
    """
    selector = node_config.get('template', None)
    if not selector:
        # Fall back to the device type supplied by the caller.
        selector = argv[0]
    _LOGGER.debug(" selector: %s", selector)
    config = {
        "config_report": [
            # [cluster, attribute, min_interval, max_interval,
            #  reportable_change, manufacturer]
            [0xfc02, 0x0010, 1, 1800, t.uint8_t(1), 0x1241],
            [0xfc02, 0x0012, 1, 1800, t.uint16_t(1), 0x1241],
            [0xfc02, 0x0013, 1, 1800, t.uint16_t(1), 0x1241],
            [0xfc02, 0x0014, 1, 1800, t.uint16_t(1), 0x1241],
        ],
        "in_cluster": [0x0000, 0x0402, 0x0500, 0xfc02],
        "out_cluster": [],
        "type": "binary_sensor",
    }
    node_config.update(config)
async def _async_group_binding_operation(
    self,
    group_id: int,
    operation: zdo_types.ZDOCmd,
    cluster_bindings: list[ClusterBinding],
) -> None:
    """Create or remove a direct zigbee binding between a device and a group."""
    zdo = self._zigpy_device.zdo
    op_msg = "0x%04x: %s %s, ep: %s, cluster: %s to group: 0x%04x"
    # Destination is a group address (address mode 1).
    destination_address = zdo_types.MultiAddress()
    destination_address.addrmode = types.uint8_t(1)
    destination_address.nwk = types.uint16_t(group_id)
    tasks = []
    for cluster_binding in cluster_bindings:
        # Skip the ZDO endpoint; only bind clusters actually exposed
        # as out-clusters on the endpoint.
        if cluster_binding.endpoint_id == 0:
            continue
        if (
            cluster_binding.id
            in self._zigpy_device.endpoints[
                cluster_binding.endpoint_id
            ].out_clusters
        ):
            op_params = (
                self.nwk,
                operation.name,
                str(self.ieee),
                cluster_binding.endpoint_id,
                cluster_binding.id,
                group_id,
            )
            zdo.debug(f"processing {op_msg}", *op_params)
            # Keep (request, msg, params) together so results can be
            # matched back to their log context after gather().
            tasks.append(
                (
                    zdo.request(
                        operation,
                        self.ieee,
                        cluster_binding.endpoint_id,
                        cluster_binding.id,
                        destination_address,
                    ),
                    op_msg,
                    op_params,
                )
            )
    # return_exceptions=True: log failures per request instead of raising.
    res = await asyncio.gather(*(t[0] for t in tasks), return_exceptions=True)
    for outcome, log_msg in zip(res, tasks):
        if isinstance(outcome, Exception):
            fmt = f"{log_msg[1]} failed: %s"
        else:
            fmt = f"{log_msg[1]} completed: %s"
        zdo.debug(fmt, *(log_msg[2] + (outcome,)))
def _device(new=False, zdo_init=False, nwk=t.uint16_t(0x1234)):
    """Build a test Device in the requested lifecycle status."""
    from zigpy.device import Device, Status as DeviceStatus

    ieee, _ = t.EUI64.deserialize(b"\x08\x07\x06\x05\x04\x03\x02\x01")
    dev = Device(app, ieee, nwk)
    if new:
        status = DeviceStatus.NEW
    elif zdo_init:
        status = DeviceStatus.ZDO_INIT
    else:
        status = DeviceStatus.ENDPOINTS_INIT
    dev.status = status
    return dev
async def test_xiaomi_battery(zigpy_device_from_quirk, voltage, bpr):
    """Test xiaomi battery voltage to % battery left."""
    prefix = b'\x1c_\x11I\n\x01\xffB"\x01!'
    suffix = (
        b"\x03(\r\x04!\xa8\x13\x05!\xcb\x00\x06$\x01\x00\x00\x00\x00\x08!\x04\x02\n!"
        b"\x00\x00d\x10\x00"
    )
    device = zigpy_device_from_quirk(
        zhaquirks.xiaomi.aqara.vibration_aq1.VibrationAQ1
    )
    device.handle_message(
        0x260, 0x0000, 1, 1, prefix + t.uint16_t(voltage).serialize() + suffix
    )
    assert device.endpoints[1].power["battery_percentage_remaining"] == bpr
def test_size_prefixed_simple_descriptor():
    """Round-trip a SizePrefixedSimpleDescriptor through serialize/deserialize."""
    descriptor = types.SizePrefixedSimpleDescriptor()
    descriptor.endpoint = t.uint8_t(1)
    descriptor.profile = t.uint16_t(2)
    descriptor.device_type = t.uint16_t(3)
    descriptor.device_version = t.uint8_t(4)
    descriptor.input_clusters = t.LVList(t.uint16_t)([t.uint16_t(5), t.uint16_t(6)])
    descriptor.output_clusters = t.LVList(t.uint16_t)([t.uint16_t(7), t.uint16_t(8)])

    serialized = descriptor.serialize()
    # The first byte is the size prefix: length of everything after it.
    assert serialized[0] == len(serialized) - 1

    decoded, _ = types.SizePrefixedSimpleDescriptor.deserialize(serialized)
    assert descriptor.input_clusters == decoded.input_clusters
    assert descriptor.output_clusters == decoded.output_clusters
def data(tag_id, payload=b""):
    """Build a record: 16-bit tag, 32-bit payload length, then the payload."""
    header = t.uint16_t(tag_id).serialize() + t.uint32_t(len(payload)).serialize()
    return header + payload
async def read_attributes_raw(self, attributes, manufacturer=None):
    """Prevent remote reads."""
    # Answer every requested attribute straight from the local cache;
    # missing entries come back as None.
    return [
        self._attr_cache.get(types.uint16_t(attr)) for attr in attributes
    ]
def __post_init__(self):
    """Coerce id to uint16_t and sanity-check the access mode and name."""
    if not isinstance(self.id, t.uint16_t):
        # Frozen dataclass: object.__setattr__ bypasses the immutability guard.
        object.__setattr__(self, "id", t.uint16_t(self.id))
    assert self.access in {None, "r", "w", "rw"}
    ensure_valid_name(self.name)
def read_attributes_raw(self, attributes):
    """Send a ZCL Read Attributes (0x00) request (generator-based coroutine)."""
    schema = foundation.COMMANDS[0x00][1]
    attr_ids = [t.uint16_t(attr) for attr in attributes]
    result = yield from self.request(True, 0x00, schema, attr_ids)
    return result
def manufacturer(self) -> Optional[t.uint16_t]:
    """Return manufacturer id."""
    raw = self._manufacturer
    return None if raw is None else t.uint16_t(raw)
async def read_attributes_raw(self, attributes, manufacturer=None):
    """Serve attribute reads from the local cache; missing entries are None."""
    return [
        self._attr_cache.get(types.uint16_t(attr)) for attr in attributes
    ]
def read_attributes_raw(self, attributes, manufacturer=None):
    """Coerce attribute ids to uint16_t and delegate to _read_attributes."""
    attr_ids = [t.uint16_t(attr) for attr in attributes]
    return self._read_attributes(attr_ids, manufacturer=manufacturer)
def _hex_uint16_repr(v: int) -> str:
    """Render an int as the hex repr of a 16-bit unsigned value."""
    value = t.uint16_t(v)
    return value._hex_repr()
def serialize(self):
    """Serialize the status; append direction and attribute id on failure."""
    out = Status(self.status).serialize()
    if self.status == Status.SUCCESS:
        # Success records carry no additional fields.
        return out
    out += t.uint8_t(self.direction).serialize()
    out += t.uint16_t(self.attrid).serialize()
    return out
def nwk():
    """Return a fixed NWK short address used by the tests."""
    address = t.uint16_t(0x0100)
    return address
async def attr_write(  # noqa: C901
        app, listener, ieee, cmd, data, service, params, event_data):
    """Read and/or write a single ZCL attribute on a device.

    When cmd == "attr_write", converts params[ATTR_VAL] to the ZCL type
    given by params[ATTR_TYPE], optionally reads before/after the write,
    and records results/warnings/errors into event_data.  For plain
    reads (any other cmd) only the read path runs.  Optionally mirrors
    the read value into an HA state and appends a CSV line.
    Returns the last read result (for internal use).
    """
    success = True
    dev = app.get_device(ieee=ieee)

    # Decode endpoint
    if params[p.EP_ID] is None or params[p.EP_ID] == "":
        params[p.EP_ID] = u.find_endpoint(dev, params[p.CLUSTER_ID])

    if params[p.EP_ID] not in dev.endpoints:
        msg = f"Endpoint {params[p.EP_ID]} not found for '{ieee!r}"
        LOGGER.error(msg)
        raise Exception(msg)

    if params[p.CLUSTER_ID] not in dev.endpoints[params[p.EP_ID]].in_clusters:
        msg = "InCluster 0x{:04X} not found for '{}', endpoint {}".format(
            params[p.CLUSTER_ID], repr(ieee), params[p.EP_ID])
        if params[p.CLUSTER_ID] in dev.endpoints[params[p.EP_ID]].out_clusters:
            # Cluster exists only as an OutCluster: warn but continue.
            msg = f'{cmd}: "Using" OutCluster. {msg}'
            LOGGER.warning(msg)
            if "warnings" not in event_data:
                event_data["warnings"] = []
            event_data["warnings"].append(msg)
        else:
            LOGGER.error(msg)
            raise Exception(msg)

    # NOTE(review): this always indexes in_clusters, even on the
    # OutCluster warning path above — looks like it would KeyError
    # there; confirm intended behavior.
    cluster = dev.endpoints[params[p.EP_ID]].in_clusters[params[p.CLUSTER_ID]]

    # Prepare read and write lists
    attr_write_list = []
    attr_read_list = []

    # Decode attribute(s)
    # Currently only one attribute is possible, but the parameter
    # format could allow for multiple attributes for instance by
    # adding a split character such as ':' for attr_id, attr_type
    # and attr_value
    # Then the match should be in a loop

    # Decode attribute id
    # Could accept name for attribute, but extra code to check
    attr_id = params[p.ATTR_ID]

    attr_read_list.append(attr_id)  # Read before write list

    compare_val = None

    if cmd == "attr_write":
        attr_type = params[p.ATTR_TYPE]
        attr_val_str = params[p.ATTR_VAL]

        # Type only needed for write
        if attr_type is None or attr_val_str is None:
            event_data["errors"].append(
                "attr_type and attr_val must be set for attr_write")
        else:
            # Convert attribute value (provided as a string)
            # to appropriate attribute value.
            # If the attr_type is not set, only read the attribute.
            attr_val = None
            if attr_type == 0x10:
                # Boolean
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.Bool(compare_val))
            elif attr_type == 0x20:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.uint8_t(compare_val))
            elif attr_type == 0x21:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.uint16_t(compare_val))
            elif attr_type == 0x22:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.uint24_t(compare_val))
            elif attr_type == 0x23:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.uint32_t(compare_val))
            elif attr_type == 0x24:
                # NOTE(review): ZCL 0x24 is a 40-bit uint but this encodes
                # uint32_t — confirm this narrowing is intended.
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.uint32_t(compare_val))
            elif attr_type == 0x25:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.uint48_t(compare_val))
            elif attr_type == 0x26:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.uint56_t(compare_val))
            elif attr_type == 0x27:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.uint64_t(compare_val))
            elif attr_type == 0x28:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.int8_t(compare_val))
            elif attr_type == 0x29:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.int16_t(compare_val))
            elif attr_type == 0x2A:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.int24_t(compare_val))
            elif attr_type == 0x2B:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.int32_t(compare_val))
            elif attr_type == 0x2C:
                # NOTE(review): ZCL 0x2C is a 40-bit int but this encodes
                # int32_t — confirm this narrowing is intended.
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.int32_t(compare_val))
            elif attr_type == 0x2D:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.int48_t(compare_val))
            elif attr_type == 0x2E:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.int56_t(compare_val))
            elif attr_type == 0x2F:
                compare_val = u.str2int(attr_val_str)
                attr_val = f.TypeValue(attr_type, t.int64_t(compare_val))
            elif attr_type <= 0x31 and attr_type >= 0x08:
                compare_val = u.str2int(attr_val_str)
                # uint, int, bool, bitmap and enum
                # NOTE(review): t.FixedIntType is the generic base type —
                # verify it is instantiable here.
                attr_val = f.TypeValue(attr_type, t.FixedIntType(compare_val))
            elif attr_type in [0x41, 0x42]:  # Octet string
                # Octet string requires length -> LVBytes
                compare_val = attr_val_str
                event_data["str"] = attr_val_str
                if type(attr_val_str) == str:
                    attr_val_str = bytes(attr_val_str, "utf-8")
                if isinstance(attr_val_str, list):
                    # Convert list to List of uint8_t
                    attr_val_str = t.List[t.uint8_t](
                        [t.uint8_t(i) for i in attr_val_str])
                attr_val = f.TypeValue(attr_type, t.LVBytes(attr_val_str))

            if attr_val is not None:
                attr = f.Attribute(attr_id, value=attr_val)
                attr_write_list.append(attr)  # Write list
            else:
                msg = ("attr_type {} not supported, "
                       + "or incorrect parameters (attr_val={})").format(
                    params[p.ATTR_TYPE], params[p.ATTR_VAL])
                event_data["errors"].append(msg)
                LOGGER.debug(msg)

            LOGGER.debug(
                "ATTR TYPE %s, attr_val %s",
                params[p.ATTR_TYPE],
                params[p.ATTR_VAL],
            )

    result_read = None
    # Read when explicitly requested, when there is nothing to write,
    # or when this is not an attr_write command at all.
    if (params[p.READ_BEFORE_WRITE]
            or (len(attr_write_list) == 0)
            or (cmd != S.ATTR_WRITE)):
        LOGGER.debug("Request attr read %s", attr_read_list)
        result_read = await cluster_read_attributes(
            cluster,
            attr_read_list,
            manufacturer=params[p.MANF],
            tries=params[p.TRIES],
        )
        LOGGER.debug("Reading attr result (attrs, status): %s", result_read)
        success = (len(result_read[1]) == 0) and (len(result_read[0]) == 1)

    # True if value that should be written is the equal to the read one
    # (short-circuit on READ_BEFORE_WRITE keeps result_read from being
    # dereferenced while still None)
    write_is_equal = ((params[p.READ_BEFORE_WRITE])
                      and (len(attr_write_list) != 0)
                      and ((attr_id in result_read[0])
                           and (result_read[0][attr_id] == compare_val)))

    event_data["write_is_equal"] = write_is_equal
    if write_is_equal and (cmd == "attr_write"):
        event_data["info"] = "Data read is equal to data to write"

    if (len(attr_write_list) != 0
            and (not (params[p.READ_BEFORE_WRITE])
                 or params[p.WRITE_IF_EQUAL]
                 or not (write_is_equal))
            and cmd == "attr_write"):
        if result_read is not None:
            event_data["read_before"] = result_read
            result_read = None

        LOGGER.debug("Request attr write %s", attr_write_list)
        result_write = await cluster__write_attributes(
            cluster,
            attr_write_list,
            manufacturer=params[p.MANF],
            tries=params[p.TRIES],
        )
        LOGGER.debug("Write attr status: %s", result_write)
        event_data["result_write"] = result_write
        success = False
        try:
            # LOGGER.debug("Write attr status: %s", result_write[0][0].status)
            success = result_write[0][0].status == f.Status.SUCCESS
            LOGGER.debug(f"Write success: {success}")
        except Exception as e:
            event_data["errors"].append(repr(e))
            success = False
        # success = (len(result_write[1])==0)

        if params[p.READ_AFTER_WRITE]:
            LOGGER.debug(f"Request attr read {attr_read_list!r}")
            result_read = await cluster_read_attributes(
                cluster,
                attr_read_list,
                manufacturer=params[p.MANF],
                tries=params[p.TRIES],
            )
            LOGGER.debug(
                f"Reading attr result (attrs, status): {result_read!r}")
            # read_is_equal = (result_read[0][attr_id] == compare_val)
            success = (success
                       and (len(result_read[1]) == 0
                            and len(result_read[0]) == 1)
                       and (result_read[0][attr_id] == compare_val))

    if result_read is not None:
        event_data["result_read"] = result_read
        if len(result_read[1]) == 0:
            read_val = result_read[0][attr_id]
        else:
            msg = (f"Result: {result_read[1]}"
                   + f" - Attribute {attr_id} not in read {result_read!r}")
            LOGGER.warning(msg)
            if "warnings" not in event_data:
                event_data["warnings"] = []
            event_data["warnings"].append(msg)
            success = False
    else:
        read_val = None

    event_data["success"] = success

    # Write value to provided state or state attribute
    # NOTE(review): result_read may be None here when STATE_ID is set but
    # no read happened — confirm callers always request a read with STATE_ID.
    if params[p.STATE_ID] is not None:
        if len(result_read[1]) == 0 and len(result_read[0]) == 1:
            # No error and one result
            for id, val in result_read[0].items():
                if params[p.STATE_ATTR] is not None:
                    LOGGER.debug(
                        "Set state %s[%s] -> %s from attr_id %s",
                        params[p.STATE_ID],
                        params[p.STATE_ATTR],
                        val,
                        id,
                    )
                else:
                    LOGGER.debug(
                        "Set state %s -> %s from attr_id %s",
                        params[p.STATE_ID],
                        val,
                        id,
                    )
                u.set_state(
                    listener._hass,
                    params[p.STATE_ID],
                    val,
                    key=params[p.STATE_ATTR],
                    allow_create=params[p.ALLOW_CREATE],
                )
                LOGGER.debug("STATE is set")

    if success and params[p.CSV_FILE] is not None:
        fields = []
        if params[p.CSV_LABEL] is not None:
            attr_name = params[p.CSV_LABEL]
        else:
            try:
                attr_name = cluster.attributes.get(
                    attr_id, (str(attr_id), None))[0]
            except Exception:
                attr_name = attr_id

        fields.append(dt_util.utcnow().isoformat())
        fields.append(attr_name)
        fields.append(read_val)
        fields.append("0x%04X" % (attr_id)),
        fields.append("0x%04X" % (cluster.cluster_id)),
        fields.append(cluster.endpoint.endpoint_id)
        fields.append(str(cluster.endpoint.device.ieee))
        fields.append(("0x%04X" % (params[p.MANF]))
                      if params[p.MANF] is not None else "")
        u.append_to_csvfile(
            fields,
            "csv",
            params[p.CSV_FILE],
            f"{attr_name}={read_val}",
            listener=listener,
        )

    importlib.reload(u)
    # Make sure the recorded read result is JSON-serializable for HA events.
    if "result_read" in event_data and not u.isJsonable(
            event_data["result_read"]):
        event_data["result_read"] = repr(event_data["result_read"])

    # For internal use
    return result_read