def _process_doc(self, thing, data) -> typing.Optional[typing.List[str]]:
    doc = ""
    doc_quoted: typing.Optional[typing.List[str]] = None

    if data.doc is not None:
        doc = data.doc
    elif "doxygen" in thing:
        doc = thing["doxygen"]
        doc = sphinxify.process_raw(doc)

    if doc:
        # TODO
        doc = doc.replace("\\", "\\\\").replace('"', '\\"')
        doc_quoted = doc.splitlines(keepends=True)
        doc_quoted = ['"%s"' % (dq.replace("\n", "\\n"),) for dq in doc_quoted]

    return doc_quoted
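
# Illustrative sketch (hypothetical input, not part of the original hooks): assuming
# a parsed entity whose "doxygen" text sphinxify reduces to
# "Sets the motor output.\n:param speed: percent output", _process_doc returns the
# per-line C string literals
#   ['"Sets the motor output.\n"', '":param speed: percent output"']
# (with \n as the literal two-character escape), which a template can join to form
# the docstring argument of a generated pybind11 def() call.
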

def public_method_hook(fn, data):
    # Ignore operators, move constructors, copy constructors
    if (
        fn["name"].startswith("operator")
        or fn.get("destructor")
        or (fn.get("constructor") and fn["parameters"][0]["name"] == "&")
    ):
        fn["data"] = {"ignore": True}
        return

    # Python exposed function name converted to camelcase
    x_name = fn["name"]
    x_name = x_name[0].lower() + x_name[1:]

    x_in_params = []
    x_out_params = []
    x_rets = []

    # Simulation assertions
    x_param_checks = []
    x_return_checks = []

    data = data.get(fn["name"])
    if data is None:
        # ensure every function is in our yaml
        print("WARNING:", fn["parent"]["name"], "method", fn["name"], "missing")
        data = {}
        # assert False, fn['name']

    if "overloads" in data:
        _sig = ", ".join(
            p.get("enum", p["raw_type"]) + "&" * p["reference"]
            for p in fn["parameters"]
        )
        if _sig in data["overloads"]:
            data = data.copy()
            data.update(data["overloads"][_sig])
        else:
            print(
                "WARNING: Missing overload %s::%s(%s)"
                % (fn["parent"]["name"], fn["name"], _sig)
            )

    param_override = data.get("param_override", {})

    for i, p in enumerate(fn["parameters"]):
        if p["name"] == "":
            p["name"] = "param%s" % i
        p["x_type"] = p.get("enum", p["raw_type"])
        p["x_callname"] = p["name"]

        # Python annotations for sim
        p["x_pyann_type"] = _to_annotation(p["x_type"])

        if "forward_declared" in p:
            p["x_pyann_type"] = repr(p["x_pyann_type"])
            fn["forward_declare"] = True
            fn["parent"]["has_fwd_declare"] = True

        if p["name"] in param_override:
            p.update(param_override[p["name"]])

        p["x_pyann"] = "%(name)s: %(x_pyann_type)s" % p
        p["x_pyarg"] = 'py::arg("%(name)s")' % p

        if "default" in p:
            p["default"] = str(p["default"])
            p["x_pyann"] += " = " + _values.get(p["default"], p["default"])
            p["x_pyarg"] += "=" + p["default"]

        if p["pointer"]:
            p["x_callname"] = "&%(x_callname)s" % p
            x_out_params.append(p)
        elif p["array"]:
            asz = p.get("array_size", 0)
            if asz:
                p["x_pyann_type"] = "typing.List[%s]" % _to_annotation(p["raw_type"])
                p["x_type"] = "std::array<%s, %s>" % (p["x_type"], asz)
                p["x_callname"] = "%(x_callname)s.data()" % p
            else:
                # it's a vector
                pass
            x_out_params.append(p)
        else:
            chk = _gen_check(p["name"], p["x_type"])
            if chk:
                x_param_checks.append("assert %s" % chk)
            x_in_params.append(p)

        if p["constant"]:
            p["x_type"] = "const " + p["x_type"]

        p["x_type"] += "&" * p["reference"]
        p["x_decl"] = "%s %s" % (p["x_type"], p["name"])

    x_callstart = ""
    x_callend = ""
    x_wrap_return = ""

    # Return all out parameters
    x_rets.extend(x_out_params)

    # if the function has out parameters and if the return value
    # is an error code, suppress the error code. This matches the Java
    # APIs, and the user can retrieve the error code from getLastError if
    # they really care
    if not len(x_rets) and fn["rtnType"] != "void":
        x_callstart = "auto __ret ="
        x_rets.insert(
            0,
            dict(
                name="__ret",
                x_type=fn["rtnType"],
                x_pyann_type=_to_annotation(fn["rtnType"]),
            ),
        )

    # Save some time in the common case -- set the error code to 0
    # if there's a single retval and the type is ErrorCode
    if fn["rtnType"] == "CANError":
        x_param_checks.append("retval = CANError.kOK")

    if len(x_rets) == 1 and x_rets[0]["x_type"] != "void":
        x_wrap_return = "return %s;" % x_rets[0]["name"]
        x_wrap_return_type = x_rets[0]["x_type"]
        x_pyann_ret = x_rets[0]["x_pyann_type"]

        chk = _gen_check("retval", x_wrap_return_type, strict=True)
        if chk:
            x_return_checks.append("assert %s" % chk)
    elif len(x_rets) > 1:
        x_pyann_ret = "typing.Tuple[%s]" % (
            ", ".join([p["x_pyann_type"] for p in x_rets]),
        )
        x_wrap_return = "return std::make_tuple(%s);" % ",".join(
            [p["name"] for p in x_rets]
        )
        x_wrap_return_type = "std::tuple<%s>" % (
            ", ".join([p["x_type"] for p in x_rets])
        )

        x_return_checks.append(
            "assert isinstance(retval, tuple) and len(retval) == %s" % len(x_rets)
        )
        for i, _p in enumerate(x_rets):
            chk = _gen_check("retval[%d]" % i, _p["raw_type"], strict=True)
            if chk:
                x_return_checks.append("assert %s" % chk)
    else:
        x_pyann_ret = "None"
        x_wrap_return_type = "void"

    # Temporary values to store out parameters in
    x_temprefs = ""
    if x_out_params:
        x_temprefs = ";".join(["%(x_type)s %(name)s" % p for p in x_out_params]) + ";"

    if "return" in data.get("code", ""):
        raise ValueError(
            "%s: Do not use return, assign to retval instead" % fn["name"]
        )

    # Rename internal functions
    if data.get("internal", False):
        x_name = "_" + x_name
    elif data.get("rename", False):
        x_name = data["rename"]
    elif fn["constructor"]:
        x_name = "__init__"

    if "doc" in data:
        doc = data["doc"]
    elif "doxygen" in fn:
        # work around a CppHeaderParser bug
        doc = fn["doxygen"].rpartition("*//*")[2]
        doc = sphinxify.process_raw(doc)

    if "hook" in data:
        eval(data["hook"])(fn, data)

    name = fn["name"]
    hascode = "code" in data or "get" in data or "set" in data

    # lazy :)
    fn.update(locals())
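
# Illustrative sketch (hypothetical method, not from the real headers): for a parsed
# `CANError GetVelocity(double* velocity)` with an empty yaml entry, the hook above
# marks `velocity` as an out parameter (x_callname "&velocity"), suppresses the
# CANError return in favor of the out value, and ends up with roughly
# x_temprefs = "double velocity;" and x_wrap_return = "return velocity;", plus the
# sim statement "retval = CANError.kOK" appended to x_param_checks.
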

def _function_hook(self, fn, data: FunctionData, internal: bool = False):
    """shared with methods/functions"""

    # Python exposed function name converted to camelcase
    x_name = self._set_name(fn["name"], data)
    if not data.rename and not x_name[:2].isupper():
        x_name = x_name[0].lower() + x_name[1:]

    # if cpp_code is specified, don't release the gil unless the user
    # specifically asks for it
    if data.no_release_gil is None:
        if data.cpp_code:
            data.no_release_gil = True

    x_in_params = []
    x_out_params = []
    x_all_params = []
    x_rets = []
    x_temps = []
    x_keepalives = []

    x_genlambda = False
    x_lambda_pre = []
    x_lambda_post = []

    # Use this if one of the parameter types doesn't quite match
    param_override = data.param_override

    # buffers: accepts a python object that supports the buffer protocol
    #          as input. If the buffer is an 'out' buffer, then it
    #          will request a writeable buffer. Data is written by the
    #          wrapped function to that buffer directly, and the length
    #          written (if the length is a pointer) will be returned
    buffer_params = {}
    buflen_params = {}
    if data.buffers:
        for bufinfo in data.buffers:
            if bufinfo.src == bufinfo.len:
                raise ValueError(
                    f"buffer src({bufinfo.src}) and len({bufinfo.len}) cannot be the same"
                )
            buffer_params[bufinfo.src] = bufinfo
            buflen_params[bufinfo.len] = bufinfo

    self._add_type_caster(fn["returns"])

    is_constructor = fn.get("constructor")

    for i, p in enumerate(fn["parameters"]):
        if is_constructor and p["reference"]:
            x_keepalives.append((1, i + 2))

        if p["raw_type"] in _int32_types:
            p["fundamental"] = True
            p["unresolved"] = False
        if p["name"] == "":
            p["name"] = "param%s" % i
        p["x_type"] = p.get("enum", p["raw_type"])
        p["x_callname"] = p["name"]
        p["x_retname"] = p["name"]

        po = param_override.get(p["name"])
        if po:
            p.update(po.dict(exclude_unset=True))

        p["x_pyarg"] = 'py::arg("%(name)s")' % p

        if "default" in p:
            p["default"] = self._resolve_default(fn, p["default"])
            p["x_pyarg"] += "=" + p["default"]

        ptype = "in"

        bufinfo = buffer_params.pop(p["name"], None)
        buflen = buflen_params.pop(p["name"], None)

        if bufinfo:
            x_genlambda = True
            bname = f"__{bufinfo.src}"

            p["constant"] = 1
            p["reference"] = 1
            p["pointer"] = 0

            p["x_callname"] = f"({p['x_type']}*){bname}.ptr"
            p["x_type"] = "py::buffer"

            # this doesn't seem to be true for bytearrays, which is silly
            # x_lambda_pre.append(
            #     f'if (PyBuffer_IsContiguous((Py_buffer*){p["name"]}.ptr(), \'C\') == 0) throw py::value_error("{p["name"]}: buffer must be contiguous")'
            # )

            # TODO: check for dimensions, strides, other dangerous things

            # bufinfo was validated and converted before it got here
            if bufinfo.type is BufferType.IN:
                ptype = "in"
                x_lambda_pre += [f"auto {bname} = {p['name']}.request(false)"]
            else:
                ptype = "in"
                x_lambda_pre += [f"auto {bname} = {p['name']}.request(true)"]

            x_lambda_pre += [f"{bufinfo.len} = {bname}.size * {bname}.itemsize"]

            if bufinfo.minsz:
                x_lambda_pre.append(
                    f'if ({bufinfo.len} < {bufinfo.minsz}) throw py::value_error("{p["name"]}: minimum buffer size is {bufinfo.minsz}")'
                )

        elif buflen:
            if p["pointer"]:
                p["x_callname"] = f"&{buflen.len}"
                ptype = "out"
            else:
                # if it's not a pointer, then the called function
                # can't communicate through it, so ignore the parameter
                p["x_callname"] = buflen.len
                x_temps.append(p)
                ptype = "ignored"

        elif p.get("force_out") or (
            p["pointer"] and not p["constant"] and p["fundamental"]
        ):
            p["x_callname"] = "&%(x_callname)s" % p
            ptype = "out"
        elif p["array"]:
            asz = p.get("array_size", 0)
            if asz:
                p["x_type"] = "std::array<%s, %s>" % (p["x_type"], asz)
                p["x_callname"] = "%(x_callname)s.data()" % p
            else:
                # it's a vector
                pass
            ptype = "out"

        if p.get("ignore"):
            pass
        else:
            x_all_params.append(p)
            if ptype == "out":
                x_out_params.append(p)
                x_temps.append(p)
            elif ptype == "in":
                x_in_params.append(p)

        self._add_type_caster(p["x_type"])

        if p["constant"]:
            p["x_type"] = "const " + p["x_type"]

        p["x_type_full"] = p["x_type"]
        p["x_type_full"] += "&" * p["reference"]
        p["x_type_full"] += "*" * p["pointer"]

        p["x_decl"] = "%s %s" % (p["x_type_full"], p["name"])

    if buffer_params:
        raise ValueError(
            "incorrect buffer param names '%s'" % ("', '".join(buffer_params.keys()))
        )

    x_callstart = ""
    x_callend = ""
    x_wrap_return = ""

    x_return_value_policy = _rvp_map[data.return_value_policy]

    if x_out_params:
        x_genlambda = True

        # Return all out parameters
        x_rets.extend(x_out_params)

        if fn["rtnType"] != "void":
            x_callstart = "auto __ret ="
            x_rets.insert(0, dict(x_retname="__ret", x_type=fn["rtnType"]))

        if len(x_rets) == 1 and x_rets[0]["x_type"] != "void":
            x_wrap_return = "return %s;" % x_rets[0]["x_retname"]
        elif len(x_rets) > 1:
            x_wrap_return = "return std::make_tuple(%s);" % ",".join(
                [p["x_retname"] for p in x_rets]
            )

    # Temporary values to store out parameters in
    if x_temps:
        for out in reversed(x_temps):
            odef = out.get("default", "0")
            x_lambda_pre.insert(0, f"{out['x_type']} {out['name']} = {odef}")

    # Rename functions
    if data.rename:
        x_name = data.rename
    elif data.internal or internal:
        x_name = "_" + x_name
    elif fn["constructor"]:
        x_name = "__init__"

    doc = ""
    doc_quoted = ""

    if data.doc is not None:
        doc = data.doc
    elif "doxygen" in fn:
        doc = fn["doxygen"]
        doc = sphinxify.process_raw(doc)

    if doc:
        # TODO
        doc = doc.replace("\\", "\\\\").replace('"', '\\"')
        doc_quoted = doc.splitlines(keepends=True)
        doc_quoted = ['"%s"' % (dq.replace("\n", "\\n"),) for dq in doc_quoted]

    if data.keepalive is not None:
        x_keepalives = data.keepalive

    # if "hook" in data:
    #     eval(data["hook"])(fn, data)

    # bind new attributes to the function definition
    # -> previously used locals(), but this is more explicit
    #    and easier to not mess up
    fn.update(
        dict(
            data=data,
            # transforms
            x_name=x_name,
            x_all_params=x_all_params,
            x_in_params=x_in_params,
            x_out_params=x_out_params,
            x_rets=x_rets,
            x_keepalives=x_keepalives,
            x_return_value_policy=x_return_value_policy,
            # lambda generation
            x_genlambda=x_genlambda,
            x_callstart=x_callstart,
            x_lambda_pre=x_lambda_pre,
            x_lambda_post=x_lambda_post,
            x_callend=x_callend,
            x_wrap_return=x_wrap_return,
            # docstrings
            x_doc=doc,
            x_doc_quoted=doc_quoted,
        )
    )
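
# Illustrative sketch (hypothetical names, not from the real data files): for a
# parsed `int ReadStream(uint8_t* buffer, size_t* len)` with a buffers entry of
# src="buffer", len="len", type=OUT, the loop above turns `buffer` into a
# py::buffer argument of the generated lambda (the C call receives
# "(uint8_t*)__buffer.ptr"), turns `len` into a temporary that is first declared
# in x_lambda_pre and then set to the writable buffer's byte size, and the
# wrapped call returns std::make_tuple(__ret, len) to Python.
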

def _function_hook(fn, global_data, fn_data, typ):
    """shared with methods/functions"""

    # Ignore operators, move constructors, copy constructors
    if (
        fn.get("operator")
        or fn.get("destructor")
        or (
            fn.get("constructor")
            and fn["parameters"]
            and fn["parameters"][0]["class"]
            and fn["parameters"][0]["class"]["name"] == fn["name"]
        )
    ):
        fn["data"] = typ({"ignore": True})
        return

    # Python exposed function name converted to camelcase
    x_name = _strip_prefixes(global_data, fn["name"])
    x_name = x_name[0].lower() + x_name[1:]

    x_in_params = []
    x_out_params = []
    x_rets = []
    x_temps = []

    x_genlambda = False
    x_lambda_pre = []
    x_lambda_post = []

    data = fn_data.get(fn["name"], _missing)
    if data is _missing:
        # ensure every function is in our yaml so someone can review it
        if "parent" in fn:
            print("WARNING:", fn["parent"]["name"], "method", fn["name"], "missing")
        else:
            print("WARNING: function", fn["name"], "missing")
        data = typ()
        # assert False, fn['name']
    elif data is None:
        data = typ()

    if getattr(data, "overloads", {}):
        _sig = ", ".join(
            p.get("enum", p["raw_type"]) + "&" * p["reference"] + "*" * p["pointer"]
            for p in fn["parameters"]
        )
        if _sig in data.overloads:
            overload = data.overloads[_sig]
            if overload:
                data = data.to_native()
                data.update(overload.to_native())
                data = typ(data)
        else:
            print(
                "WARNING: Missing overload %s::%s(%s)"
                % (fn["parent"]["name"], fn["name"], _sig)
            )

    # Use this if one of the parameter types doesn't quite match
    param_override = data.param_override

    # fix cppheaderparser quirk
    if len(fn["parameters"]) == 1:
        p = fn["parameters"][0]
        if p["type"] == "void" and not p["pointer"]:
            fn["parameters"] = []

    # buffers: accepts a python object that supports the buffer protocol
    #          as input. If the buffer is an 'out' buffer, then it
    #          will request a writeable buffer. Data is written by the
    #          wrapped function to that buffer directly, and the length
    #          written (if the length is a pointer) will be returned
    buffer_params = {}
    buflen_params = {}
    if data.buffers:
        for bufinfo in data.buffers:
            if bufinfo.src == bufinfo.len:
                raise ValueError(
                    f"buffer src({bufinfo.src}) and len({bufinfo.len}) cannot be the same"
                )
            buffer_params[bufinfo.src] = bufinfo
            buflen_params[bufinfo.len] = bufinfo

    for i, p in enumerate(fn["parameters"]):
        if p["raw_type"] in _int32_types:
            p["fundamental"] = True
            p["unresolved"] = False
        if p["name"] == "":
            p["name"] = "param%s" % i
        p["x_type"] = p.get("enum", p["raw_type"])
        p["x_callname"] = p["name"]
        p["x_retname"] = p["name"]

        if "forward_declared" in p:
            fn["forward_declare"] = True
            if "parent" in fn:
                fn["parent"]["has_fwd_declare"] = True

        po = param_override.get(p["name"])
        if po:
            p.update(po.to_native())

        p["x_pyarg"] = 'py::arg("%(name)s")' % p

        if "default" in p:
            p["default"] = _resolve_default(fn, p["default"])
            p["x_pyarg"] += "=" + p["default"]

        ptype = "in"

        bufinfo = buffer_params.pop(p["name"], None)
        buflen = buflen_params.pop(p["name"], None)

        if bufinfo:
            x_genlambda = True
            bname = f"__{bufinfo.src}"

            p["constant"] = 1
            p["reference"] = 1
            p["pointer"] = 0

            p["x_callname"] = f"({p['x_type']}*){bname}.ptr"
            p["x_type"] = "py::buffer"

            # this doesn't seem to be true for bytearrays, which is silly
            # x_lambda_pre.append(
            #     f'if (PyBuffer_IsContiguous((Py_buffer*){p["name"]}.ptr(), \'C\') == 0) throw py::value_error("{p["name"]}: buffer must be contiguous")'
            # )

            # TODO: check for dimensions, strides, other dangerous things

            if bufinfo.type == "in":
                ptype = "in"
                x_lambda_pre += [f"auto {bname} = {p['name']}.request(false)"]
            elif bufinfo.type in ("inout", "out"):
                ptype = "in"
                x_lambda_pre += [f"auto {bname} = {p['name']}.request(true)"]
            else:
                raise ValueError("Invalid bufinfo type %s" % (bufinfo.type))

            x_lambda_pre += [f"{bufinfo.len} = {bname}.size * {bname}.itemsize"]

            if bufinfo.minsz:
                x_lambda_pre.append(
                    f'if ({bufinfo.len} < {bufinfo.minsz}) throw py::value_error("{p["name"]}: minimum buffer size is {bufinfo.minsz}")'
                )

        elif buflen:
            if p["pointer"]:
                p["x_callname"] = f"&{buflen.len}"
                ptype = "out"
            else:
                # if it's not a pointer, then the called function
                # can't communicate through it, so ignore the parameter
                p["x_callname"] = buflen.len
                x_temps.append(p)
                ptype = "ignored"

        elif p["pointer"] and not p["constant"] and p["fundamental"]:
            p["x_callname"] = "&%(x_callname)s" % p
            ptype = "out"
        elif p["array"]:
            asz = p.get("array_size", 0)
            if asz:
                p["x_type"] = "std::array<%s, %s>" % (p["x_type"], asz)
                p["x_callname"] = "%(x_callname)s.data()" % p
            else:
                # it's a vector
                pass
            ptype = "out"

        if ptype == "out":
            x_out_params.append(p)
            x_temps.append(p)
        elif ptype == "in":
            x_in_params.append(p)

        if p["constant"]:
            p["x_type"] = "const " + p["x_type"]

        p["x_type_full"] = p["x_type"]
        p["x_type_full"] += "&" * p["reference"]
        p["x_type_full"] += "*" * p["pointer"]

        p["x_decl"] = "%s %s" % (p["x_type_full"], p["name"])

    if buffer_params:
        raise ValueError(
            "incorrect buffer param names '%s'" % ("', '".join(buffer_params.keys()))
        )

    x_callstart = ""
    x_callend = ""
    x_wrap_return = ""

    if x_out_params:
        x_genlambda = True

        # Return all out parameters
        x_rets.extend(x_out_params)

        if fn["rtnType"] != "void":
            x_callstart = "auto __ret ="
            x_rets.insert(0, dict(x_retname="__ret", x_type=fn["rtnType"]))

        if len(x_rets) == 1 and x_rets[0]["x_type"] != "void":
            x_wrap_return = "return %s;" % x_rets[0]["x_retname"]
        elif len(x_rets) > 1:
            x_wrap_return = "return std::make_tuple(%s);" % ",".join(
                [p["x_retname"] for p in x_rets]
            )

    # Temporary values to store out parameters in
    if x_temps:
        for out in reversed(x_temps):
            x_lambda_pre.insert(0, "%(x_type)s %(name)s = 0" % out)

    # Rename internal functions
    if data.internal:
        x_name = "_" + x_name
    elif data.rename:
        x_name = data.rename
    elif fn["constructor"]:
        x_name = "__init__"

    doc = ""
    doc_quoted = ""

    if data.doc is not None:
        doc = data.doc
    elif "doxygen" in fn:
        # work around a CppHeaderParser bug
        doc = fn["doxygen"].rpartition("*//*")[2]
        doc = sphinxify.process_raw(doc)

    if doc:
        # TODO
        doc = doc.replace("\\", "\\\\").replace('"', '\\"')
        doc_quoted = doc.splitlines(keepends=True)
        doc_quoted = ['"%s"' % (dq.replace("\n", "\\n"),) for dq in doc_quoted]

    # if "hook" in data:
    #     eval(data["hook"])(fn, data)

    # bind new attributes to the function definition
    # -> previously used locals(), but this is more explicit
    #    and easier to not mess up
    fn.update(
        dict(
            data=data,
            # transforms
            x_name=x_name,
            x_in_params=x_in_params,
            x_out_params=x_out_params,
            x_rets=x_rets,
            # lambda generation
            x_genlambda=x_genlambda,
            x_callstart=x_callstart,
            x_lambda_pre=x_lambda_pre,
            x_lambda_post=x_lambda_post,
            x_callend=x_callend,
            x_wrap_return=x_wrap_return,
            # docstrings
            x_doc=doc,
            x_doc_quoted=doc_quoted,
        )
    )
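
# Illustrative sketch (hypothetical signatures): with this hook, an `overloads` key
# in the yaml must match the comma-joined parameter signature built above, e.g. a
# parsed `void Set(double value, int slot)` is looked up under "double, int" and a
# `void GetData(uint8_t* data)` overload under "uint8_t*", since each entry is the
# raw type (or enum) plus one "&" per reference and one "*" per pointer.
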