def _prepare_optypes(cls):
    """Convert the constructor signature of *cls* into ``(name, Optype)`` pairs.

    Arguments without defaults get a bare ``core.Optype()``; defaulted
    arguments wrap their default (or reuse it if it already is an Optype).
    Returns an empty list when the class has no constructor of its own.

    Raises ``TypeError`` if the constructor accepts no arguments at all.
    """
    func = cls.__dict__.get('__init__')  # to avoid MRO lookup: only own __init__ counts
    if func is None or isinstance(func, type(object.__init__)):
        # no constructor, or it is a wrapper descriptor, give up
        return []
    # getfullargspec replaces inspect.getargspec, which was removed in
    # Python 3.11; the extra (keyword-only) fields are intentionally ignored.
    spec = inspect.getfullargspec(inspect.unwrap(func))
    args, va = spec.args, spec.varargs
    # Copy to a list: getfullargspec returns defaults as a tuple, and we may
    # need to delete its first element below (del on a tuple would raise).
    dfls = list(spec.defaults or [])
    if not args and not va:
        raise TypeError('Module must accept at least one argument')
    for arg in args:
        # Dead on Python 3 (argument names are always str); kept as a guard.
        if not isinstance(arg, str):
            raise TypeError('Tuple parameter unpacking '
                            'is not supported: {arg}'.format(**locals()))
    if args:
        # forget about the first arg (which is usually 'self'); if every
        # argument carried a default, drop self's default too
        if len(args) == len(dfls):
            del dfls[0]
        del args[0]

    def to_optype(optype_or_default):
        # A default may already be an Optype; otherwise wrap the plain value.
        if isinstance(optype_or_default, core.Optype):
            return optype_or_default
        return core.Optype(optype_or_default)

    head = [core.Optype() for _ in range(len(args) - len(dfls))]
    tail = list(map(to_optype, dfls))
    return [(name, optype.set(name=name))
            for name, optype in zip(args, head + tail)]
def arggetter(func, _cache={}):
    """Return (and memoize) an accessor that fetches *func*'s arguments by name.

    ``_cache`` is a deliberate mutable default serving as a per-process memo
    table keyed by the function object.
    """
    try:
        return _cache[func]
    except KeyError:
        pass
    original = getattr(func, '__original__', None) or unwrap(func)
    names = get_argnames(original)
    positions = {arg: pos for pos, arg in enumerate(names)}
    dfl_values = original.__defaults__
    # Defaults align with the *last* len(dfl_values) positional names.
    dfl_map = dict(zip(names[-len(dfl_values):], dfl_values)) if dfl_values else {}

    def get_arg(name, args, kwargs):
        """Resolve *name* from positional args, then kwargs, then defaults."""
        if name not in positions:
            raise TypeError("%s() doesn't have argument named %s"
                            % (func.__name__, name))
        pos = positions[name]
        if pos < len(args):
            return args[pos]
        if name in kwargs:
            return kwargs[name]
        if name in dfl_map:
            return dfl_map[name]
        raise TypeError("%s() missing required argument: '%s'"
                        % (func.__name__, name))

    _cache[func] = get_arg
    return get_arg
def _apply_argparser(func):
    """Wrap *func* with an argparser built from its (unwrapped) signature.

    The first positional argument is the context and is skipped; names with a
    leading underscore are excluded.
    """
    spec = inspect.getfullargspec(inspect.unwrap(func))
    n_defaults = len(spec.defaults) if spec.defaults else 0
    n_required = len(spec.args) - n_defaults
    collected = []
    # Required positional parameters (index 0 is the context argument).
    for name in spec.args[1:n_required]:
        if name.startswith('_'):
            continue
        collected.append((name, spec.annotations.get(name), name.endswith('s')))
    # Defaulted parameters: infer the annotation from the default when absent.
    for name, default in zip(spec.args[n_required:], spec.defaults or ()):
        if name.startswith('_'):
            continue
        if name in spec.annotations:
            hint = spec.annotations[name]
        elif default is not None:
            hint = type(default)
        else:
            hint = None
        collected.append((name, hint, False, default))
    return argparser.argparser(collected)(func)
def process_signature(app, what: str, name: str, obj, options, signature, return_annotation):
    """Sphinx ``autodoc-process-signature`` handler.

    Rebuilds the displayed signature from the *unwrapped* callable so that
    decorators do not hide annotated parameters. Returns
    ``(formatted_signature, None)`` or ``None`` to let Sphinx keep its own.
    """
    if not callable(obj):
        return
    if what in ('class', 'exception'):
        # For classes, document the constructor's signature instead.
        obj = getattr(obj, '__init__', getattr(obj, '__new__', None))
    if not getattr(obj, '__annotations__', None):
        return
    obj = unwrap(obj)
    try:
        # NOTE(review): presumably a module-local helper, not the removed
        # inspect.getargspec — confirm against the surrounding imports.
        argspec = getargspec(obj)
    except (TypeError, ValueError):
        return
    if argspec.args:
        if what in ('class', 'exception'):
            # Drop 'self'/'cls' from the displayed signature.
            del argspec.args[0]
        elif what == 'method':
            # Resolve the attribute actually stored on the class to decide
            # whether the first argument (self) should be dropped; class- and
            # staticmethods keep their argument list as-is.
            outer = inspect.getmodule(obj)
            for clsname in obj.__qualname__.split('.')[:-1]:
                outer = getattr(outer, clsname)
            method_object = outer.__dict__[obj.__name__]
            if not isinstance(method_object, (classmethod, staticmethod)):
                del argspec.args[0]
    try:
        result = formatargspec(obj, *argspec[:-1]), None
    except NameError:
        # This happens when the type annotation is conditionally imported with TYPE_CHECKING
        return
    return result
def identity_register(cls, func):
    """Used for ``__new__``: instead of returning a new instance, register the
    (unwrapped) *func* on the class registry and hand it back unchanged."""
    real = inspect.unwrap(func)
    cls._registry.add(real)
    return real
def decorator(function):
    """Mark the closed-over *args* as non-injectable on *function*, after
    validating each name against its (unwrapped) signature."""
    spec = inspect.getfullargspec(inspect.unwrap(function))
    known = set(spec.args) | set(spec.kwonlyargs)
    for arg in args:
        if arg not in known:
            raise UnknownArgument('Unable to mark unknown argument %s '
                                  'as non-injectable.' % arg)
    # Merge with any previously recorded non-injectables.
    function.__noninjectables__ = (
        getattr(function, '__noninjectables__', set()) | set(args))
    return function
def _get_function_source(func): func = inspect.unwrap(func) if inspect.isfunction(func): code = func.__code__ return (code.co_filename, code.co_firstlineno) if isinstance(func, functools.partial): return _get_function_source(func.func) if isinstance(func, functools.partialmethod): return _get_function_source(func.func) return None
def __init__(self, decorator_function, *args, **kwargs):
    '''Create a new wrapper around the decorated function'''
    super().__init__(decorator_function, *args, **kwargs)
    # Flag the underlying function (not the outermost decorator) as an
    # endpoint. We unwrap because the order of decorators is unknown: when
    # the other decorators use @wraps correctly, unwrap() reaches the real
    # function; when they don't, the marker lands on the decorator object,
    # which still works as long as @view_function is listed first.
    self.DECORATED_FUNCTIONS.add(inspect.unwrap(decorator_function))
def __getattr__(self, name):
    """Expose the recorded call's arguments as attributes, lazily resolved."""
    if not self._introspected:
        # Our decorators don't preserve signatures, so resolve the real
        # function first and let getcallargs() map args/kwargs to names.
        real = unwrap(self._func)
        self.__dict__.update(getcallargs(real, *self._args, **self._kwargs))
        self._introspected = True
    try:
        return self.__dict__[name]
    except KeyError:
        raise AttributeError('Function %s does not have argument %s'
                             % (self._func.__name__, name))
def _get_function_source(func):
    """Return ``(filename, first_line_number)`` for *func*, or ``None``.

    Uses inspect.unwrap() on Python 3.4+; falls back to a single
    ``__wrapped__`` hop on older interpreters.
    """
    if compat.PY34:
        target = inspect.unwrap(func)
    elif hasattr(func, '__wrapped__'):
        target = func.__wrapped__
    else:
        target = func
    if inspect.isfunction(target):
        return (target.__code__.co_filename, target.__code__.co_firstlineno)
    if isinstance(target, functools.partial):
        return _get_function_source(target.func)
    # partialmethod only exists on Python 3.4+.
    if compat.PY34 and isinstance(target, functools.partialmethod):
        return _get_function_source(target.func)
    return None
def linkcode_resolve(domain, info):
    """
    Determine the URL corresponding to Python object
    """
    if domain != 'py':
        return None

    module = sys.modules.get(info['module'])
    if module is None:
        return None

    target = module
    for attr in info['fullname'].split('.'):
        try:
            target = getattr(target, attr)
        except AttributeError:
            return None

    try:
        if sys.version_info >= (3, 5):
            # Strip decorators so the source of the real object is located.
            srcfile = inspect.getsourcefile(inspect.unwrap(target))
        else:
            srcfile = inspect.getsourcefile(target)
    except TypeError:
        srcfile = None
    if not srcfile:
        return None

    try:
        source, lineno = inspect.getsourcelines(target)
    except OSError:
        lineno = None

    linespec = ("#L{:d}-L{:d}".format(lineno, lineno + len(source) - 1)
                if lineno else "")

    srcfile = os.path.relpath(srcfile, start=os.path.dirname(pandas.__file__))

    if '+' in pandas.__version__:
        # A '+' in the version marks a development build: link to master.
        return ("http://github.com/pandas-dev/pandas/blob/master/pandas/"
                "{}{}".format(srcfile, linespec))
    return ("http://github.com/pandas-dev/pandas/blob/"
            "v{}/pandas/{}{}".format(pandas.__version__, srcfile, linespec))
def process_docstring(app, what, name, obj, options, lines):
    """Sphinx ``autodoc-process-docstring`` handler.

    Injects ``:type <arg>:`` and ``:rtype:`` fields derived from the object's
    type hints into *lines* (which is modified in place).
    """
    # Resolve the object whose hints we want: property -> getter,
    # class/exception -> __init__, then strip decorators.
    if isinstance(obj, property):
        obj = obj.fget
    if callable(obj):
        if what in ('class', 'exception'):
            obj = getattr(obj, '__init__')
        obj = unwrap(obj)
        try:
            type_hints = get_type_hints(obj)
        except (AttributeError, TypeError):
            # Introspecting a slot wrapper will raise TypeError
            return
        except NameError:
            # This happens when the type annotation is conditionally imported with TYPE_CHECKING
            return
        for argname, annotation in type_hints.items():
            formatted_annotation = format_annotation(annotation)
            if argname == 'return':
                if what in ('class', 'exception'):
                    # Don't add return type None from __init__()
                    continue
                # Default: append at the end; prefer the line right before an
                # existing :return:/:returns: field; skip entirely if an
                # :rtype: is already present.
                insert_index = len(lines)
                for i, line in enumerate(lines):
                    if line.startswith(':rtype:'):
                        insert_index = None
                        break
                    elif line.startswith(':return:') or line.startswith(':returns:'):
                        insert_index = i
                if insert_index is not None:
                    if insert_index == len(lines):
                        # Ensure that :rtype: doesn't get joined with a paragraph of text, which
                        # prevents it being interpreted.
                        lines.append('')
                        insert_index += 1
                    lines.insert(insert_index, ':rtype: {}'.format(formatted_annotation))
            else:
                # Place the :type: field directly above the matching :param:.
                searchfor = ':param {}:'.format(argname)
                for i, line in enumerate(lines):
                    if line.startswith(searchfor):
                        lines.insert(i, ':type {}: {}'.format(argname, formatted_annotation))
                        break
def args_check(name, func, provided):
    """Validate that filter *func* can be called with the *provided* arguments.

    Raises TemplateSyntaxError when too few or too many are supplied;
    returns True otherwise.
    """
    provided = list(provided)
    # First argument, filter input, is implied.
    plen = len(provided) + 1
    # Check to see if a decorator is providing the real function.
    func = unwrap(func)

    spec = getfullargspec(func)
    alen = len(spec.args)
    dlen = len(spec.defaults or [])
    # Accept only counts between required (alen - dlen) and total (alen).
    if not (alen - dlen <= plen <= alen):
        raise TemplateSyntaxError("%s requires %d arguments, %d provided" %
                                  (name, alen - dlen, plen))
    return True
def _rule_indices_from_argspec(func, with_p=True): args, _, _, defaults = inspect.getargspec(inspect.unwrap(func)) nr_args = len(args) defaults = list(defaults) if defaults is not None else [] if with_p: if not nr_args: raise TypeError("need at least 'p' argument") if len(defaults) == nr_args: defaults = defaults[1:] nr_args -= 1 if None in defaults: def_nones = defaults[defaults.index(None):] if def_nones.count(None) != len(def_nones): raise TypeError("index argument after 'None'") def_indices = defaults[:-len(def_nones)] else: def_indices = defaults return list(range(1, nr_args-len(defaults)+1)) + def_indices
def update_wrapper(wrapper, wrapped,
                   assigned = WRAPPER_ASSIGNMENTS,
                   updated = WRAPPER_UPDATES):
    """Copy metadata from *wrapped* onto *wrapper* and link the two.

    Besides the standard functools-style attribute copy, this records
    ``__wrapped__`` and a fast-access ``__original__`` reference to the
    innermost function. Returns *wrapper* so it can be used as a decorator
    via partial().
    """
    for attr_name in assigned:
        try:
            attr_value = getattr(wrapped, attr_name)
        except AttributeError:
            continue
        setattr(wrapper, attr_name, attr_value)
    for attr_name in updated:
        getattr(wrapper, attr_name).update(getattr(wrapped, attr_name, {}))
    # Set __wrapped__ only now, so the __dict__ update above cannot gobble it.
    wrapper.__wrapped__ = wrapped
    # Cache a direct reference to the innermost function for convenience.
    wrapper.__original__ = getattr(wrapped, '__original__', None) or unwrap(wrapped)
    return wrapper
def get_source(obj):
    """Return (dedented) source text for a module, function, lambda or str.

    For functions, source attached by create_function is preferred and
    decorator lines are stripped. Raises NotImplementedError for anything
    that does not resolve to a string.
    """
    source = obj
    if isinstance(obj, types.ModuleType):
        source = inspect.getsource(obj)
    elif isinstance(obj, types.FunctionType):
        # try source generated from create_function first
        source = getattr(obj, '__asttools_source__', None)
        if source is None:
            source = dedent(inspect.getsource(inspect.unwrap(obj)))
        # Strip decorator lines in either case so only the def remains.
        source = '\n'.join(line for line in source.split('\n')
                           if not line.startswith('@'))
    elif isinstance(obj, types.LambdaType):
        # NOTE: unreachable in practice — types.LambdaType is the same
        # object as types.FunctionType; kept to mirror the original intent.
        source = inspect.getsource(obj)

    if isinstance(source, str):
        source = dedent(source)
    else:
        raise NotImplementedError("{0}".format(str(source)))
    return source
def dec(func):
    """Register *func* as an inclusion tag and return it unchanged."""
    spec = getfullargspec(unwrap(func))
    tag_name = (name or
                getattr(func, '_decorated_function', func).__name__)

    @functools.wraps(func)
    def compile_func(parser, token):
        # Drop the tag name itself; the rest are the tag's arguments.
        bits = token.split_contents()[1:]
        args, kwargs = parse_bits(
            parser, bits, spec.args, spec.varargs, spec.varkw,
            spec.defaults, spec.kwonlyargs, spec.kwonlydefaults,
            takes_context, tag_name,
        )
        return InclusionNode(
            func, takes_context, args, kwargs, filename,
        )

    self.tag(tag_name, compile_func)
    return func
def is_generator_fixture(self):
    """
    True if the fixture is a generator function (and thus contains teardown
    code).
    """
    fixture_fn = inspect.unwrap(self.fn)
    return inspect.isgeneratorfunction(fixture_fn)
def linkcode_resolve(domain, info):
    """Sphinx linkcode hook: map a documented object to its GitHub (or local)
    source URL for the cf-python / cfdm documentation."""
    # =================================================================
    # Must delete all .doctrees directories in build for changes to be
    # picked up. E.g.:
    #
    # >> rm -fr build/.doctrees build/*/.doctrees build/*/*/.doctrees
    # =================================================================
    online_source_code = True

    if domain != "py":
        return None
    if not info["module"]:
        return None

    modname = info["module"]
    fullname = info["fullname"]

    submod = sys.modules.get(modname, None)
    if submod is None:
        return None

    # Walk the dotted attribute path down from the module.
    obj = submod
    for part in fullname.split("."):
        try:
            obj = getattr(obj, part)
        except AttributeError:
            return None

    # Get the object wrapped by obj. This makes sure that the actual
    # code, rather that the decorator's code, get used.
    obj = inspect.unwrap(obj)

    try:
        fn = inspect.getsourcefile(obj)
    except TypeError:
        fn = None
    if not fn:
        return None

    try:
        source, lineno = inspect.findsource(obj)
    except OSError:
        lineno = None

    try:
        nlines = len(inspect.getsourcelines(obj)[0])
    except (OSError, IndexError):
        nlines = None

    fn = relpath(fn, start=dirname(cf.__file__))

    if lineno:
        # findsource() returns a 0-based line number, hence the +1.
        linespec = "#L{}".format(lineno + 1)
        if nlines:
            linespec += "-L{}".format(lineno + nlines)
    else:
        linespec = ""

    # ----------------------------------------------------------------
    # NOTE: You need to touch the .rst files to get the change in
    # ----------------------------------------------------------------
    if online_source_code:
        if "cfdm/" in fn:
            # Point to on-line cfdm
            # code. E.g. https://github.com/NCAS-CMS/cfdm/blob/v1.7.8/cfdm/field.py#L619
            fn = re.sub("^.*(cfdm/.*)", "\\1", fn)
            url = "https://github.com/NCAS-CMS/cfdm/blob/v{0}/{1}{2}".format(
                cfdm_version, fn, linespec)
        else:
            # Point to on-line cf
            # code. E.g. https://github.com/NCAS-CMS/cf-python/blob/v3.0.1/cf/data/data.py#L4292
            url = "https://github.com/NCAS-CMS/cf-python/blob/v{0}/cf/{1}{2}".format(
                link_release, fn, linespec)
        print(url)
        return url
    else:
        # Point to local source code relative to this directory.
        return "../../../cf/{0}{1}".format(fn, linespec)
def get_argnames(func):
    """Return the positional-argument names of *func* (decorators stripped,
    preferring a cached ``__original__`` reference when present)."""
    target = getattr(func, '__original__', None) or unwrap(func)
    code = target.__code__
    return code.co_varnames[:code.co_argcount]
def __init__(self, module: Union[ModuleType, str], *,
             docfilter: Callable[[Doc], bool] = None,
             supermodule: 'Module' = None,
             context: Context = None):
    """
    Creates a `Module` documentation object given the actual
    module Python object.

    `docfilter` is an optional predicate that controls which
    sub-objects are documentated (see also: `pdoc.html()`).

    `supermodule` is the parent `pdoc.Module` this module is
    a submodule of.

    `context` is an instance of `pdoc.Context`. If `None` a
    global context object will be used.
    """
    if isinstance(module, str):
        module = import_module(module)

    super().__init__(module.__name__, self, module)
    if self.name.endswith('.__init__') and not self.is_package:
        self.name = self.name[:-len('.__init__')]

    self._context = _global_context if context is None else context
    """
    A lookup table for ALL doc objects of all modules that share this context,
    mainly used in `Module.find_ident()`.
    """
    self.supermodule = supermodule
    """
    The parent `pdoc.Module` this module is a submodule of, or `None`.
    """
    self.doc = {}  # type: Dict[str, Doc]
    """A mapping from identifier name to a documentation object."""

    self._is_inheritance_linked = False
    """Re-entry guard for `pdoc.Module._link_inheritance()`."""

    var_docstrings, _ = _pep224_docstrings(self)

    # Populate self.doc with this module's public members
    if hasattr(self.obj, '__all__'):
        # __all__ is authoritative: keep its order and warn on dead names.
        public_objs = []
        for name in self.obj.__all__:
            try:
                public_objs.append((name, getattr(self.obj, name)))
            except AttributeError:
                warn("Module {!r} doesn't contain identifier `{}` "
                     "exported in `__all__`".format(self.module, name))
    else:
        def is_from_this_module(obj):
            # getmodule() may be None for e.g. builtins; accept those too.
            mod = inspect.getmodule(obj)
            return mod is None or mod.__name__ == self.obj.__name__

        public_objs = [(name, inspect.unwrap(obj))
                       for name, obj in inspect.getmembers(self.obj)
                       if (_is_public(name) and
                           (is_from_this_module(obj) or name in var_docstrings))]
        # Restore the order of definition in the module body.
        index = list(self.obj.__dict__).index
        public_objs.sort(key=lambda i: index(i[0]))

    for name, obj in public_objs:
        if _is_function(obj):
            self.doc[name] = Function(name, self, obj)
        elif inspect.isclass(obj):
            self.doc[name] = Class(name, self, obj)
        elif name in var_docstrings:
            self.doc[name] = Variable(name, self, var_docstrings[name], obj=obj)

    # If the module is a package, scan the directory for submodules
    if self.is_package:
        def iter_modules(paths):
            """
            Custom implementation of `pkgutil.iter_modules()`
            because that one doesn't play well with namespace packages.
            See: https://github.com/pypa/setuptools/issues/83
            """
            from os.path import isdir, join, splitext
            for pth in paths:
                for file in os.listdir(pth):
                    if file.startswith(('.', '__pycache__', '__init__.py')):
                        continue
                    if file.endswith(_SOURCE_SUFFIXES):
                        yield splitext(file)[0]
                    if isdir(join(pth, file)) and '.' not in file:
                        yield file

        for root in iter_modules(self.obj.__path__):
            # Ignore if this module was already doc'd.
            if root in self.doc:
                continue

            # Ignore if it isn't exported
            if not _is_public(root):
                continue

            assert self.refname == self.name
            fullname = "%s.%s" % (self.name, root)
            self.doc[root] = m = Module(import_module(fullname),
                                        docfilter=docfilter,
                                        supermodule=self,
                                        context=self._context)
            # Skip empty namespace packages because they may
            # as well be other auxiliary directories
            if m.is_namespace and not m.doc:
                del self.doc[root]

    # Apply docfilter
    if docfilter:
        for name, dobj in self.doc.copy().items():
            if not docfilter(dobj):
                self.doc.pop(name)

    # Build the reference name dictionary of the module
    self._context[self.refname] = self
    for docobj in self.doc.values():
        self._context[docobj.refname] = docobj
        if isinstance(docobj, Class):
            self._context.update((obj.refname, obj)
                                 for obj in docobj.doc.values())
def boilerplate_gen():
    """Generator of lines for the automated part of pyplot."""
    # these methods are all simple wrappers of Axes methods by the same
    # name.
    _plotcommands = (
        'acorr', 'angle_spectrum', 'arrow', 'axhline', 'axhspan', 'axvline',
        'axvspan', 'bar', 'barh', 'broken_barh', 'boxplot', 'cohere', 'clabel',
        'contour', 'contourf', 'csd', 'errorbar', 'eventplot', 'fill',
        'fill_between', 'fill_betweenx', 'hexbin', 'hist', 'hist2d', 'hlines',
        'imshow', 'loglog', 'magnitude_spectrum', 'pcolor', 'pcolormesh',
        'phase_spectrum', 'pie', 'plot', 'plot_date', 'psd', 'quiver',
        'quiverkey', 'scatter', 'semilogx', 'semilogy', 'specgram',
        #'spy',
        'stackplot', 'stem', 'step', 'streamplot', 'tricontour',
        'tricontourf', 'tripcolor', 'triplot', 'violinplot', 'vlines', 'xcorr',
        'barbs',
    )

    _misccommands = (
        'cla', 'grid', 'legend', 'table', 'text', 'annotate',
        'ticklabel_format', 'locator_params', 'tick_params', 'margins',
        'autoscale',
    )

    # Per-command extra statement (template) that updates the current
    # scalar-mappable after the wrapped Axes call.
    cmappable = {
        'contour': 'if %(ret)s._A is not None: sci(%(ret)s)',
        'contourf': 'if %(ret)s._A is not None: sci(%(ret)s)',
        'hexbin': 'sci(%(ret)s)',
        'scatter': 'sci(%(ret)s)',
        'pcolor': 'sci(%(ret)s)',
        'pcolormesh': 'sci(%(ret)s)',
        'hist2d': 'sci(%(ret)s[-1])',
        'imshow': 'sci(%(ret)s)',
        #'spy' : 'sci(%(ret)s)',  ### may return image or Line2D
        'quiver': 'sci(%(ret)s)',
        'specgram': 'sci(%(ret)s[-1])',
        'streamplot': 'sci(%(ret)s.lines)',
        'tricontour': 'if %(ret)s._A is not None: sci(%(ret)s)',
        'tricontourf': 'if %(ret)s._A is not None: sci(%(ret)s)',
        'tripcolor': 'sci(%(ret)s)',
    }

    def format_value(value):
        """
        Format function default values as needed for inspect.formatargspec.
        The interesting part is a hard-coded list of functions used as
        defaults in pyplot methods.
        """
        if isinstance(value, types.FunctionType):
            if value.__name__ in ('detrend_none', 'window_hanning'):
                return '=mlab.' + value.__name__
            if value.__name__ == 'mean':
                return '=np.' + value.__name__
            raise ValueError(('default value %s unknown to boilerplate.'
                              + 'formatvalue') % value)
        return '=' + repr(value)

    text_wrapper = textwrap.TextWrapper(break_long_words=False)

    for fmt, cmdlist in [(PLOT_TEMPLATE, _plotcommands),
                         (MISC_FN_TEMPLATE, _misccommands)]:
        for func in cmdlist:
            # For some commands, an additional line is needed to set the
            # color map
            if func in cmappable:
                mappable = ' ' + cmappable[func] % locals()
            else:
                mappable = ''

            # Get argspec of wrapped function
            base_func = getattr(Axes, func)
            has_data = 'data' in inspect.signature(base_func).parameters
            work_func = inspect.unwrap(base_func)

            if six.PY2:
                args, varargs, varkw, defaults = inspect.getargspec(work_func)
            else:
                (args, varargs, varkw, defaults, kwonlyargs, kwonlydefs,
                 annotations) = inspect.getfullargspec(work_func)
            args.pop(0)  # remove 'self' argument
            if defaults is None:
                defaults = ()
            else:
                # On Python 2, coerce unicode defaults to ascii bytes.
                def_edited = []
                for val in defaults:
                    if six.PY2:
                        if isinstance(val, unicode):
                            val = val.encode('ascii', 'ignore')
                    def_edited.append(val)
                defaults = tuple(def_edited)

            # Add a data keyword argument if needed (fmt is PLOT_TEMPLATE) and
            # possible (if *args is used, we can't just add a data
            # argument in front of it since it would gobble one of the
            # arguments the user means to pass via *args)
            # This needs to be done here so that it goes into call
            if not varargs and fmt is PLOT_TEMPLATE and has_data:
                args.append('data')
                defaults = defaults + (None,)

            # How to call the wrapped function
            call = []
            for i, arg in enumerate(args):
                if len(defaults) < len(args) - i:
                    call.append('%s' % arg)
                else:
                    call.append('%s=%s' % (arg, arg))

            # remove the data keyword as it was needed above to go into the
            # call but should go after `hold` in the signature.
            # This is janky as all get out, but hopefully boilerplate will
            # be retired soon.
            if not varargs and fmt is PLOT_TEMPLATE and has_data:
                args.pop()
                defaults = defaults[:-1]
            if varargs is not None:
                call.append('*' + varargs)
            if varkw is not None:
                call.append('**' + varkw)
            call = ', '.join(call)

            text_wrapper.width = 80 - 19 - len(func)
            join_with = '\n' + ' ' * (18 + len(func))
            call = join_with.join(text_wrapper.wrap(call))

            # Add a hold keyword argument if needed (fmt is PLOT_TEMPLATE) and
            # possible (if *args is used, we can't just add a hold
            # argument in front of it since it would gobble one of the
            # arguments the user means to pass via *args)
            if varargs:
                sethold = " hold = %(varkw)s.pop('hold', None)" % locals()
            elif fmt is PLOT_TEMPLATE:
                args.append('hold')
                defaults = defaults + (None,)
                if has_data:
                    args.append('data')
                    defaults = defaults + (None,)
                sethold = ''

            # Now we can build the argspec for defining the wrapper
            argspec = inspect.formatargspec(args, varargs, varkw, defaults,
                                            formatvalue=format_value)
            argspec = argspec[1:-1]  # remove parens

            text_wrapper.width = 80 - 5 - len(func)
            join_with = '\n' + ' ' * (5 + len(func))
            argspec = join_with.join(text_wrapper.wrap(argspec))

            # A gensym-like facility in case some function takes an
            # argument named washold, ax, or ret
            washold, ret, ax = 'washold', 'ret', 'ax'
            bad = set(args) | set((varargs, varkw))
            while washold in bad or ret in bad or ax in bad:
                washold = 'washold' + str(random.randrange(10 ** 12))
                ret = 'ret' + str(random.randrange(10 ** 12))
                ax = 'ax' + str(random.randrange(10 ** 12))

            # Since we can't avoid using some function names,
            # bail out if they are used as argument names
            for reserved in ('gca', 'gci'):
                if reserved in bad:
                    msg = 'Axes method %s has kwarg named %s' % (func, reserved)
                    raise ValueError(msg)

            # The template consumes the local names built above via locals().
            yield fmt % locals()

    cmaps = (
        'autumn', 'bone', 'cool', 'copper', 'flag', 'gray', 'hot', 'hsv',
        'jet', 'pink', 'prism', 'spring', 'summer', 'winter', 'spectral',
        'magma', 'inferno', 'plasma', 'viridis', 'sunlight', 'twilight'
    )
    # add all the colormaps (autumn, hsv, ....)
    for name in cmaps:
        yield CMAP_TEMPLATE.format(name=name)

    yield ''
    yield '_setup_pyplot_info_docstrings()'
# Copyright 2020 Alexandre Díaz <*****@*****.**> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). from inspect import unwrap from odoo import api, models, tools from odoo.http import request from odoo.addons.base.models.ir_qweb import IrQWeb from .assetsbundle import AssetsBundleCompanyColor # Monkey Patch to change the ormcache_context decorator of '_get_asset_nodes' to # add 'active_company_id' context key. This is done to avoid "clear_caches" usage # that works in a more aggressive way to the LRU cache. _orig_get_asset_nodes = unwrap(IrQWeb._get_asset_nodes) @tools.conditional( "xml" not in tools.config["dev_mode"], tools.ormcache_context( "xmlid", 'options.get("lang", "en_US")', "css", "js", "debug", "async_load", "defer_load", "lazy_load", keys=("website_id", "active_company_id"), ),
def _estimate_effect(self):
    """Fit the configured econml estimator and compute the treatment effect.

    Builds the estimator's data arguments (outcome Y, treatment T, effect
    modifiers X, common causes W, instruments Z) from this object's state,
    fits, evaluates per-unit effects on the target units and returns a
    `CausalEstimate` with the average effect (and confidence intervals when
    requested).
    """
    n_samples = self._treatment.shape[0]
    X = None  # Effect modifiers
    W = None  # common causes/ confounders
    Z = None  # Instruments
    Y = self._outcome
    T = self._treatment
    if self._effect_modifiers is not None:
        X = self._effect_modifiers
    if self._observed_common_causes_names:
        W = self._observed_common_causes
    if self.estimating_instrument_names:
        Z = self._estimating_instruments
    named_data_args = {'Y': Y, 'T': T, 'X': X, 'W': W, 'Z': Z}
    # Calling the econml estimator's fit method
    estimator_argspec = inspect.getfullargspec(
        inspect.unwrap(self.estimator.fit))
    # As of v0.9, econml has some keyword-only arguments
    estimator_named_args = estimator_argspec.args + estimator_argspec.kwonlyargs
    # Pass only the data arguments the estimator's fit() actually accepts.
    estimator_data_args = {
        arg: named_data_args[arg]
        for arg in named_data_args.keys()
        if arg in estimator_named_args
    }
    self.estimator.fit(**estimator_data_args,
                       **self.method_params["fit_params"])

    X_test = X
    n_target_units = n_samples
    if X is not None:
        # Target units may be given as explicit rows or a row predicate.
        if type(self._target_units) is pd.DataFrame:
            X_test = self._target_units
        elif callable(self._target_units):
            filtered_rows = self._data.where(self._target_units)
            boolean_criterion = np.array(filtered_rows.notnull().iloc[:, 0])
            X_test = X[boolean_criterion]
        n_target_units = X_test.shape[0]

    # Changing shape to a list for a singleton value
    if type(self._control_value) is not list:
        self._control_value = [self._control_value]
    if type(self._treatment_value) is not list:
        self._treatment_value = [self._treatment_value]
    # One (control, treatment) row per target unit.
    T0_test = np.repeat([self._control_value], n_target_units, axis=0)
    T1_test = np.repeat([self._treatment_value], n_target_units, axis=0)
    est = self.estimator.effect(X_test, T0=T0_test, T1=T1_test)
    ate = np.mean(est)

    self.effect_intervals = None
    if self._confidence_intervals:
        self.effect_intervals = self.estimator.effect_interval(
            X_test, T0=T0_test, T1=T1_test,
            alpha=1 - self.confidence_level)
    estimate = CausalEstimate(estimate=ate,
                              control_value=self._control_value,
                              treatment_value=self._treatment_value,
                              target_estimand=self._target_estimand,
                              realized_estimand_expr=self.symbolic_estimator,
                              cate_estimates=est,
                              effect_intervals=self.effect_intervals,
                              _estimator_object=self.estimator)
    return estimate
def capture_args(function, _locals, omit_defaulted_params: bool = False):
    """
    Extract the function's arguments from the local variable dict.

    Ideally, we would replicate the original arguments exactly.
    Unfortunately, there is no way to tell whether a parameter was passed as
    positional or keyword argument, or if a keyword parameter was passed at
    all. As far as possible, arguments are stored by their name in the
    keyword argument dict. If that is not possible, they are stored as
    positional arguments. This is only necessary for positional arguments
    before a var positional argument.

    If a parameter has not been specified, it might be desirable to omit it
    from serialization, so that future changes to the default value are
    possible. However, there is no way to tell whether a parameter was not
    passed, or if it was explicitly specified to have the default value.
    Thus, this functionality is turned off by default. You can specify
    omit_defaulted_params=True to remove default values for all parameters,
    or pass a list of parameter names whose default values should be removed.

    .. code-block:: python

        def foo(a1, a2=None, *va, kw1=2, **kw):
            # Get passed arguments
            args, kwargs = capture_args(foo, locals(),
                                        omit_defaulted_params=['kw1'])
            # ...

    :param function: The function whose arguments to extract.
    :param _locals: Local variable dict as returned by builtin locals()
    :param omit_defaulted_params: Whether to omit keyword arguments whose
        value is their default. This may be True to do so for all keyword
        arguments, or a container to do so for the listed arguments.
    :return: tuple (args, kwargs)
    """
    # Since we should be called by the function definition, we do not want to
    # see any modified signature added by delegates.
    function = unwrap(function)
    # Use inspect.Signature to determine parameter names.
    sig = signature(function)

    # Collect parameter values
    pos_args = []
    kw_args = OrderedDict()
    for p in sig.parameters.values():
        if p.kind == Parameter.POSITIONAL_ONLY:
            # Always use positional arguments here
            pos_args.append(_locals[p.name])
        elif p.kind in (Parameter.POSITIONAL_OR_KEYWORD,
                        Parameter.KEYWORD_ONLY):
            # Add as keyword argument for now.
            kw_args[p.name] = _locals[p.name]
        elif p.kind == Parameter.VAR_POSITIONAL:
            varargs = _locals[p.name]
            if len(varargs) == 0:
                # The special treatment is not required if the parameter is
                # defined, but empty, since that means no varargs were passed.
                continue
            # If we have a var positional param, all pos_or_kw params coming
            # before must be passed as positional params.
            pos_args.extend(kw_args.values())
            kw_args.clear()
            # Now append varargs
            pos_args.extend(varargs)
        elif p.kind == Parameter.VAR_KEYWORD:
            # Add var keywords to kw dict
            kw_args.update(_locals[p.name])

    # Omit parameters that have their default values if requested
    if omit_defaulted_params is True:
        # Do for all
        for p in sig.parameters.values():
            if p.name in kw_args and kw_args[p.name] == p.default:
                del kw_args[p.name]
    elif isinstance(omit_defaulted_params, Iterable):
        # Do for listed. Bug fix: delete the *listed* name, not the stale
        # loop variable `p` left over from the signature walk above.
        for name in omit_defaulted_params:
            if name in kw_args and kw_args[name] == sig.parameters[name].default:
                del kw_args[name]

    # Return positional args as tuple and kwargs as dict
    return tuple(pos_args), dict(kw_args)
def linkcode_resolve(domain, info):
    """
    Find the URL of the GitHub source for dwave-ocean-sdk objects.
    """
    # Based on https://github.com/numpy/numpy/blob/main/doc/source/conf.py
    # Updated to work on multiple submodules and fall back to next-level
    # module for objects such as properties
    if domain != 'py':
        return None

    # Walk the dotted path, keeping every intermediate object so we can fall
    # back to an outer level when the leaf has no resolvable source file.
    obj = {}
    obj_inx = 0
    obj[obj_inx] = sys.modules.get(info['module'])
    for part in info['fullname'].split('.'):
        obj_inx += 1
        try:
            obj[obj_inx] = getattr(obj[obj_inx - 1], part)
        except Exception:
            pass

    # strip decorators, which would resolve to the source of the decorator
    # https://bugs.python.org/issue34305
    for i in range(len(obj)):
        obj[i] = inspect.unwrap(obj[i])

    # Search from the innermost object outwards for one with a source file.
    fn = None
    for i in range(len(obj) - 1, -1, -1):
        try:
            fn = inspect.getsourcefile(obj[i])
            if fn:
                obj_inx = i
                break
        except Exception:
            # Was a bare `except:`; narrowed so Ctrl-C/SystemExit propagate.
            pass

    linespec = ""
    try:
        source, lineno = inspect.getsourcelines(obj[obj_inx])
        if obj_inx != 0:
            linespec = "#L%d" % (lineno)
    except Exception:
        linespec = ""

    # Only link objects installed from a package (i.e. under site-packages).
    if not fn or "site-packages" not in fn:
        return None

    if ".egg" in fn:
        fn = fn.replace(fn[:fn.index("egg") + len("egg")], "")
    else:
        fn = fn.replace(fn[:fn.index("site-packages") +
                        len("site-packages")], "")

    repo = fn.split("/")[1] if \
        (fn.split("/")[1] != "dwave") \
        else fn.split("/")[2]

    pm_module = github_map[repo]
    pm_ver = versions[github_map[repo]]
    fn = "https://github.com/dwavesystems/{}/blob/{}{}".format(
        pm_module, pm_ver, fn)

    return fn + linespec
def autodoc_skip_member_handler(app, what, name, obj, skip, options):
    """
    Enforce the "exclude-members" option, even in cases where it seems to be
    ignored by Sphinx.
    """
    excluded = options.get('exclude-members', set())
    if excluded:
        # Either it's a one-item set with the string passed in conf.py
        try:
            excluded, = excluded
        # Or it's an already-processed set
        except ValueError:
            pass
        else:
            excluded = exclude_members_option(excluded)

    # Import conf.py Sphinx configuration, since the "excluded-members" option
    # can be overridden by the user in ReST directives.
    import conf
    default_excluded = exclude_members_option(
        conf.autodoc_default_options.get('exclude-members', '')
    )
    excluded = excluded | default_excluded

    # Only the last component of the dotted name matters from here on.
    name = name.split('.')[-1]
    unwrapped = inspect.unwrap(obj)
    # Get rid of the default implementation of dunder names, since it adds no
    # value in the documentation
    if any(
        hasattr(cls, name) and getattr(cls, name) in (obj, unwrapped)
        # providers of "uninteresting" methods that are useless in our
        # documentation
        for cls in (
            object,
            type,
            abc.ABC,
            abc.ABCMeta,
        )
    ):
        return True
    # Some classes like ABCMeta are more sneaky so also ban things that are
    # just builtin functions
    elif any(
        type_ in map(type, (obj, unwrapped))
        for type_ in (
            # Work with multiple Python versions
            getattr(types, type_name)
            for type_name in (
                'BuiltinFunctionType',
                'BuiltinMethodType',
                'WrapperDescriptorType',
                'MethodWrapperType',
                'MethodDescriptorType',
                'ClassMethodDescriptorType',
                'GetSetDescriptorType',
                'MemberDescriptorType',
            )
            if hasattr(types, type_name)
        )
    ):
        return True
    # Dunder names without any doc are of no interest, they are probably just
    # implementation details
    elif name.startswith('__') and name.endswith('__') and not inspect.getdoc(obj):
        return True
    elif name in excluded:
        return True
    else:
        # Defer to whatever decision Sphinx already made.
        return skip
def depends_on(a: Callable, b: Callable):
    """Return False exactly when the unwrapped pair is (those, these)."""
    is_special_pair = unwrap(a) == those and unwrap(b) == these
    return not is_special_pair
def test_should_return_challenge(self, db_mock: MagicMock, challenge_mock: MagicMock):
    """create_challenge should return the new challenge when the name is unused."""
    # Simulate a lookup miss: no existing challenge with the same name.
    challenge_mock.query.filter_by.return_value.first.return_value = None

    create = inspect.unwrap(challenges.create_challenge)
    assert create(AN_ADMINISTRATOR) == self.A_NEW_CHALLENGE
def typechecked(func=None, *, always=False, _localns: Optional[Dict[str, Any]] = None):
    """
    Perform runtime type checking on the arguments that are passed to the wrapped function.

    The return value is also checked against the return annotation if any.

    If the ``__debug__`` global variable is set to ``False``, no wrapping and therefore no type
    checking is done, unless ``always`` is ``True``.

    This can also be used as a class decorator. This will wrap all type annotated methods in
    the class with this decorator.

    :param func: the function or class to enable type checking for
    :param always: ``True`` to enable type checks even in optimized mode
    """
    # Called as @typechecked(...) with arguments: return a partial that
    # receives the function on the second call.
    if func is None:
        return partial(typechecked, always=always, _localns=_localns)

    if not __debug__ and not always:  # pragma: no cover
        return func

    if isclass(func):
        # Class decorator mode: recursively wrap every annotated method that
        # actually belongs to this class (qualname prefix check avoids
        # re-wrapping inherited/aliased callables).
        prefix = func.__qualname__ + '.'
        for key, attr in func.__dict__.items():
            if inspect.isfunction(attr) or inspect.ismethod(
                    attr) or inspect.isclass(attr):
                if attr.__qualname__.startswith(prefix) and getattr(
                        attr, '__annotations__', None):
                    setattr(
                        func, key,
                        typechecked(attr, always=always, _localns=func.__dict__))
            elif isinstance(attr, (classmethod, staticmethod)):
                # Wrap the underlying function and re-wrap it in the same
                # descriptor type.
                if getattr(attr.__func__, '__annotations__', None):
                    wrapped = typechecked(attr.__func__,
                                          always=always,
                                          _localns=func.__dict__)
                    setattr(func, key, type(attr)(wrapped))

        return func

    # Find the frame in which the function was declared, for resolving forward references later
    if _localns is None:
        _localns = sys._getframe(1).f_locals

    # Find either the first Python wrapper or the actual function
    python_func = inspect.unwrap(func, stop=lambda f: hasattr(f, '__code__'))

    if not getattr(func, '__annotations__', None):
        warn('no type annotations present -- not typechecking {}'.format(
            function_name(func)))
        return func

    def wrapper(*args, **kwargs):
        memo = _CallMemo(python_func, _localns, args=args, kwargs=kwargs)
        check_argument_types(memo)
        retval = func(*args, **kwargs)
        check_return_type(retval, memo)

        # If a generator is returned, wrap it if its yield/send/return types can be checked
        if inspect.isgenerator(retval) or isasyncgen(retval):
            return_type = memo.type_hints.get('return')
            if return_type:
                origin = getattr(return_type, '__origin__', None)
                if origin in generator_origin_types:
                    return TypeCheckedGenerator(retval, memo)
                elif origin is not None and origin in asyncgen_origin_types:
                    return TypeCheckedAsyncGenerator(retval, memo)

        return retval

    async def async_wrapper(*args, **kwargs):
        memo = _CallMemo(python_func, _localns, args=args, kwargs=kwargs)
        check_argument_types(memo)
        retval = await func(*args, **kwargs)
        check_return_type(retval, memo)
        return retval

    # Comparing code objects detects whether the target is already one of our
    # own wrappers, in which case no extra layer is added.
    if inspect.iscoroutinefunction(func):
        if python_func.__code__ is not async_wrapper.__code__:
            return wraps(func)(async_wrapper)
    else:
        if python_func.__code__ is not wrapper.__code__:
            return wraps(func)(wrapper)

    # the target callable was already wrapped
    return func
def normalize_function(
        target: Callable,
        args: Tuple[Any],
        kwargs: Optional[Dict[str, Any]] = None,
        arg_types: Optional[Tuple[Any]] = None,
        kwarg_types: Optional[Dict[str, Any]] = None,
        normalize_to_only_use_kwargs: bool = False) -> Optional[ArgsKwargsPair]:
    """
    Returns normalized arguments to PyTorch functions. This means that
    `args/kwargs` will be matched up to the functional's
    signature and return exclusively kwargs in positional order if
    `normalize_to_only_use_kwargs` is True.
    Also populates default values. Does not support positional-only
    parameters or varargs parameters (*args, **kwargs). Does not support modules.

    May require `arg_types` and `kwarg_types` in order to disambiguate overloads.

    Args:
        target (Callable): Function that we are normalizing
        args (Tuple[Any]): Tuple of args to the function
        kwargs (Optional[Dict[str, Any]]): Dict of kwargs to the function
        arg_types (Optional[Tuple[Any]]): Tuple of arg types for the args
        kwarg_types (Optional[Dict[str, Any]]): Dict of arg types for the kwargs
        normalize_to_only_use_kwargs (bool): Whether to normalize to only use kwargs.

    Returns:
        Returns normalized_args_and_kwargs, or `None` if not successful.
    """
    if kwargs is None:
        kwargs = {}
    new_args_and_kwargs = None
    # Python-level torch functions can be introspected with inspect.signature;
    # everything else goes through the torch op schema path below.
    if target in boolean_dispatched or target.__module__ in [
            'torch.nn.functional', 'torch.functional'
    ]:
        target_for_analysis = target
        if target in boolean_dispatched:
            # HACK: `boolean_dispatch` as used in `torch.nn.functional` makes it so that we have
            # a 2-way dispatch based on a boolean value. Here we check that the `true` and `false`
            # branches of the dispatch have exactly the same signature. If they do, use the `true`
            # branch signature for analysis. Otherwise, leave this un-normalized
            assert not isinstance(target, str)
            dispatched = boolean_dispatched[target]
            if_true, if_false = dispatched['if_true'], dispatched['if_false']
            if inspect.signature(if_true).parameters != inspect.signature(
                    if_false).parameters:
                return None
            target_for_analysis = if_true

        assert callable(target_for_analysis)
        # Unwrap decorator layers so the signature reflects the real callable.
        sig = inspect.signature(inspect.unwrap(target_for_analysis))
        new_args_and_kwargs = _args_kwargs_to_normalized_args_kwargs(
            sig, args, kwargs, normalize_to_only_use_kwargs)
    else:
        assert callable(target)
        torch_op_schemas = get_signature_for_torch_op(target)
        matched_schemas = []
        if torch_op_schemas:
            # Iterate through all of the schema until we find one that matches
            # If one matches, populate `new_args_and_kwargs` with the new args/kwargs
            # values. If none matches, `new_args_and_kwargs` will be None
            for candidate_signature in torch_op_schemas:
                try:
                    candidate_signature.bind(*args, **kwargs)
                    matched_schemas.append(candidate_signature)
                except TypeError as e:
                    continue

            if len(matched_schemas) == 0:
                # Did not match any schema. Cannot normalize
                pass
            elif len(matched_schemas) == 1:
                # Matched exactly one schema, unambiguous
                new_args_and_kwargs = _args_kwargs_to_normalized_args_kwargs(
                    matched_schemas[0], args, kwargs,
                    normalize_to_only_use_kwargs)
            else:
                if arg_types is not None or kwarg_types is not None:
                    # Disambiguate between the matched overloads by binding the
                    # caller-supplied *types* and checking them against each
                    # schema's annotations.
                    arg_types = arg_types if arg_types else cast(
                        Tuple[Any], ())
                    kwarg_types = kwarg_types if kwarg_types else {}
                    for candidate_signature in torch_op_schemas:
                        try:
                            bound_types = candidate_signature.bind(
                                *arg_types, **kwarg_types)
                        except TypeError as e:
                            continue
                        sig_matches = True
                        for arg_name, arg_type in bound_types.arguments.items(
                        ):
                            param = candidate_signature.parameters[arg_name]
                            sig_matches = sig_matches and type_matches(
                                param.annotation, arg_type)
                        if sig_matches:
                            new_args_and_kwargs = _args_kwargs_to_normalized_args_kwargs(
                                candidate_signature, args, kwargs,
                                normalize_to_only_use_kwargs)
                            break
                else:
                    # Matched more than one schema. In this situation, the caller must provide the types of
                    # the arguments of the overload they expect.
                    schema_printouts = '\n'.join(
                        str(schema) for schema in matched_schemas)
                    raise RuntimeError(
                        f'Tried to normalize arguments to {torch.typename(target)} but '
                        f'the schema match was ambiguous! Please provide argument types to '
                        f'the normalize_arguments() call. Available schemas:\n{schema_printouts}'
                    )

    return new_args_and_kwargs
def __call__(self, f):
    """
    Decorator for methods that require some given trace events

    :param events: The list of required events
    :type events: list(str or TraceEventCheckerBase)

    The decorated method must operate on instances that have a ``self.trace``
    attribute.

    If some event requirements have already been defined for it (it has a
    `used_events` attribute, i.e. it has already been decorated), these will
    be combined with the new requirements using an
    :class`AndTraceEventChecker`.
    """
    def unwrap_down_to(obj):
        # Stop unwrapping as soon as we reach a wrapper that already carries
        # event requirements.
        return hasattr(obj, 'used_events')

    try:
        # we want to see through all other kinds of wrappers, down to the
        # one that matters to us
        unwrapped_f = inspect.unwrap(f, stop=unwrap_down_to)
        used_events = unwrapped_f.used_events
    except AttributeError:
        # First decoration: this checker is the whole requirement.
        checker = self
    else:
        # Update the existing checker inplace to avoid adding an extra
        # level of wrappers.
        checker = AndTraceEventChecker([self, used_events])
        unwrapped_f.used_events = checker
        return f

    sig = inspect.signature(f)
    if self.check and sig.parameters:
        @wraps(f)
        def wrapper(self, *args, **kwargs):
            try:
                trace = self.trace
            # If there is no "trace" attribute, silently skip the check. This
            # allows using the decorator for documentation and chaining purpose
            # without having an actual trace to work on.
            except AttributeError:
                pass
            else:
                available_events = set(trace.available_events)
                checker.check_events(available_events)
            return f(self, *args, **kwargs)

    # If the decorated object takes no parameters, we cannot check anything
    else:
        @wraps(f)
        def wrapper(*args, **kwargs):
            return f(*args, **kwargs)

    # Set an attribute on the wrapper itself, so it can be e.g. added
    # to the method documentation
    wrapper.used_events = checker
    return wrapper
def _params(func_obj, annotate=False, link=None, module=None):
    """
    Return a list of rendered parameter strings for `func_obj`'s signature.

    When `annotate` is true, type annotations are kept; when `link` is given,
    identifiers are linkified for HTML output.  Falls back to `["..."]` when
    the signature cannot be introspected.
    """
    try:
        signature = inspect.signature(inspect.unwrap(func_obj))
    except ValueError:
        # I guess this is for C builtin functions?
        return ["..."]

    def safe_default_value(p: inspect.Parameter):
        # Replace defaults whose repr is unstable or unhelpful (memory
        # addresses, classes, os.environ) with a readable stand-in.
        if p.default is inspect.Parameter.empty:
            return p
        replacement = None
        if p.default is os.environ:
            replacement = 'os.environ'
        elif inspect.isclass(p.default):
            replacement = p.default.__module__ + '.' + p.default.__qualname__
        elif ' at 0x' in repr(p.default):
            replacement = re.sub(r' at 0x\w+', '', repr(p.default))

        nonlocal link
        if link and ('<' in repr(p.default) or '>' in repr(p.default)):
            import html
            replacement = html.escape(replacement or p.default)

        if replacement:
            # A tiny object whose repr is exactly the replacement text.
            class mock:
                def __repr__(self):
                    return replacement
            return p.replace(default=mock())
        return p

    params = []
    kw_only = False
    EMPTY = inspect.Parameter.empty

    if link:
        from pdoc.html_helpers import _linkify
        _linkify = partial(_linkify, link=link, module=module)

    for p in signature.parameters.values():  # type: inspect.Parameter
        # Skip non-public parameters that have a default (implementation detail).
        if not _is_public(p.name) and p.default is not EMPTY:
            continue

        if p.kind == p.VAR_POSITIONAL:
            kw_only = True
        if p.kind == p.KEYWORD_ONLY and not kw_only:
            # Emit the lone '*' separator before the first keyword-only param.
            kw_only = True
            params.append('*')

        p = safe_default_value(p)
        if not annotate:
            p = p.replace(annotation=EMPTY)
        s = str(p)
        if p.annotation is not EMPTY:
            if sys.version_info < (3, 7):
                # PEP8-normalize whitespace
                s = re.sub(r'(?<!\s)=(?!\s)', ' = ',
                           re.sub(r':(?!\s)', ': ', s, 1), 1)
            # "Eval" forward-declarations (typing string literals)
            s = re.sub(r'(?<=: )[\'"]|[\'"](?= = )', '', s, 2)
            s = s.replace(' ', '\N{NBSP}')  # prevent improper line breaking
            if link:
                s = re.sub(r'[\w\.]+', _linkify, s)
        params.append(s)

    return params
def is_generator_fixture(self):
    """True when the fixture's underlying (unwrapped) function is a generator function."""
    target = inspect.unwrap(self.fn)
    return inspect.isgeneratorfunction(target)
def test_given_invalid_challenge_id_should_raise_unprocessable_entity_error(
        self, challenge_mock: MagicMock):
    """Deleting a nonexistent challenge must raise UnprocessableEntity."""
    # Simulate a lookup miss for the requested id.
    challenge_mock.query.filter_by.return_value.first.return_value = None

    delete = inspect.unwrap(challenges.delete_challenge)
    with raises(errors.UnprocessableEntity):
        delete(AN_ADMINISTRATOR, -1)
def is_from_module(obj, module):
    """Return whether `obj` is from module `module`."""
    defining_module = inspect.getmodule(inspect.unwrap(obj))
    if defining_module is None:
        # No detectable home module: treat as belonging everywhere.
        return True
    return defining_module.__name__ == module.__name__
def process_signature(app, what: str, name: str, obj, options, signature,
                      return_annotation):
    """
    Sphinx ``autodoc-process-signature`` handler: strip annotations from the
    displayed signature (they are documented separately) and drop the implicit
    first parameter (``self``/``cls``) where appropriate.

    Returns a ``(signature_string, return_annotation)`` tuple, or ``None`` to
    leave the signature untouched.
    """
    if not callable(obj):
        return

    original_obj = obj
    if inspect.isclass(obj):
        # Document the constructor's signature for classes.
        obj = getattr(obj, '__init__', getattr(obj, '__new__', None))

    if not getattr(obj, '__annotations__', None):
        return

    obj = inspect.unwrap(obj)
    signature = Signature(obj)
    # Annotations are removed from the rendered signature; the typehints
    # extension documents them in the body instead.
    parameters = [
        param.replace(annotation=inspect.Parameter.empty)
        for param in signature.parameters.values()
    ]

    # The generated dataclass __init__() and class are weird and need extra checks
    # This helper function operates on the generated class and methods
    # of a dataclass, not an instantiated dataclass object. As such,
    # it cannot be replaced by a call to `dataclasses.is_dataclass()`.
    def _is_dataclass(name: str, what: str, qualname: str) -> bool:
        if what == 'method' and name.endswith('.__init__'):  # generated __init__()
            return True
        if what == 'class' and qualname.endswith('.__init__'):  # generated class
            return True
        return False

    if '<locals>' in obj.__qualname__ and not _is_dataclass(
            name, what, obj.__qualname__):
        logger.warning(
            'Cannot treat a function defined as a local function: "%s" (use @functools.wraps)',
            name)
        return

    if parameters:
        if inspect.isclass(original_obj) or (what == 'method'
                                             and name.endswith('.__init__')):
            # Constructors: drop the implicit "self".
            del parameters[0]
        elif what == 'method':
            # Resolve the class that actually defines the method so we can
            # tell plain methods (drop "self") from class/static methods.
            outer = inspect.getmodule(obj)
            for clsname in obj.__qualname__.split('.')[:-1]:
                outer = getattr(outer, clsname)

            method_name = obj.__name__
            if method_name.startswith("__") and not method_name.endswith("__"):
                # If the method starts with double underscore (dunder)
                # Python applies mangling so we need to prepend the class name.
                # This doesn't happen if it always ends with double underscore.
                class_name = obj.__qualname__.split('.')[-2]
                method_name = "_{c}{m}".format(c=class_name, m=method_name)

            method_object = outer.__dict__[method_name] if outer else obj
            if not isinstance(method_object, (classmethod, staticmethod)):
                del parameters[0]

    signature = signature.replace(parameters=parameters,
                                  return_annotation=inspect.Signature.empty)

    # Backslashes must be escaped for reST output.
    return stringify_signature(signature).replace('\\', '\\\\'), None
# %%
myfunc.__wrapped__()  # what will I get?

# %%
myfunc.__wrapped__.__wrapped__()

# %%
"""The way it is implemented in the source code: for each stacked decorator,
the __wrapped__ attribute of the wrapper points directly to the function it
wraps."""

# %%
"""Write a decorator that multiplies the result by two."""

# %%
import inspect
# It seems inspect follows the __wrapped__ chain to return the original
# function. More analysis of the inspect module would be nice.
inspect.unwrap(myfunc)()

# %%
"""
Further investigation shows that in Python 3.5, @wraps appears to preserve
the signature but does have issues. Some developers created other packages to
really preserve the signature. More work on understanding signatures is needed
to see whether functools.wraps actually does what it is supposed to do, or
whether there are obscure cases where it does not work. I don't know if the
issue exists in newer versions of Python.

On a first approach, I think we should proceed as if it works, because it is
what is widely used, but we may still run into unexpected issues with
decorators.

https://stackoverflow.com/questions/308999/what-does-functools-wraps-do/55102697#55102697
"""

# %%
inspect.signature(myfunc)

# %%
inspect.signature(myfunc, follow_wrapped=False)

# %%
def process_docstring(app, what, name, obj, options, lines):
    """
    Sphinx ``autodoc-process-docstring`` handler: insert ``:type:`` and
    ``:rtype:`` fields derived from the object's type hints into ``lines``
    (modified in place).
    """
    original_obj = obj
    if isinstance(obj, property):
        # Document the getter for properties.
        obj = obj.fget

    if callable(obj):
        if inspect.isclass(obj):
            # For classes, hints come from the constructor.
            obj = getattr(obj, '__init__')

        obj = inspect.unwrap(obj)
        type_hints = get_all_type_hints(obj, name)

        for argname, annotation in type_hints.items():
            if argname == 'return':
                continue  # this is handled separately later
            if argname.endswith('_'):
                # Escape the trailing underscore so reST does not treat the
                # name as a link target.
                argname = '{}\\_'.format(argname[:-1])

            formatted_annotation = format_annotation(
                annotation,
                fully_qualified=app.config.typehints_fully_qualified,
                simplify_optional_unions=app.config.simplify_optional_unions)

            # Accept any of the reST spellings for a parameter field.
            searchfor = [
                ':{} {}:'.format(field, argname)
                for field in ('param', 'parameter', 'arg', 'argument')
            ]
            insert_index = None

            for i, line in enumerate(lines):
                if any(
                        line.startswith(search_string)
                        for search_string in searchfor):
                    insert_index = i
                    break

            if insert_index is None and app.config.always_document_param_types:
                lines.append(':param {}:'.format(argname))
                insert_index = len(lines)

            if insert_index is not None:
                lines.insert(
                    insert_index,
                    ':type {}: {}'.format(argname, formatted_annotation))

        if 'return' in type_hints and not inspect.isclass(original_obj):
            # This avoids adding a return type for data class __init__ methods
            if what == 'method' and name.endswith('.__init__'):
                return

            formatted_annotation = format_annotation(
                type_hints['return'],
                fully_qualified=app.config.typehints_fully_qualified,
                simplify_optional_unions=app.config.simplify_optional_unions)

            insert_index = len(lines)
            for i, line in enumerate(lines):
                if line.startswith(':rtype:'):
                    # Author already documented the return type; do nothing.
                    insert_index = None
                    break
                elif line.startswith(':return:') or line.startswith(
                        ':returns:'):
                    insert_index = i

            if insert_index is not None and app.config.typehints_document_rtype:
                if insert_index == len(lines):
                    # Ensure that :rtype: doesn't get joined with a paragraph of text, which
                    # prevents it being interpreted.
                    lines.append('')
                    insert_index += 1

                lines.insert(insert_index,
                             ':rtype: {}'.format(formatted_annotation))
def dec(func):
    """
    Register `func` as a template tag whose arguments are parsed from the
    template token at compile time.

    NOTE(review): relies on `name`, `takes_context` and `self` captured from
    the enclosing scope (Django's ``Library.simple_tag``-style registration) —
    confirm against the surrounding definition.
    """
    # Introspect the real function behind any decorators so template
    # arguments can be matched against its true signature.
    params, varargs, varkw, defaults, kwonly, kwonly_defaults, _ = getfullargspec(unwrap(func))
    function_name = (name or
                     getattr(func, '_decorated_function', func).__name__)

    @functools.wraps(func)
    def compile_func(parser, token):
        # Drop the tag name itself; the rest are the tag's arguments.
        bits = token.split_contents()[1:]
        target_var = None
        # Support the "... as var" suffix for storing the result.
        if len(bits) >= 2 and bits[-2] == 'as':
            target_var = bits[-1]
            bits = bits[:-2]
        args, kwargs = parse_bits(
            parser, bits, params, varargs, varkw, defaults,
            kwonly, kwonly_defaults, takes_context, function_name,
        )
        return SimpleNode(func, takes_context, args, kwargs, target_var)

    self.tag(function_name, compile_func)
    return func
def create_args_for_root(self, root_fn, is_module, concrete_args=None):
    """
    Create ``placeholder`` nodes corresponding to the signature of the ``root``
    Module. This method introspects root's signature and emits those
    nodes accordingly, also supporting ``*args`` and ``**kwargs``.
    """
    # In some cases, a function or method has been decorated with a wrapper
    # defined via ``functools.wraps``. In this case, the outer code object
    # will likely not contain the actual parameters we care about, so unwrap
    # the function to get to the innermost callable.
    fn_for_analysis = inspect.unwrap(root_fn)
    co = fn_for_analysis.__code__
    total_args = co.co_argcount + co.co_kwonlyargcount
    orig_args = list(co.co_varnames)
    names_iter = iter(co.co_varnames)
    args: List[Any] = []
    skip_arg_idx = 0
    if is_module:
        if total_args == 0:
            raise RuntimeError(
                '``self`` argument cannot be part of *args expansion!')
        skip_arg_idx = 1
        next(names_iter)  # skip self
        args.append(self.root)

    sig = inspect.signature(fn_for_analysis)

    def proxy_placeholder(name: str):
        # When a concrete value was supplied for this parameter, specialize
        # the traced graph on it instead of emitting a free placeholder.
        if concrete_args is not None and name in concrete_args:
            cnt = 0

            def replace_ph(x):
                nonlocal cnt
                cnt += 1
                param = sig.parameters[name]
                default = (
                ) if param.default is inspect.Parameter.empty else (
                    param.default, )
                out = self.create_proxy('placeholder', f'{name}_{str(cnt)}',
                                        default, {})
                if x == PH:
                    return out
                # Union[int, bool] == bool in Python <= 3.6
                if type(x) == bool or type(
                        x) in base_types and type(x) != torch.Tensor:
                    torch._assert(
                        out == x,
                        f"{name} has been specialized to have value {x}")
                else:
                    torch.warnings.warn(
                        "Was not able to add assertion to guarantee correct inputs to "
                        "specialized function. It is up to the user to make sure that your inputs match the "
                        "inputs you specialized the function with.")

                return x

            return pytree.tree_map(replace_ph, concrete_args[name])
        if name[0] == '*':
            # *args/**kwargs placeholders carry no default.
            default = ()
        else:
            param = sig.parameters[name]
            default = () if param.default is inspect.Parameter.empty else (
                param.default, )  # type: ignore[assignment]
        return self.create_proxy(
            'placeholder',
            name,
            default, {},
            type_expr=fn_for_analysis.__annotations__.get(name, None))

    arg_names = [
        next(names_iter) for idx in range(skip_arg_idx, total_args)
    ]
    if isinstance(concrete_args, tuple):
        # Positional concrete args are matched to parameter names by order.
        assert (len(arg_names) == len(concrete_args))
        concrete_args = {
            name: val
            for name, val in zip(arg_names, concrete_args)
        }
    args.extend(proxy_placeholder(names) for names in arg_names)

    if co.co_kwonlyargcount > 0 or co.co_flags & HAS_VARSTUFF:
        # TODO: type annotations for *args and **kwargs
        if co.co_flags & inspect.CO_VARARGS:
            args.append(proxy_placeholder('*' + next(names_iter)))
        if co.co_flags & inspect.CO_VARKEYWORDS:
            args.append(proxy_placeholder('**' + next(names_iter)))
        root_fn = _patch_function(root_fn, len(args))

    flat_args, in_spec = pytree.tree_flatten(tuple(args))
    if any(not isinstance(i, pytree.LeafSpec)
           for i in in_spec.children_specs):
        # In the case that we have pytree-flattened inputs in
        # `concrete_args`, generate a flattening wrapper around the
        # original root function and return that.
        self.graph._pytree_info = _PyTreeInfo(orig_args[:total_args], in_spec,
                                              None)

        def flatten_fn(*args):
            tree_args = pytree.tree_unflatten(list(args), in_spec)
            tree_out = root_fn(*tree_args)
            out_args, out_spec = pytree.tree_flatten(tree_out)
            assert (self.graph._pytree_info is not None)
            self.graph._pytree_info = self.graph._pytree_info._replace(
                out_spec=out_spec)
            return out_args

        return flatten_fn, flat_args
    return root_fn, args
def is_async_generator_fixture(self):
    """True when the fixture's underlying (unwrapped) function is an async generator function."""
    target = inspect.unwrap(self.fn)
    return inspect.isasyncgenfunction(target)
def test_given_an_administrator_of_another_event_should_raise_unauthorized_error(
        self, challenge_mock: MagicMock, flag_mock: MagicMock):
    """Editing a challenge that belongs to a different event must raise Unauthorized."""
    edit = inspect.unwrap(challenges.edit_challenge)
    with raises(errors.Unauthorized):
        edit(ANOTHER_ADMINISTRATOR, 1)
def is_coroutine_fixture(self):
    """True when the fixture's underlying (unwrapped) function is a coroutine function."""
    target = inspect.unwrap(self.fn)
    return inspect.iscoroutinefunction(target)
def test_given_already_used_name_should_raise_unprocessable_entity_error(
        self, challenge_mock: MagicMock):
    """Creating a challenge whose name already exists must raise UnprocessableEntity."""
    # The name lookup finds an existing challenge.
    challenge_mock.query.filter_by.return_value.first.return_value = A_CHALLENGE

    create = inspect.unwrap(challenges.create_challenge)
    with raises(errors.UnprocessableEntity):
        create(AN_ADMINISTRATOR)
def process_docstring(app, what, name, obj, options, lines):
    """
    Sphinx ``autodoc-process-docstring`` handler: insert ``:type:`` and
    ``:rtype:`` fields derived from the object's type hints into ``lines``
    (modified in place).
    """
    if isinstance(obj, property):
        # Document the getter for properties.
        obj = obj.fget

    if callable(obj):
        if what in ('class', 'exception'):
            # For classes/exceptions, hints come from the constructor.
            obj = getattr(obj, '__init__')

        obj = inspect.unwrap(obj)
        type_hints = get_all_type_hints(obj, name)

        for argname, annotation in type_hints.items():
            if argname == 'return':
                continue  # this is handled separately later
            if argname.endswith('_'):
                # Escape the trailing underscore so reST does not treat the
                # name as a link target.
                argname = '{}\\_'.format(argname[:-1])

            formatted_annotation = format_annotation(
                annotation,
                fully_qualified=app.config.typehints_fully_qualified)

            searchfor = ':param {}:'.format(argname)
            insert_index = None

            for i, line in enumerate(lines):
                if line.startswith(searchfor):
                    insert_index = i
                    break

            if insert_index is None and app.config.always_document_param_types:
                lines.append(searchfor)
                insert_index = len(lines)

            if insert_index is not None:
                lines.insert(
                    insert_index,
                    ':type {}: {}'.format(argname, formatted_annotation))

        if 'return' in type_hints and what not in ('class', 'exception'):
            formatted_annotation = format_annotation(
                type_hints['return'],
                fully_qualified=app.config.typehints_fully_qualified)

            insert_index = len(lines)
            for i, line in enumerate(lines):
                if line.startswith(':rtype:'):
                    # Author already documented the return type; do nothing.
                    insert_index = None
                    break
                elif line.startswith(':return:') or line.startswith(
                        ':returns:'):
                    insert_index = i

            if insert_index is not None:
                if insert_index == len(lines):
                    # Ensure that :rtype: doesn't get joined with a paragraph of text, which
                    # prevents it being interpreted.
                    lines.append('')
                    insert_index += 1

                lines.insert(insert_index,
                             ':rtype: {}'.format(formatted_annotation))
def is_decorated(cls, f):
    """Return True when `f` (seen through any wrappers) was registered via @view_function."""
    return inspect.unwrap(f) in cls.DECORATED_FUNCTIONS
def _params(func_obj, annotate=False, link=None, module=None):
    """
    Return a list of rendered parameter strings for `func_obj`'s signature.

    When `annotate` is true, type annotations are kept; when `link` is given,
    identifiers are linkified for HTML output.  Falls back to `["..."]` when
    the signature cannot be introspected.
    """
    try:
        signature = inspect.signature(inspect.unwrap(func_obj))
    except ValueError:
        # I guess this is for C builtin functions?
        # TODO: Extract signature from the first line of the docstring, i.e.
        # https://github.com/mitmproxy/pdoc/commit/010d996003bc5b72fcf5fa515edbcc0142819919
        return ["..."]

    def safe_default_value(p: inspect.Parameter):
        # Replace defaults whose repr is unstable or unhelpful (memory
        # addresses, classes, std streams, os.environ) with a readable
        # stand-in.
        value = p.default
        if value is inspect.Parameter.empty:
            return p

        replacement = next((i for i in ('os.environ',
                                        'sys.stdin',
                                        'sys.stdout',
                                        'sys.stderr',)
                            if value is eval(i)), None)
        if not replacement:
            if isinstance(value, enum.Enum):
                replacement = str(value)
            elif inspect.isclass(value):
                replacement = value.__module__ + '.' + value.__qualname__
            elif ' at 0x' in repr(value):
                replacement = re.sub(r' at 0x\w+', '', repr(value))

            nonlocal link
            if link and ('<' in repr(value) or '>' in repr(value)):
                import html
                replacement = html.escape(replacement or repr(value))

        if replacement:
            # A tiny object whose repr is exactly the replacement text.
            class mock:
                def __repr__(self):
                    return replacement
            return p.replace(default=mock())
        return p

    params = []
    kw_only = False
    EMPTY = inspect.Parameter.empty

    if link:
        from app.pdoc.html_helpers import _linkify
        _linkify = partial(_linkify, link=link, module=module)

    for p in signature.parameters.values():  # type: inspect.Parameter
        # Skip non-public parameters that have a default (implementation detail).
        if not _is_public(p.name) and p.default is not EMPTY:
            continue

        if p.kind == p.VAR_POSITIONAL:
            kw_only = True
        if p.kind == p.KEYWORD_ONLY and not kw_only:
            # Emit the lone '*' separator before the first keyword-only param.
            kw_only = True
            params.append('*')

        p = safe_default_value(p)
        if not annotate:
            p = p.replace(annotation=EMPTY)
        s = str(p)
        if p.annotation is not EMPTY:
            if sys.version_info < (3, 7):
                # PEP8-normalize whitespace
                s = re.sub(r'(?<!\s)=(?!\s)', ' = ',
                           re.sub(r':(?!\s)', ': ', s, 1), 1)
            # "Eval" forward-declarations (typing string literals)
            s = re.sub(r'(?<=: )[\'"]|[\'"](?= = )', '', s, 2)
            s = s.replace(' ', '\N{NBSP}')  # prevent improper line breaking
            if link:
                s = re.sub(r'[\w\.]+', _linkify, s)
        params.append(s)

    return params
def is_async_test(self) -> bool:
    """True when the test's underlying (unwrapped) function is a coroutine function."""
    target = inspect.unwrap(self.fn)
    return inspect.iscoroutinefunction(target)
def inline_source(reference, include_def=True, include_docstring=True,
                  indent_level=0, show_line_numbers=True,
                  highlight_lines=None):
    """
    Macro to embed the source code of the given reference into mkdocs.

    Parameters
    ----------
    reference : str
        The dotted path to the object whose source is to be displayed.
    include_def : bool
        If True, include the definition of the class, function, or method.
    include_docstring : bool
        If True, include the docstring of the class, function, or method.
    indent_level : int
        The baseline indentation for the source.
    show_line_numbers : bool
        If True, display the line numbers of the source code.
    highlight_lines : sequence or None
        If provided, line numbers to be highlighted in the displayed source.

    Returns
    -------
    str
        Markdown-formatted source-code for the given reference.
    """
    obj = get_object_from_reference(reference)
    # See through decorators so the real source is shown.
    obj = inspect.unwrap(obj)

    source = ''.join(inspect.getsourcelines(obj)[0])

    # DOTALL+MULTILINE: the declaration match runs up to the first ':' and the
    # docstring match spans triple-quoted strings across lines.
    re_declaration = re.compile(r'^(.+?):', flags=(re.DOTALL | re.MULTILINE))
    re_docstring = re.compile(r'(""".+?""")', flags=(re.DOTALL | re.MULTILINE))

    if not include_def:
        source = re_declaration.sub('', source, count=1)
    if not include_docstring:
        source = re_docstring.sub('', source, count=1)

    source = textwrap.dedent(source)
    source = source.strip()

    indent = indent_level * ' '
    line_numbers = ' linenums="1"' if show_line_numbers else ''
    if highlight_lines is not None:
        hl_lines = ' hl_lines="' + ' '.join(
            [str(i) for i in highlight_lines]) + '"'
    else:
        hl_lines = ''

    # Fenced code block understood by the mkdocs highlighter.
    result = f'```python{line_numbers}{hl_lines}\n{source}\n```'

    return textwrap.indent(result, indent)