Code example #1
File: test_support.py Project: Pitje06/symfit
    def test_keywordonly_inheritance(self):
        """
        Tests if the decorator deals with inheritance properly.
        """
        kinds_B = {
            'self': inspect_sig.Parameter.POSITIONAL_OR_KEYWORD,
            'args': inspect_sig.Parameter.VAR_POSITIONAL,
            'kwargs': inspect_sig.Parameter.VAR_KEYWORD,
            'e': inspect_sig.Parameter.KEYWORD_ONLY,
        }
        kinds_A = {
            'self': inspect_sig.Parameter.POSITIONAL_OR_KEYWORD,
            'a': inspect_sig.Parameter.POSITIONAL_OR_KEYWORD,
            'b': inspect_sig.Parameter.POSITIONAL_OR_KEYWORD,
            'kwargs': inspect_sig.Parameter.VAR_KEYWORD,
            'c': inspect_sig.Parameter.KEYWORD_ONLY,
            'd': inspect_sig.Parameter.KEYWORD_ONLY,
        }
        sig_B = inspect_sig.signature(self._B.__init__)
        for param in sig_B.parameters.values():
            self.assertTrue(param.kind == kinds_B[param.name])
        self.assertEqual(len(sig_B.parameters), len(kinds_B))

        sig_A = inspect_sig.signature(self._A.__init__)
        for param in sig_A.parameters.values():
            self.assertTrue(param.kind == kinds_A[param.name])
        self.assertEqual(len(sig_A.parameters), len(kinds_A))

        with self.assertRaises(TypeError):
            b = self._B(3, 5, 7, d=2, e=6)
Code example #2
File: processors.py Project: watsonpy/watson-di
 def instantiate(self, definition):
     item = definition['item']
     if hasattr(item, '__ioc_definition__'):
         definition.update(item.__ioc_definition__)
     args, kwargs = [], {}
     is_lambda = definition.get('call_type', None) == FUNCTION_TYPE
     sig = signature(item)
     if 'container' in sig.parameters:
         kwargs['container'] = self.container
     if 'init' in definition:
         init = definition['init']
         updated_args, updated_kwargs = self.get_args_kwargs(init)
         args.extend(updated_args)
         kwargs.update(updated_kwargs)
         if isfunction(init):
             sig = signature(init)
             if 'container' in sig.parameters:
                 kwargs['container'] = self.container
             init = init(*args, **kwargs)
             definition['init'] = init
         if not is_lambda:
             args, kwargs = self.get_args_kwargs(init)
     item = item(*args, **kwargs)
     if is_lambda and isinstance(item, str):
         # Special case for items that might be retrieved via lambda expressions
         with suppress(Exception):
             definition['item'] = self.container.load_item_from_string(item)
             item, args, kwargs = self.instantiate(definition)
     return item, args, kwargs
Code example #3
File: plotfunctions.py Project: peterstangl/flavio
def band_plot(log_likelihood, x_min, x_max, y_min, y_max,
              n_sigma=1, steps=20, **kwargs):
    r"""This is an alias for `likelihood_contour` which is present for
    backward compatibility."""
    warnings.warn("The `band_plot` function has been replaced "
                  "by `likelihood_contour` (or "
                  "`likelihood_contour_data` in conjunction with `contour`) "
                  "and might be removed in the future. "
                  "Please update your code.", FutureWarning)
    valid_args = inspect.signature(likelihood_contour_data).parameters.keys()
    data_kwargs = {k:v for k,v in kwargs.items() if k in valid_args}
    if 'pre_calculated_z' not in kwargs:
        contour_kwargs = likelihood_contour_data(log_likelihood,
                      x_min, x_max, y_min, y_max,
                      n_sigma, steps, **data_kwargs)
    else:
        contour_kwargs = {}
        nx, ny = kwargs['pre_calculated_z'].shape
        _x = np.linspace(x_min, x_max, nx)
        _y = np.linspace(y_min, y_max, ny)
        x, y = np.meshgrid(_x, _y)
        contour_kwargs['x'] = x
        contour_kwargs['y'] = y
        contour_kwargs['z'] = kwargs['pre_calculated_z']
        if isinstance(n_sigma, Number):
            contour_kwargs['levels'] = [delta_chi2(n_sigma, dof=2)]
        else:
            contour_kwargs['levels'] = [delta_chi2(n, dof=2) for n in n_sigma]
    valid_args = inspect.signature(contour).parameters.keys()
    contour_kwargs.update({k:v for k,v in kwargs.items() if k in valid_args})
    contour(**contour_kwargs)
    return contour_kwargs['x'], contour_kwargs['y'], contour_kwargs['z']
Code example #4
File: embedding.py Project: cjbe/artiq
    def _quote_rpc(self, function, loc):
        if isinstance(function, SpecializedFunction):
            host_function = function.host_function
        else:
            host_function = function
        ret_type = builtins.TNone()

        if isinstance(host_function, pytypes.BuiltinFunctionType):
            pass
        elif (isinstance(host_function, pytypes.FunctionType) or \
              isinstance(host_function, pytypes.MethodType)):
            if isinstance(host_function, pytypes.FunctionType):
                signature = inspect.signature(host_function)
            else:
                # inspect bug?
                signature = inspect.signature(host_function.__func__)
            if signature.return_annotation is not inspect.Signature.empty:
                ret_type = self._extract_annot(host_function, signature.return_annotation,
                                               "return type", loc, is_syscall=False)
        else:
            assert False

        function_type = types.TRPC(ret_type,
                                   service=self.embedding_map.store_object(host_function))
        self.functions[function] = function_type
        return function_type
Code example #5
File: functors.py Project: vhb/pyrser
    def checkParam(self, params: list):
        if not hasattr(self.__class__, "begin") or not hasattr(self.__class__, "end"):
            return False
        sbegin = inspect.signature(self.begin)
        send = inspect.signature(self.end)

        idx = 0
        for param in list(sbegin.parameters.values())[1:]:
            if idx >= len(params) and param.default is inspect.Parameter.empty:
                raise RuntimeError(
                    "{}: No parameter given to begin"
                    " method for argument {}, expected {}".format(self.__class__.__name__, idx, param.annotation)
                )
            elif idx < len(params) and not isinstance(params[idx], param.annotation):
                raise TypeError(
                    "{}: Wrong parameter in begin method parameter {} "
                    "expected {} got {}".format(self.__class__.__name__, idx, type(params[idx]), param.annotation)
                )
            idx += 1

        idx = 0
        for param in list(send.parameters.values())[1:]:
            if idx >= len(params) and param.default is inspect.Parameter.empty:
                raise RuntimeError(
                    "{}: No parameter given to end"
                    " method for argument {}, expected {}".format(self.__class__.__name__, idx, param.annotation)
                )
            elif idx < len(params) and not isinstance(params[idx], param.annotation):
                raise TypeError(
                    "{}: Wrong parameter in end method parameter {} "
                    "expected {} got {}".format(self.__class__.__name__, idx, type(params[idx]), param.annotation)
                )
            idx += 1

        return True
Code example #6
File: tagger.py Project: wangzhenya/DeepPavlov
    def __init__(self, save_path: str = None, load_path: str = None, mode: str = None, **kwargs):
        # Calls parent constructor. Results in creation of save_folder if it doesn't exist
        super().__init__(save_path=save_path, load_path=load_path, mode=mode)

        # Dicts are mutable! To prevent changes in config dict outside this class
        # we use deepcopy
        opt = copy.deepcopy(kwargs)

        # Finds all input parameters of the network __init__ to pass them into network later
        network_parameter_names = list(inspect.signature(CharacterTagger.__init__).parameters)
        # Fills all provided parameters from opt (opt is a dictionary formed from the model
        # json config file, except the "name" field)
        network_parameters = {par: opt[par] for par in network_parameter_names if par in opt}

        self._net = CharacterTagger(**network_parameters)

        # Finds all parameters for network train to pass them into train method later
        train_parameters_names = list(inspect.signature(self._net.train_on_batch).parameters)

        # Fills all provided parameters from opt
        train_parameters = {par: opt[par] for par in train_parameters_names if par in opt}
        self.train_parameters = train_parameters
        self.opt = opt

        # Tries to load the model from model `load_path`, if it is available
        self.load()
Code example #7
File: __init__.py Project: Iotic-Labs/py-IoticAgent
 def arg_checker(func, *args, **kwargs):
     try:
         signature(func).bind(*args, **kwargs)
     except TypeError:
         return False
     else:
         return True
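A minimal usage sketch (the greet function below is hypothetical, and it assumes arg_checker and inspect's signature are importable as in the snippet above):

def greet(name, punctuation="!"):
    return name + punctuation

arg_checker(greet, "world")            # True: binds cleanly
arg_checker(greet, "world", excl="?")  # False: unexpected keyword argument
arg_checker(greet)                     # False: missing 'name'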
Code example #8
File: generic_utils.py Project: marcotcr/lime
def has_arg(fn, arg_name):
    """Checks if a callable accepts a given keyword argument.

    Args:
        fn: callable to inspect
        arg_name: string, keyword argument name to check

    Returns:
        bool, whether `fn` accepts a `arg_name` keyword argument.
    """
    if sys.version_info < (3,):
        if isinstance(fn, types.FunctionType) or isinstance(fn, types.MethodType):
            arg_spec = inspect.getargspec(fn)
        else:
            try:
                arg_spec = inspect.getargspec(fn.__call__)
            except AttributeError:
                return False
        return (arg_name in arg_spec.args)
    elif sys.version_info < (3, 6):
        arg_spec = inspect.getfullargspec(fn)
        return (arg_name in arg_spec.args or
                arg_name in arg_spec.kwonlyargs)
    else:
        try:
            signature = inspect.signature(fn)
        except ValueError:
            # handling Cython
            signature = inspect.signature(fn.__call__)
        parameter = signature.parameters.get(arg_name)
        if parameter is None:
            return False
        return (parameter.kind in (inspect.Parameter.POSITIONAL_OR_KEYWORD,
                                   inspect.Parameter.KEYWORD_ONLY))
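A short, hypothetical illustration of how has_arg answers for different callables (fit is made up for this sketch):

def fit(x, y, sample_weight=None):
    pass

has_arg(fit, 'sample_weight')       # True: named keyword argument exists
has_arg(fit, 'verbose')             # False: no such parameter
has_arg(lambda *args: None, 'x')    # False: *args is not a named keyword parameter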
Code example #9
File: base_component.py Project: plotly/dash
def _explicitize_args(func):
    # Python 2
    if hasattr(func, 'func_code'):
        varnames = func.func_code.co_varnames
    # Python 3
    else:
        varnames = func.__code__.co_varnames

    def wrapper(*args, **kwargs):
        if '_explicit_args' in kwargs.keys():
            raise Exception('Variable _explicit_args should not be set.')
        kwargs['_explicit_args'] = \
            list(
                set(
                    list(varnames[:len(args)]) + [k for k, _ in kwargs.items()]
                )
            )
        if 'self' in kwargs['_explicit_args']:
            kwargs['_explicit_args'].remove('self')
        return func(*args, **kwargs)

    # If Python 3, we can set the function signature to be correct
    if hasattr(inspect, 'signature'):
        # pylint: disable=no-member
        new_sig = inspect.signature(wrapper).replace(
            parameters=inspect.signature(func).parameters.values()
        )
        wrapper.__signature__ = new_sig
    return wrapper
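A hypothetical sketch of what the decorator records; real Dash components differ, this only illustrates the _explicit_args mechanism:

class Div(object):
    @_explicitize_args
    def __init__(self, children=None, id=None, style=None, **kwargs):
        # '_explicit_args' lists the parameters the caller actually supplied
        self.explicit = kwargs.get('_explicit_args')

d = Div('hello', id='main')
# d.explicit would contain 'children' and 'id' ('self' removed, order not guaranteed)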
Code example #10
File: pyoverload.py Project: Nahnja/PyOverload
        def dispatch(instance, *args, **kwargs):
            for fun in self:
                sig = inspect.signature(fun)
                try:
                    # check if signatures match - number and names of arguments work
                    bound = sig.bind(instance, *args, **kwargs)
                    bound_with_annotation = {
                        arg: [bound.arguments[arg], sig.parameters[arg].annotation]
                        for arg in bound.arguments.keys()
                    }

                    try:
                        # see if annotations are used, if so use them to convert arguments to the expected type
                        for arg, (value, annotation) in bound_with_annotation.items():
                            if (callable(annotation)
                                    and annotation is not inspect.Signature.empty):
                                bound.arguments[arg] = annotation(value)
                        # found a proper match!
                        return fun(*bound.args, **bound.kwargs)
                    except ValueError as e:
                        # annotations used, argument types mismatch - try the next function
                        pass

                except TypeError as e:
                    # signatures don't match - try the next function
                    pass

            raise ValueError(
                "no method found to handle passed arguments {} {} {}\n{}".format(
                    instance, args, kwargs,
                    "\n".join(str(inspect.signature(fun)) for fun in self)
                )
            )
Code example #11
File: help.py Project: jpurma/Kataja
 def method(self, command=None):
     """ Dump keyboard shortcuts to console. At some point, make this to use
     dialog window instead.
     :return: None
     """
     if command:
          # command is probably a KatajaAction's run_command method;
          # in that case we want its 'method' method's docstring.
         found = False
         for cls in inspect.getmro(command.__self__.__class__):
             if cls.__dict__.get('method', None):
                 # print('----------------------------')
                 print('<b>' + cls.k_action_uid + str(inspect.signature(cls.method)) + '</b>')
                 print(f'<i>""" {cls.method.__doc__.replace("    ", " ")} """</i>')
                 found = True
                 break
         if not found:
             print(command.__doc__)
     else:
         d = ctrl.ui.actions
         keys = sorted(list(d.keys()))
         print('---------- Available actions ----------')
         for key in keys:
             my_class = d[key].__class__
             command = getattr(my_class, "k_command", "")
             sig = str(inspect.signature(my_class.method))
             if sig.startswith('(self, '):
                 sig = '(' + sig[7:]
             elif sig == '(self)':
                 sig = '()'
             print(f'<b>{key + sig:.<60}</b> : {command}')
         print('---------------------------------------')
         print('<b>help(method_name)</b> for more information.')
Code example #12
 def decorated(*args, **kwargs):
     for param, arg in zip(inspect.signature(f).parameters.values(), args):
         if param.name not in kwargs:
             kwargs[param.name] = arg
     decoded_args = {}
     for param in inspect.signature(f).parameters.values():
         arg = kwargs[param.name]
         if param.kind is not inspect.Parameter.POSITIONAL_OR_KEYWORD:
             raise ValueError('The decode_args function only works for POSITIONAL_OR_KEYWORD parameters, but a {} parameter was found'.format(param.kind))
         if param.annotation is inspect.Parameter.empty or not isinstance(arg, str): # no annotation or a direct function call
             decoded_args[param.name] = arg
         elif param.annotation is Dimension:
             try:
                 int(arg)
              except ValueError:
                 decoded_args[param.name] = Dimension[arg]
             else:
                 decoded_args[param.name] = Dimension(int(arg))
         elif param.annotation is Player:
             decoded_args[param.name] = Player(arg)
         elif param.annotation is int:
             decoded_args[param.name] = int(arg)
         elif param.annotation is minecraft.World:
             decoded_args[param.name] = minecraft.World(arg)
         elif param.annotation == 'color':
             decoded_args[param.name] = (int(arg[:2], 16), int(arg[2:4], 16), int(arg[4:6], 16))
         elif isinstance(param.annotation, range):
             if int(arg) not in param.annotation:
                 bottle.abort(403, 'Parameter {} must be in {}'.format(param.name, param.annotation))
             decoded_args[param.name] = int(arg)
         else:
              raise TypeError('The decode_args function is not implemented for the argument type {!r}'.format(param.annotation))
     return f(**decoded_args)
Code example #13
File: compat.py Project: JohnReeves/hypothesis
def signature_argspec(f):
    from inspect import signature, Parameter, _empty

    try:
        if NO_ARGSPEC:
            sig = signature(f, follow_wrapped=False)
        else:
            sig = signature(f)
    except ValueError:
        raise TypeError("unsupported callable")
    args = list(
        k for k, v in sig.parameters.items() if v.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
    )
    varargs = None
    keywords = None
    for k, v in sig.parameters.items():
        if v.kind == Parameter.VAR_POSITIONAL:
            varargs = k
        elif v.kind == Parameter.VAR_KEYWORD:
            keywords = k
    defaults = []
    for a in reversed(args):
        default = sig.parameters[a].default
        if default is _empty:
            break
        else:
            defaults.append(default)
    if defaults:
        defaults = tuple(reversed(defaults))
    else:
        defaults = None
    return FakeArgSpec(args, varargs, keywords, defaults)
Code example #14
File: __init__.py Project: pombredanne/obiwan
def _check_function_template(obj, template, ctx=""):
    raise ObiwanError("checking function templates is not supported yet")
    obj_ret = None
    if inspect.isclass(obj): # classes are callable too
        obj_ret = obj
        obj = obj.__init__
    if not inspect.isfunction(obj):
        raise ObiwanException("%s %s is not a function" % (ctx, obj))
    try:
        expect = inspect.signature(template)
    except TypeError:
        raise ObiwanException("%s template %s is not a function" % (ctx, template))
    except ValueError:
        raise ObiwanException("%s cannot extract signature from template %s" % (ctx, template))
    try:
        got = inspect.signature(obj)
    except (TypeError, ValueError):
        raise ObiwanException("%s cannot extract signature from %s" % (ctx, obj))
    expect_return = expect.return_annotation if expect.return_annotation is not inspect.Signature.empty else None
    got_return = obj_ret or (got.return_annotation if got.return_annotation is not inspect.Signature.empty else None)
    if bool(expect_return) != bool(got_return) and expect_return is not any:
        raise ObiwanException("%s function %s does not return %s" % (ctx, got, expect_return))
    ##### We need to check signatures not values
    if expect_return and expect_return is not any:
        duckable(got_return, expect_return, "%s return" % ctx)
    expect_params = expect.parameters.values()
    got_params = got.parameters.values()
    if len(expect_params) != len(got_params):
        raise ObiwanException("%s function %s mismatches %s" % (ctx, got, expect))
    for i, (g, e) in enumerate(zip(got_params, expect_params)):
        if e.annotation is inspect.Parameter.empty or e.annotation is any:
            continue
        if g.annotation is inspect.Parameter.empty:
            raise ObiwanException("%s function %s parameter %s should be %s but is unannotated" % (ctx, got, i, e.annotation))
        duckable(g.annotation, e.annotation, "%s function %s parameter %d" % (ctx, got, i))
Code example #15
def inspect_missing_functions(original_type, target_type):
    """
    Find functions which exist in original_type but not in target_type,
    or the signature is modified.

    :return: a list of (name, signature) tuples for functions missing from target_type,
             and a list of (name, original signature, target signature) tuples for
             functions whose signature differs.
    """
    missing = []
    modified = []

    for name, func in inspect.getmembers(original_type, inspect.isfunction):
        # Skip the private attributes
        if name.startswith('_'):
            continue

        original_signature = inspect.signature(func, follow_wrapped=True)

        if hasattr(target_type, name):
            f = getattr(target_type, name)
            if inspect.isfunction(f):
                target_signature = inspect.signature(f)
                if str(original_signature) != str(target_signature):
                    modified.append((name, original_signature, target_signature))
                continue

        missing.append((name, original_signature))

    return missing, modified
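A small, hypothetical comparison showing the shape of the return value:

class Original:
    def ping(self, timeout=1): ...
    def pong(self): ...

class Target:
    def ping(self, timeout=1, retries=0): ...

missing, modified = inspect_missing_functions(Original, Target)
# missing  -> [('pong', <Signature (self)>)]
# modified -> [('ping', <Signature (self, timeout=1)>, <Signature (self, timeout=1, retries=0)>)]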
Code example #16
File: pdb.py Project: holycrepe/pdbpp
    def do_inspect(self, arg):
        obj = self._getval(arg)

        data = OrderedDict()
        data['Type'] = type(obj).__name__
        data['String Form'] = str(obj).strip()
        if hasattr(obj, '__len__'):
            data['Length'] = len(obj)
        try:
            data['File'] = inspect.getabsfile(obj)
        except TypeError:
            pass

        if (isinstance(obj, type)
                and hasattr(obj, '__init__')
                and getattr(obj, '__module__') != '__builtin__'):
            # Class - show definition and docstring for constructor
            data['Docstring'] = obj.__doc__
            data['Constructor information'] = ''
            try:
                data[' Definition'] = '%s%s' % (arg, signature(obj))
            except ValueError:
                pass
            data[' Docstring'] = obj.__init__.__doc__
        else:
            try:
                data['Definition'] = '%s%s' % (arg, signature(obj))
            except (TypeError, ValueError):
                pass
            data['Docstring'] = obj.__doc__

        for key, value in data.items():
            formatted_key = Color.set(Color.red, key + ':')
            self.stdout.write('%-28s %s\n' % (formatted_key, value))
Code example #17
    def test_get_final_value(self):
        mapping = self._settings_mapping
        v = mapping.get_final_value('enabled', fparams=None)
        self.assertEqual(v, True)

        mapping['enabled'] = 'enabled_kwd='
        d = {'enabled_kwd': 17}
        v = mapping.get_final_value('enabled', d, fparams=None)
        self.assertEqual(v, 17)

        def f(a, enabled_kwd=3):
            pass

        fparams = inspect.signature(f).parameters
        v = mapping.get_final_value('enabled', fparams=fparams)
        self.assertEqual(v, 3)

        def g(a, wrong_kwd='nevermind'):
            pass
        gparams = inspect.signature(g).parameters
        v = mapping.get_final_value('enabled', fparams=gparams)
        self.assertEqual(v, False)

        def h(a, enabled_kwd=[]):
            pass
        hparams = inspect.signature(h).parameters
        v = mapping.get_final_value('enabled', fparams=hparams)
        self.assertEqual(v, False)
Code example #18
File: ObjectWrapper.py Project: mentaal/aio-rpc
    def __init__(self, *, obj, loop, whitelist=None, blacklist=None,
            executor=ThreadPoolExecutor, timeout=5):
        '''Initialize what methods are exposed. Also initialize an executor to
        run the object's methods in. This is because they could be blocking and
        calling these directly would drastically affect the reactivity of the
        event loop.

        Args:
            obj (object): The object to expose

            loop (asyncio event loop): The event loop to use when scheduling a
            function

        Kwargs:
            whitelist (iterable): A list of methods to expose
            blacklist (iterable): A list of methods not to expose
            timeout (int): The timeout value to wait for the executed functions
            to return
            executor (ProcessPoolExecutor or ThreadPoolExecutor): The executor
            upon which to execute the object's functions.

        Whitelist and blacklist are mutually exclusive. Only use one of
        these!

        >>> loop = asyncio.get_event_loop()
        >>> class t():
        ...     def func(self):
        ...         pass
        >>> a = ObjectWrapper(obj=t(), loop=loop, whitelist=['func'])

        '''

        self._obj = obj
        self._loop = loop

        self.__add_executor(loop, executor=executor)

        obj_methods = getmembers(obj, ismethod)

        self._funcs = {}
        self._func_sigs = {}

        if whitelist is not None:
            for func_name,func in obj_methods:
                if func_name in whitelist:
                    self._funcs[func_name] = func_caller(func, loop, timeout)
                    self._func_sigs[func_name] = signature(func)
        elif blacklist is not None:
            for func_name,func in obj_methods:
                if func_name in blacklist or func_name[0] == '_':
                    continue
                self._funcs[func_name] = func_caller(func, loop, timeout)
                self._func_sigs[func_name] = signature(func)
        else:
            for func_name,func in obj_methods:
                if func_name[0] == '_':
                    continue
                self._funcs[func_name] = func_caller(func, loop, timeout)
                self._func_sigs[func_name] = signature(func)
Code example #19
File: test_argspec.py Project: bastibe/PySoundFile
def test_order_of_blocks_arguments():
    from inspect import signature

    # remove 'self':
    meth_args = list(signature(sf.SoundFile.blocks).parameters)[1:]
    meth_args[3:3] = ['start', 'stop']
    func_args = list(signature(sf.blocks).parameters)
    assert func_args[:10] == ['file'] + meth_args
Code example #20
File: rpc.py Project: Dobatymo/cerpcerus
def call_and_catch_signature_error(_attr, *args, **kwargs):
	try:
		return _attr(*args, **kwargs)
	except TypeError as e:
		try:
			signature(_attr).bind(*args, **kwargs) # do sig test only in error case to preserve resources on successful calls
			raise
		except TypeError:
			raise RPCInvalidArguments(e)
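A hypothetical illustration of the distinction this helper draws (RPCInvalidArguments comes from the surrounding cerpcerus module):

def add(a, b):
    return a + b

call_and_catch_signature_error(add, 1, 2)     # returns 3
call_and_catch_signature_error(add, 1, 2, 3)  # bind() also fails -> RPCInvalidArguments
call_and_catch_signature_error(add, 1, "x")   # bind() succeeds -> original TypeError re-raised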
Code example #21
File: controller.py Project: em-2/em2
 def _check_arguments(func, std_arg_name, kwargs):
     """
     Check that the kwargs passed match the signature of the function. This is a slight hack, but it is required since
     we can't catch argument mismatches without catching all TypeErrors, which is worse.
     """
     try:
         inspect.signature(func).bind(**{std_arg_name: None}, **kwargs)
     except TypeError as e:
         raise BadDataException('{}: {}'.format(func.__qualname__, e.args[0])) from e
Code example #22
    def init_array(self, name, ary, value):
        # No defaults are supplied
        if value is None:
            ary.fill(0)
        # The array is defaulted with some function
        elif isinstance(value, types.MethodType):
            try:
                signature(value).bind(self, ary)
            except TypeError:
                raise TypeError(('The signature of the function supplied '
                    'for setting the value on array %s is incorrect. '
                    'The function signature has the form f(slvr, ary), '
                    'where f is some function that will set values '
                    'on the array, slvr is a Solver object which provides ' 
                    'useful information to the function, '
                    'and ary is the NumPy array which must be '
                    'initialised with values.') % (name))

            returned_ary = value(self, ary)

            if returned_ary is not None:
                ary[:] = returned_ary
        elif isinstance(value, types.LambdaType):
            try:
                signature(value).bind(self, ary)
            except TypeError:
                raise TypeError(('The signature of the lambda supplied '
                    'for setting the value on array %s is incorrect. '
                    'The function signature has the form lambda slvr, ary:, '
                    'where lambda provides functionality for setting values '
                    'on the array, slvr is a Solver object which provides ' 
                    'useful information to the function, '
                    'and ary is the NumPy array which must be '
                    'initialised with values.') % (name))

            returned_ary = value(self, ary)

            if returned_ary is not None:
                ary[:] = returned_ary
        # Got an ndarray, try set it equal
        elif isinstance(value, np.ndarray):
            try:
                ary[:] = value
            except BaseException as e:
                raise ValueError(('Tried to assign array %s with '
                    'value NumPy array, but this failed '
                    'with %s') % (name, repr(e)))
        # Assume some sort of value has been supplied
        # Give it to NumPy
        else:
            try:
                ary.fill(value)
            except BaseException as e:
                raise ValueError(('Tried to fill array %s with '
                    'value %s, but NumPy\'s fill function '
                    'failed with %s') % (name, value, repr(e)))
Code example #23
def replace_method(the_class, class_method_name, new_method):
    class_method = getattr(the_class, class_method_name)

    old_signature = set(inspect.signature(class_method).parameters)
    new_signature = set(inspect.signature(new_method).parameters)

    if old_signature < new_signature:  # only patch if SUBSET of parameters
        setattr(the_class, class_method_name, new_method)
        logger.info("{} replaced with {}".format(
            class_method_name, new_method.__name__))
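A hypothetical monkey-patching sketch (it assumes a logger is configured in the module, since the helper above logs the replacement):

class Greeter:
    def hello(self, name):
        return "hi " + name

def hello_loud(self, name, shout=False):
    text = "hi " + name
    return text.upper() if shout else text

# {'self', 'name'} is a strict subset of {'self', 'name', 'shout'}, so the patch is applied
replace_method(Greeter, "hello", hello_loud)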
Code example #24
File: tools.py Project: gb119/Stoner-PythonCode
def fix_signature(proxy_func, wrapped_func):
    """Tries to update proxy_func to have a signature that matches the wrapped func."""
    try:
        proxy_func.__wrapped__.__signature__ = inspect.signature(wrapped_func)
    except AttributeError:  # Non-critical error
        try:
            proxy_func.__signature__ = inspect.signature(wrapped_func)
        except AttributeError:
            pass
    return proxy_func
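A small sketch of the effect on introspection (hypothetical functions for illustration):

import inspect

def real(a, b, *, scale=1.0):
    return (a + b) * scale

def proxy(*args, **kwargs):
    return real(*args, **kwargs)

# proxy has no __wrapped__, so fix_signature falls back to setting proxy.__signature__
fix_signature(proxy, real)
print(inspect.signature(proxy))  # (a, b, *, scale=1.0)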
Code example #25
File: test_signature.py Project: aepsil0n/obsub
def test_signature():
    # Define a test class and an event handler
    a = A()

    # signature is preserved: (!!)
    assert signature(a.on_blubb) == signature(on_blubb)

    # NOTE: we even got the exact object as default parameter, not only an
    # exact copy:
    assert a.on_blubb(with_kwonlyarg="xyz") is referenced
Code example #26
File: plugin.py Project: Forkk/vlyc2
	def __init__(self, name, bases, dict):
		super(_PythonSitePluginMeta, self).__init__(name, bases, dict)
		if bases == ():
			return # the original SitePlugin class
		for name in "name", "author", "rev":
			if name not in self.__dict__:
				raise TypeError("SitePlugin subclasses must define the 'name:str', 'author:str' and 'rev:int' properties")
		obj = self()
		for name in "forUrl", "video":
			inspect.signature(getattr(obj, name)).bind("singleArg")
		_state.register_site(obj)
Code example #27
File: clscommand.py Project: HSpear/clize
 def __init__(self):
     self.__signature__ = forwards(
         signature(self.__call__),
         merge(
             signature(self.do),
             signature(self.prepare),
             signature(self.status)
         )
     )
     self._sigtools__wrappers = (
         self.do, self.prepare, self.status, self.__call__)
Code example #28
def has_request_args(fn):
	sig = inspect.signature(fn)
	params = inspect.signature(fn).parameters
	found = False
	for name, param in params.items():
		if name == 'request':
			found = True
			continue
		if found and (param.kind != inspect.Parameter.VAR_POSITIONAL and param.kind != inspect.Parameter.KEYWORD_ONLY and param.kind != inspect.Parameter.VAR_KEYWORD):
			raise ValueError('request parameter must be the last named parameter in function: %s%s' % (fn.__name__, str(sig)))
	return found
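Two hypothetical handlers showing the rule this check enforces: 'request' must be the last named parameter, and anything after it may only be *args, keyword-only, or **kwargs:

def ok_handler(id, request, *args, **kw):
    pass

def bad_handler(request, id):
    pass

has_request_args(ok_handler)   # True
has_request_args(bad_handler)  # raises ValueError: 'id' follows 'request' as a named parameter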
Code example #29
    def authenticate(self, request, remote_user, shib_meta):
        """
        The username passed as ``remote_user`` is considered trusted.  This
        method simply returns the ``User`` object with the given username,
        creating a new ``User`` object if ``create_unknown_user`` is ``True``.

        Returns None if ``create_unknown_user`` is ``False`` and a ``User``
        object with the given username is not found in the database.
        """
        if not remote_user:
            return
        User = get_user_model()
        username = self.clean_username(remote_user)
        field_names = [x.name for x in User._meta.get_fields()]
        shib_user_params = dict([(k, shib_meta[k]) for k in field_names if k in shib_meta])
        # Note that this could be accomplished in one try-except clause, but
        # instead we use get_or_create when creating unknown users since it has
        # built-in safeguards for multiple threads.
        if self.create_unknown_user:
            user, created = User.objects.get_or_create(username=username, defaults=shib_user_params)
            if created:
                """
                @note: setting password for user needs on initial creation of user instead of after auth.login() of middleware.
                because get_session_auth_hash() returns the salted_hmac value of salt and password.
                If it remains after the auth.login() it will return a different auth_hash
                than what's stored in session "request.session[HASH_SESSION_KEY]".
                Also we don't need to update the user's password everytime he logs in.
                """
                user.set_unusable_password()
                user.save()
                args = (request, user)
                try:
                    inspect.signature(self.configure_user).bind(request, user)
                except AttributeError:
                    # getcallargs required for Python 2.7 support, deprecated after 3.5
                    try:
                        inspect.getcallargs(self.configure_user, request, user)
                    except TypeError:
                        args = (user,)
                except TypeError:
                    args = (user,)
                user = self.configure_user(*args)
        else:
            try:
                user = User.objects.get(username=username)
            except User.DoesNotExist:
                return
        # After receiving a valid user, we update the user attributes according to the shibboleth
        # parameters. Otherwise the parameters (like mail address, sure_name or last_name) will always
        # be the initial values from the first login. Only save the user object if there are any changes.
        if not min([getattr(user, k) == v for k, v in shib_user_params.items()]):
            user.__dict__.update(**shib_user_params)
            user.save()
        return user if self.user_can_authenticate(user) else None
Code example #30
File: meta.py Project: hiway/curio
    def decorate(asyncfunc):
        if inspect.signature(syncfunc) != inspect.signature(asyncfunc):
            raise TypeError('%s and async %s have different signatures' %
                            (syncfunc.__name__, asyncfunc.__name__))

        @wraps(asyncfunc)
        def wrapper(*args, **kwargs):
            if _from_coroutine():
                return asyncfunc(*args, **kwargs)
            else:
                return syncfunc(*args, **kwargs)
        return wrapper
Code example #31
File: orttrainer.py Project: miketartar/onnxruntime
    def _convert_torch_model_loss_fn_to_onnx(self, inputs, device):
        # Dynamic axes
        dynamic_axes = {}
        for input in self.model_desc.inputs:
            symbolic_axis = {}
            for i, axis in enumerate(input.shape):
                if isinstance(axis, str):
                    symbolic_axis[i] = axis
            if len(symbolic_axis):
                dynamic_axes[input.name] = symbolic_axis
        for output in self.model_desc.outputs:
            symbolic_axis = {}
            for i, axis in enumerate(output.shape):
                if isinstance(axis, str):
                    symbolic_axis[i] = axis
            if len(symbolic_axis):
                dynamic_axes[output.name] = symbolic_axis

        if isinstance(inputs, torch.Tensor):
            inputs = [inputs]
        if isinstance(inputs, dict):
            sample_inputs = [
                inputs[k.name_].to(device=device)
                for k in self.model_desc.inputs
            ]
        elif isinstance(inputs, (list, tuple)):
            sample_inputs = [
                input.to(device=device) for i, input in enumerate(inputs)
                if i < len(self.model_desc.inputs)
            ]
        else:
            raise RuntimeError(
                "Unexpected input type. Only torch.Tensor, or dict/list/tuple of torch.Tensor is supported."
            )

        # PyTorch ONNX exporter does not match argument names
        # This is an issue because the ONNX graph depends on all inputs to be specified

        # Validate loss_fn
        if self.loss_fn:
            sig_loss = signature(self.loss_fn)
            if len(sig_loss.parameters) != 2:
                raise RuntimeError(
                    "loss function should take two arguments - predict and label."
                )

        # Basic input names from model
        input_names = [input.name for input in self.model_desc.inputs]
        sig = signature(self._torch_model.forward)
        ordered_input_list = list(sig.parameters.keys())

        # Label from loss_fn goes after model input
        if self.loss_fn:
            ordered_input_list = [
                *ordered_input_list,
                list(sig_loss.parameters.keys())[1]
            ]

        class CombineTorchModelLossFnWrapInput(torch.nn.Module):
            def __init__(self, model, loss_fn, input_names):
                super().__init__()
                self.model = model
                self.loss_fn = loss_fn
                self.input_names = input_names

            def forward(self, *inputs):
                sig = signature(self.model.forward)

                input_dict = {}
                for key in sig.parameters.keys():
                    if key in self.input_names:
                        input_dict[key] = inputs[self.input_names.index(key)]

                model_out = self.model(**input_dict)
                if self.loss_fn is None:
                    return model_out

                label = inputs[-1]
                preds = model_out
                return self.loss_fn(preds, label), preds

        model = CombineTorchModelLossFnWrapInput(self._torch_model,
                                                 self.loss_fn, input_names)

        # Do an inference to grab output types
        model.eval()
        with torch.no_grad():
            # Deepcopy inputs, since input values may change after model run.
            sample_inputs_copy = copy.deepcopy(sample_inputs)
            try:
                # Deepcopy model, in case model is stateful and changes after model run.
                model_copy = copy.deepcopy(model)
            except Exception:
                model_copy = model
                warnings.warn(
                    "This model cannot be deep copied (or pickled), which is a required step for stateful models to be properly exported to ONNX."
                    " Compute will continue, but unexpected results may occur!"
                )
            sample_outputs = model_copy(*sample_inputs_copy)
            self.torch_sample_outputs = sample_outputs
        model.train()

        if isinstance(sample_outputs, torch.Tensor):
            sample_outputs = [sample_outputs]

        # Append 'dtype' for model description's inputs/outputs
        for idx_i, sample_input in enumerate(sample_inputs):
            if idx_i < len(self.model_desc.inputs):
                self.model_desc.add_type_to_input_description(
                    idx_i, sample_input.dtype)
        for idx_o, sample_output in enumerate(sample_outputs):
            if idx_o < len(self.model_desc.outputs):
                self.model_desc.add_type_to_output_description(
                    idx_o, sample_output.dtype)

        # Export the model to ONNX
        f = io.BytesIO()

        # Deepcopy inputs, since input values may change after model run.
        sample_inputs_copy = copy.deepcopy(sample_inputs)

        # Handle contrib OPs support
        from onnxruntime.training import register_custom_ops_pytorch_exporter
        if self.options._internal_use.enable_onnx_contrib_ops:
            # Enable contrib ops export from PyTorch
            register_custom_ops_pytorch_exporter.register_custom_op()
        else:
            # Unregister contrib ops, if they were registered in previous calls
            register_custom_ops_pytorch_exporter.unregister_custom_op()

        # Export torch.nn.Module to ONNX
        torch.onnx._export(
            model,
            tuple(sample_inputs_copy),
            f,
            input_names=[input.name for input in self.model_desc.inputs],
            output_names=[output.name for output in self.model_desc.outputs],
            opset_version=self.options._internal_use.onnx_opset_version,
            dynamic_axes=dynamic_axes,
            _retain_param_name=True,
            example_outputs=tuple(sample_outputs),
            do_constant_folding=False,
            training=torch.onnx.TrainingMode.TRAINING)
        onnx_model = onnx.load_model_from_string(f.getvalue())

        # Remove 'model.' prefix introduced by CombineTorchModelLossFn class
        if isinstance(model, CombineTorchModelLossFnWrapInput):
            replace_name_dict = {}
            for n in onnx_model.graph.initializer:
                if n.name.startswith('model.'):
                    replace_name_dict[n.name] = n.name[len('model.'):]
                    n.name = replace_name_dict[n.name]
            for n in onnx_model.graph.node:
                for i, name in enumerate(n.input):
                    if name in replace_name_dict:
                        n.input[i] = replace_name_dict[name]

        return onnx_model
Code example #32
# -*- coding: utf-8 -*-
import inspect
#def a(a, b=0, *c, d, e=1, **f):
def a(a, **f):
    pass
aa = inspect.signature(a)
print("inspect.signature(fn)是:%s" % aa)
print("inspect.signature(fn)的类型:%s" % (type(aa)))
print("\n")
Code example #33
File: primitives.py Project: harmsm/gpgraph
def _nx_wrapper(nx_function, ax, **kwargs):
    """
    This is a wrapper for NetworkX functions. NetworkX graphing functions have
    many parameters that change relatively often. This wrapper is an attempt at
    future-proofing. It uses `inspect` to figure out what parameters the
    function expects and then only passes parameters that will be recognized.
    If a parameter is not recognized, the function throws a warning which
    will hopefully alert the user to changes in the NetworkX API without
    causing our package to crash.  This function throws a ValueError if a
    required argument is not passed.

    Parameters
    ----------
    nx_function : function
        a NetworkX function to wrap.
    ax : matplotlib.axes object
        axis on which to do plotting. This low-level function assumes you've
        already made the ax in some other context.
    **kwargs :
        kwargs arguments to pass to that function.

    returns ax, nx_fcn_return
    """

    required_args = {}
    args_with_defaults = {}

    # Figure out what networkx function expects
    fcn_sig = inspect.signature(nx_function)
    for p in fcn_sig.parameters:
        if fcn_sig.parameters[p].default is fcn_sig.parameters[p].empty:
            required_args[p] = None
        else:
            args_with_defaults[p] = fcn_sig.parameters[p]

    # Go through the kwargs passed in and construct input_kwargs.
    input_kwargs = {}
    input_kwargs["ax"] = ax

    bad_keys = []
    for k in kwargs:

        # First see if this is a required argument without a default.
        try:
            required_args.pop(k)
            input_kwargs[k] = kwargs[k]
            continue

        except KeyError:
            pass

        # Next see if this is an argument that has a default.
        try:
            args_with_defaults.pop(k)
            input_kwargs[k] = kwargs[k]
            continue

        except KeyError:
            pass

        bad_keys.append(k)

    # Make sure all required arguments were passed
    if len(required_args) > 0:
        err = f"Not all required arguments passed to '{nx_function}'\n"
        err += f"\nYour networkx version is {nx.__version__}\n"
        err += f"Missing required arguments are:\n"
        for a in required_args:
            err += f"    {a}\n"
        err += "\n"
        raise ValueError(err)

    # Warn if arguments were passed that were not recognized.
    if len(bad_keys) > 0:
        w = f"Not all keyword arguments were recognized by '{nx_function}'\n"
        w += f"\nYour networkx version is {nx.__version__}\n"
        w += f"Unrecognized arguments are:\n"
        for a in bad_keys:
            w += f"    {a}\n"
        w += "\n"

        if len(args_with_defaults) > 0:
            w += "\nNot all possible arguments were supplied. Did you mean to \n"
            w += "pass one of these (default given in parentheses)?\n"
            for a in args_with_defaults:
                w += f"    {a} ({args_with_defaults[a].default})\n"
            w += "\n\n"

        warnings.warn(w)

    return ax, nx_function(**input_kwargs)
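A hypothetical call through the wrapper (exact parameter names depend on your networkx version, so treat this only as a sketch):

import matplotlib.pyplot as plt
import networkx as nx

G = nx.path_graph(4)
pos = nx.spring_layout(G)
fig, ax = plt.subplots()
# 'G' and 'pos' satisfy the required arguments; 'node_color' matches a defaulted one
ax, nodes = _nx_wrapper(nx.draw_networkx_nodes, ax, G=G, pos=pos, node_color="red")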
Code example #34
File: network.py Project: Tenoke/stylegan
    def _init_graph(self) -> None:
        # Collect inputs.
        self.input_names = []

        for param in inspect.signature(self._build_func).parameters.values():
            if param.kind == param.POSITIONAL_OR_KEYWORD and param.default is param.empty:
                self.input_names.append(param.name)

        self.num_inputs = len(self.input_names)
        assert self.num_inputs >= 1

        # Choose name and scope.
        if self.name is None:
            self.name = self._build_func_name
        assert re.match("^[A-Za-z0-9_.\\-]*$", self.name)
        with tf.name_scope(None):
            self.scope = tf.get_default_graph().unique_name(self.name,
                                                            mark_as_used=True)

        # Finalize build func kwargs.
        build_kwargs = dict(self.static_kwargs)
        build_kwargs["is_template_graph"] = True
        build_kwargs["components"] = self.components

        # Build template graph.
        with tfutil.absolute_variable_scope(
                self.scope, reuse=tf.AUTO_REUSE), tfutil.absolute_name_scope(
                    self.scope):  # ignore surrounding scopes
            assert tf.get_variable_scope().name == self.scope
            assert tf.get_default_graph().get_name_scope() == self.scope
            with tf.control_dependencies(
                    None):  # ignore surrounding control dependencies
                self.input_templates = [
                    tf.placeholder(tf.float32, name=name)
                    for name in self.input_names
                ]
                out_expr = self._build_func(*self.input_templates,
                                            **build_kwargs)

        # Collect outputs.
        assert tfutil.is_tf_expression(out_expr) or isinstance(out_expr, tuple)
        self.output_templates = [
            out_expr
        ] if tfutil.is_tf_expression(out_expr) else list(out_expr)
        self.num_outputs = len(self.output_templates)
        assert self.num_outputs >= 1
        assert all(tfutil.is_tf_expression(t) for t in self.output_templates)

        # Perform sanity checks.
        if any(t.shape.ndims is None for t in self.input_templates):
            raise ValueError(
                "Network input shapes not defined. Please call x.set_shape() for each input."
            )
        if any(t.shape.ndims is None for t in self.output_templates):
            raise ValueError(
                "Network output shapes not defined. Please call x.set_shape() where applicable."
            )
        if any(not isinstance(comp, Network)
               for comp in self.components.values()):
            raise ValueError(
                "Components of a Network must be Networks themselves.")
        if len(self.components) != len(
                set(comp.name for comp in self.components.values())):
            raise ValueError("Components of a Network must have unique names.")

        # List inputs and outputs.
        self.input_shapes = [
            tfutil.shape_to_list(t.shape) for t in self.input_templates
        ]
        self.output_shapes = [
            tfutil.shape_to_list(t.shape) for t in self.output_templates
        ]
        self.input_shape = self.input_shapes[0]
        self.output_shape = self.output_shapes[0]
        self.output_names = [
            t.name.split("/")[-1].split(":")[0] for t in self.output_templates
        ]

        # List variables.
        self.own_vars = OrderedDict(
            (var.name[len(self.scope) + 1:].split(":")[0], var)
            for var in tf.global_variables(self.scope + "/"))
        self.vars = OrderedDict(self.own_vars)
        self.vars.update((comp.name + "/" + name, var)
                         for comp in self.components.values()
                         for name, var in comp.vars.items())
        self.trainables = OrderedDict(
            (name, var) for name, var in self.vars.items() if var.trainable)
        self.var_global_to_local = OrderedDict(
            (var.name.split(":")[0], name) for name, var in self.vars.items())
Code example #35
    def handle_event(self, event: CommandRequestEvent,
                     kafka_service: KafkaService):
        # Safety check
        payload = event.payload
        if not payload or not isinstance(payload, CommandRequestPayload):
            logger.info("Wrong payload passed")
            return

        command = event.payload.command
        args = event.payload.args

        logger.info(f"Command {command} called with {args}")

        if not hasattr(self.cmd_object, command):
            # Maybe another callback in the chain will be able to handle it
            logger.info("No command method found")
            return
        method = getattr(self.cmd_object, command)

        if callable(method):
            sig = signature(method)

            result = None
            code = KafkaCommandCallback.CODE_SUCCESS
            msg = self._get_message(KafkaCommandCallback.OK, command)

            pos_args = []
            kw_args = {}

            if isinstance(args, list):
                pos_args = tuple(args)
            elif isinstance(args, dict):
                kw_args = dict(args)

            if "event" in sig.parameters:
                kw_args["event"] = event
            if "kafka_service" in sig.parameters:
                kw_args["kafka_service"] = kafka_service

            params = OrderedDict(sig.parameters)

            # Remove positionals
            orderparams = list(params)
            for i in range(0, min(len(pos_args), len(orderparams))):
                del params[orderparams[i]]

            # Remove named
            for k in kw_args:
                if k in params:
                    del params[k]

            from_event = vars(event)
            for key in from_event.keys() & params.keys():
                kw_args[key] = from_event[key]
            try:
                logger.info(f"executing {command}({pos_args}, {kw_args})")
                result = method(*pos_args, **kw_args)
            except TypeError as e:
                logger.exception("Method invocation failed")
                code = KafkaCommandCallback.CODE_CLIENT_ERROR
                msg = self._get_message(KafkaCommandCallback.INVALID_ARGS,
                                        command, str(e))
            except Exception as e:
                logger.exception("Command failed")
                code = KafkaCommandCallback.CODE_COMMAND_ERROR
                msg = self._get_message(KafkaCommandCallback.UNKNOWN_ERROR,
                                        command, str(e))

            response_payload = CommandResponsePayload(code, msg, event, result)
            response_event = create_from_object(CommandResponseEvent,
                                                event,
                                                payload=response_payload)
            kafka_service.send_event(response_event, self.cmd_return_topic)
        else:
            logger.info(f"Uncallable {command} member requested")
Code example #36
    def create(
            cls,
            obj: Any,
            class_from_init: bool,
            ignore_dunder: bool,
            ignore_private: bool,
            ignore_inherited: bool,
            importing_module: Optional[str] = None,
            prefer_docstring: bool = True
    ) -> ClassDescriptor:
        """Create a class

        Args:
            obj (Any): The class
            class_from_init (bool): If True take the docstring from the init function
            ignore_dunder (bool): If True ignore
                <span>&#95;&#95;</span>XXX<span>&#95;&#95;</span> functions
            ignore_private (bool): If True ignore private methods (those
                prefixed <span>&#95;</span>XXX)
            ignore_inherited (bool): If True ignore inherited methods
            importing_module (Optional[str], optional): The importing module, defaults to None
            prefer_docstring (bool): If true prefer the docstring.

        Returns:
            ClassDescriptor: The class descriptor
        """
        is_named_tuple = is_named_tuple_type(obj)

        valid_names: List[str] = []
        valid_names.extend(getattr(obj, '__dict__', {}).keys())
        valid_names.extend(getattr(obj, '__slots__', []))

        members: Dict[str, Any] = {
            name: value
            for name, value in inspect.getmembers(obj)
            if not ignore_inherited or name in valid_names
        }

        docstring = _get_docstring(obj, members, class_from_init, is_named_tuple)
        name = obj.__qualname__ if hasattr(
            obj, '__qualname__') else obj.__name__
        summary = docstring.short_description if docstring else None
        description = docstring.long_description if docstring else None

        attributes: List[ArgumentDescriptor] = []
        if docstring:
            attrs = [
                (meta.args[1], meta.description)
                for meta in docstring.meta
                if 'attribute' in meta.args
            ]
            for attr_details, attr_desc in attrs:
                attr_name, _sep, attr_type = attr_details.partition(' ')
                attr_type = attr_type.strip('()')
                attributes.append(
                    ArgumentDescriptor(attr_name, attr_type, attr_desc)
                )
        properties: List[PropertyDescriptor] = []
        methods: List[CallableDescriptor] = []
        class_methods: List[CallableDescriptor] = []
        for member_name, member in members.items():
            if member_name == '__init__' or (
                    ignore_dunder and
                    member_name.startswith('__') and
                    member_name.endswith('__')
            ) or (ignore_private and member_name.startswith('_')):
                continue

            if member.__class__ is property:
                properties.append(
                    PropertyDescriptor.create(
                        member,
                        obj,
                        member_name
                    )
                )
            elif inspect.isfunction(member):
                # Instance methods
                methods.append(
                    CallableDescriptor.create(
                        member,
                        callable_type=CallableType.METHOD,
                        prefer_docstring=prefer_docstring,
                        qualifier=name
                    )
                )
            elif inspect.ismethod(member):
                # Class methods
                class_methods.append(
                    CallableDescriptor.create(
                        member,
                        callable_type=CallableType.CLASS_METHOD,
                        prefer_docstring=prefer_docstring,
                        qualifier=name
                    )
                )

        examples: Optional[List[str]] = [
            meta.description
            for meta in docstring.meta
            if 'examples' in meta.args
        ] if docstring is not None else None

        module_obj = inspect.getmodule(obj)
        module = importing_module or obj.__module__
        package = module_obj.__package__ if module_obj else None
        file = make_file_relative(
            module_obj.__file__
            if module_obj and hasattr(module_obj, '__file__')
            else None
        )

        bases = [
            ClassDescriptor.create(
                base,
                class_from_init=class_from_init,
                ignore_dunder=ignore_dunder,
                ignore_private=ignore_private,
                ignore_inherited=ignore_inherited,
                importing_module=importing_module,
                prefer_docstring=prefer_docstring
            )
            for base in getattr(obj, '__bases__', [])
            if base is not object
        ]

        try:
            signature = inspect.signature(obj)
            constructor = CallableDescriptor.create(
                obj,
                signature,
                docstring,
                CallableType.CONSTRUCTOR,
                prefer_docstring=prefer_docstring
            )
        except ValueError:
            constructor = None

        return ClassDescriptor(
            name,
            summary,
            description,
            constructor,
            attributes,
            properties,
            class_methods,
            methods,
            examples,
            module,
            package,
            file,
            bases
        )
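
# Editorial sketch (not part of the descriptor module above): the member classification
# used in ClassDescriptor.create boils down to three checks on each attribute returned by
# inspect.getmembers -- the property type, inspect.isfunction (instance methods) and
# inspect.ismethod (classmethods bound to the class). The Toy class is hypothetical.
import inspect

class Toy:
    @property
    def size(self):
        return 1

    def method(self):
        return "instance"

    @classmethod
    def build(cls):
        return cls()

for member_name, member in inspect.getmembers(Toy):
    if member_name.startswith('__'):
        continue
    if member.__class__ is property:
        print(member_name, '-> property')
    elif inspect.isfunction(member):
        print(member_name, '-> instance method')
    elif inspect.ismethod(member):
        print(member_name, '-> class method')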
コード例 #37
0
ファイル: utils.py プロジェクト: ekand/numpy
def info(object=None, maxwidth=76, output=sys.stdout, toplevel='numpy'):
    """
    Get help information for a function, class, or module.

    Parameters
    ----------
    object : object or str, optional
        Input object or name to get information about. If `object` is a
        numpy object, its docstring is given. If it is a string, available
        modules are searched for matching objects.  If None, information
        about `info` itself is returned.
    maxwidth : int, optional
        Printing width.
    output : file like object, optional
        File like object that the output is written to, default is
        ``stdout``.  The object has to be opened in 'w' or 'a' mode.
    toplevel : str, optional
        Start search at this level.

    See Also
    --------
    source, lookfor

    Notes
    -----
    When used interactively with an object, ``np.info(obj)`` is equivalent
    to ``help(obj)`` on the Python prompt or ``obj?`` on the IPython
    prompt.

    Examples
    --------
    >>> np.info(np.polyval) # doctest: +SKIP
       polyval(p, x)
         Evaluate the polynomial p at x.
         ...

    When using a string for `object` it is possible to get multiple results.

    >>> np.info('fft') # doctest: +SKIP
         *** Found in numpy ***
    Core FFT routines
    ...
         *** Found in numpy.fft ***
     fft(a, n=None, axis=-1)
    ...
         *** Repeat reference found in numpy.fft.fftpack ***
         *** Total of 3 references found. ***

    """
    global _namedict, _dictlist
    # Local import to speed up numpy's import time.
    import pydoc
    import inspect

    if (hasattr(object, '_ppimport_importer') or
           hasattr(object, '_ppimport_module')):
        object = object._ppimport_module
    elif hasattr(object, '_ppimport_attr'):
        object = object._ppimport_attr

    if object is None:
        info(info)
    elif isinstance(object, ndarray):
        _info(object, output=output)
    elif isinstance(object, str):
        if _namedict is None:
            _namedict, _dictlist = _makenamedict(toplevel)
        numfound = 0
        objlist = []
        for namestr in _dictlist:
            try:
                obj = _namedict[namestr][object]
                if id(obj) in objlist:
                    print("\n     "
                          "*** Repeat reference found in %s *** " % namestr,
                          file=output
                          )
                else:
                    objlist.append(id(obj))
                    print("     *** Found in %s ***" % namestr, file=output)
                    info(obj)
                    print("-"*maxwidth, file=output)
                numfound += 1
            except KeyError:
                pass
        if numfound == 0:
            print("Help for %s not found." % object, file=output)
        else:
            print("\n     "
                  "*** Total of %d references found. ***" % numfound,
                  file=output
                  )

    elif inspect.isfunction(object) or inspect.ismethod(object):
        name = object.__name__
        try:
            arguments = str(inspect.signature(object))
        except Exception:
            arguments = "()"

        if len(name+arguments) > maxwidth:
            argstr = _split_line(name, arguments, maxwidth)
        else:
            argstr = name + arguments

        print(" " + argstr + "\n", file=output)
        print(inspect.getdoc(object), file=output)

    elif inspect.isclass(object):
        name = object.__name__
        try:
            arguments = str(inspect.signature(object))
        except Exception:
            arguments = "()"

        if len(name+arguments) > maxwidth:
            argstr = _split_line(name, arguments, maxwidth)
        else:
            argstr = name + arguments

        print(" " + argstr + "\n", file=output)
        doc1 = inspect.getdoc(object)
        if doc1 is None:
            if hasattr(object, '__init__'):
                print(inspect.getdoc(object.__init__), file=output)
        else:
            print(inspect.getdoc(object), file=output)

        methods = pydoc.allmethods(object)

        public_methods = [meth for meth in methods if meth[0] != '_']
        if public_methods:
            print("\n\nMethods:\n", file=output)
            for meth in public_methods:
                thisobj = getattr(object, meth, None)
                methstr = "None"
                if thisobj is not None:
                    methstr, other = pydoc.splitdoc(
                            inspect.getdoc(thisobj) or "None"
                            )
                print("  %s  --  %s" % (meth, methstr), file=output)

    elif hasattr(object, '__doc__'):
        print(inspect.getdoc(object), file=output)
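
# Editorial note: the header printed above for functions and classes is just the object's
# name joined with str(inspect.signature(...)), with "()" as a fallback for callables
# whose signature cannot be introspected. Minimal sketch of that pattern:
import inspect

def _header(obj):
    try:
        arguments = str(inspect.signature(obj))
    except Exception:
        arguments = "()"
    return obj.__name__ + arguments

print(_header(_header))  # "_header(obj)"
print(_header(sorted))   # e.g. "sorted(iterable, /, *, key=None, reverse=False)"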
コード例 #38
0
ファイル: compat.py プロジェクト: Cecijvp/DjangoUdemyCourse
def getfuncargnames(
    function: Callable[..., Any],
    *,
    name: str = "",
    is_method: bool = False,
    cls: Optional[type] = None
) -> Tuple[str, ...]:
    """Returns the names of a function's mandatory arguments.

    This should return the names of all function arguments that:
        * Aren't bound to an instance or type as in instance or class methods.
        * Don't have default values.
        * Aren't bound with functools.partial.
        * Aren't replaced with mocks.

    The is_method and cls arguments indicate that the function should
    be treated as a bound method even though it's not, unless the
    function is a static method (which is only checked when cls is given).

    The name parameter should be the original name in which the function was collected.
    """
    # TODO(RonnyPfannschmidt): This function should be refactored when we
    # revisit fixtures. The fixture mechanism should ask the node for
    # the fixture names, and not try to obtain directly from the
    # function object well after collection has occurred.

    # The parameters attribute of a Signature object contains an
    # ordered mapping of parameter names to Parameter instances.  This
    # creates a tuple of the names of the parameters that don't have
    # defaults.
    try:
        parameters = signature(function).parameters
    except (ValueError, TypeError) as e:
        fail(
            "Could not determine arguments of {!r}: {}".format(function, e),
            pytrace=False,
        )

    arg_names = tuple(
        p.name
        for p in parameters.values()
        if (
            p.kind is Parameter.POSITIONAL_OR_KEYWORD
            or p.kind is Parameter.KEYWORD_ONLY
        )
        and p.default is Parameter.empty
    )
    if not name:
        name = function.__name__

    # If this function should be treated as a bound method even though
    # it's passed as an unbound method or function, remove the first
    # parameter name.
    if is_method or (
        cls and not isinstance(cls.__dict__.get(name, None), staticmethod)
    ):
        arg_names = arg_names[1:]
    # Remove any names that will be replaced with mocks.
    if hasattr(function, "__wrapped__"):
        arg_names = arg_names[num_mock_patch_args(function) :]
    return arg_names
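
# Editorial sketch of the core extraction above, without the pytest internals
# (fail, num_mock_patch_args): keep positional-or-keyword and keyword-only
# parameters that have no default, and drop the first name for bound-method use.
from inspect import Parameter, signature

def mandatory_arg_names(func, *, is_method=False):
    names = tuple(
        p.name
        for p in signature(func).parameters.values()
        if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)
        and p.default is Parameter.empty
    )
    return names[1:] if is_method else names

def fixture_like(a, b=1, *, c, d=2):
    pass

print(mandatory_arg_names(fixture_like))                   # ('a', 'c')
print(mandatory_arg_names(fixture_like, is_method=True))   # ('c',)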
コード例 #39
0
ファイル: coroweb.py プロジェクト: looklzj/k3fbvj21s3v0rm3e
def has_named_kw_args(fn):
    # Return True if fn declares any keyword-only parameter, else False.
    params = inspect.signature(fn).parameters
    for name, param in params.items():
        if param.kind == inspect.Parameter.KEYWORD_ONLY:
            return True
    return False
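
# Minimal usage sketch (assumes has_named_kw_args above is in scope): only parameters
# declared after a bare * or *args count as keyword-only.
def handler_a(request, *, page=1):
    pass

def handler_b(request, page=1):
    pass

print(has_named_kw_args(handler_a))  # True
print(has_named_kw_args(handler_b))  # False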
コード例 #40
0
    def fit(self, X, y, sample_weight=None):
        """Fit the calibrated model

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training data.

        y : array-like, shape (n_samples,)
            Target values.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted.

        Returns
        -------
        self : object
            Returns an instance of self.
        """
        X, y = self._validate_data(X,
                                   y,
                                   accept_sparse=['csc', 'csr', 'coo'],
                                   force_all_finite=False,
                                   allow_nd=True)
        X, y = indexable(X, y)
        le = LabelBinarizer().fit(y)
        self.classes_ = le.classes_

        # Check that each cross-validation fold can have at least one
        # example per class
        n_folds = self.cv if isinstance(self.cv, int) \
            else self.cv.n_folds if hasattr(self.cv, "n_folds") else None
        if n_folds and \
                np.any([np.sum(y == class_) < n_folds for class_ in
                        self.classes_]):
            raise ValueError("Requesting %d-fold cross-validation but provided"
                             " less than %d examples for at least one class." %
                             (n_folds, n_folds))

        self.calibrated_classifiers_ = []
        if self.base_estimator is None:
            # we want all classifiers that don't expose a random_state
            # to be deterministic (and we don't want to expose this one).
            base_estimator = LinearSVC(random_state=0)
        else:
            base_estimator = self.base_estimator

        if self.cv == "prefit":
            calibrated_classifier = _CalibratedClassifier(base_estimator,
                                                          method=self.method)
            calibrated_classifier.fit(X, y, sample_weight)
            self.calibrated_classifiers_.append(calibrated_classifier)
        else:
            cv = check_cv(self.cv, y, classifier=True)
            fit_parameters = signature(base_estimator.fit).parameters
            base_estimator_supports_sw = "sample_weight" in fit_parameters

            if sample_weight is not None:
                sample_weight = _check_sample_weight(sample_weight, X)

                if not base_estimator_supports_sw:
                    estimator_name = type(base_estimator).__name__
                    warnings.warn("Since %s does not support sample_weights, "
                                  "sample weights will only be used for the "
                                  "calibration itself." % estimator_name)

            for train, test in cv.split(X, y):
                this_estimator = clone(base_estimator)

                if sample_weight is not None and base_estimator_supports_sw:
                    this_estimator.fit(X[train],
                                       y[train],
                                       sample_weight=sample_weight[train])
                else:
                    this_estimator.fit(X[train], y[train])

                calibrated_classifier = _CalibratedClassifier(
                    this_estimator, method=self.method, classes=self.classes_)
                sw = None if sample_weight is None else sample_weight[test]
                calibrated_classifier.fit(X[test], y[test], sample_weight=sw)
                self.calibrated_classifiers_.append(calibrated_classifier)

        return self
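
# Editorial sketch of the sample_weight introspection used above, isolated from the
# scikit-learn machinery: check whether an estimator's fit() accepts sample_weight
# before forwarding it. TinyEstimator is a hypothetical stand-in.
from inspect import signature

class TinyEstimator:
    def fit(self, X, y):  # deliberately no sample_weight parameter
        return self

def fit_with_optional_weights(estimator, X, y, sample_weight=None):
    supports_sw = "sample_weight" in signature(estimator.fit).parameters
    if sample_weight is not None and supports_sw:
        return estimator.fit(X, y, sample_weight=sample_weight)
    return estimator.fit(X, y)

fit_with_optional_weights(TinyEstimator(), [[0.0], [1.0]], [0, 1], sample_weight=[1, 2])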
コード例 #41
0
 def test_inspect_sum(self):
     signature = inspect.signature(np.sum)
     assert_('axis' in signature.parameters)
コード例 #42
0
    def check_argument_types(self, function: Callable[..., HttpResponse],
                             openapi_parameters: List[Dict[str, Any]]) -> None:
        """ We construct for both the OpenAPI data and the function's definition a set of
        tuples of the form (var_name, type) and then compare those sets to see if the
        OpenAPI data defines a different type than that actually accepted by the function.
        Otherwise, we print out the exact differences for convenient debugging and raise an
        AssertionError. """
        openapi_params = set(
        )  # type: Set[Tuple[str, Union[type, Tuple[type, object]]]]
        for element in openapi_parameters:
            name = element["name"]  # type: str
            schema = element["schema"]
            if 'oneOf' in schema:
                # Hack: Just use the type of the first value
                # Ideally, we'd turn this into a Union type.
                _type = VARMAP[schema['oneOf'][0]['type']]
            else:
                _type = VARMAP[schema["type"]]
            if _type == list:
                items = schema["items"]
                if "anyOf" in items.keys():
                    subtypes = []
                    for st in items["anyOf"]:
                        st = st["type"]
                        subtypes.append(VARMAP[st])
                    self.assertTrue(len(subtypes) > 1)
                    sub_type = self.get_type_by_priority(subtypes)
                else:
                    sub_type = VARMAP[element["schema"]["items"]["type"]]
                    self.assertIsNotNone(sub_type)
                openapi_params.add((name, (_type, sub_type)))
            else:
                openapi_params.add((name, _type))

        function_params = set(
        )  # type: Set[Tuple[str, Union[type, Tuple[type, object]]]]

        # Iterate through the decorators to find the original
        # function, wrapped by has_request_variables, so we can parse
        # its arguments.
        while getattr(function, "__wrapped__", None):
            function = getattr(function, "__wrapped__", None)
            # Tell mypy this is never None.
            assert function is not None

        # Now, we do inference mapping each REQ parameter's
        # declaration details to the Python/mypy types for the
        # arguments passed to it.
        #
        # Because the mypy types are the types used inside the inner
        # function (after the original data is processed by any
        # validators, converters, etc.), they will not always match
        # the API-level argument types.  The main case where this
        # happens is when a `converter` is used that changes the types
        # of its parameters.
        for vname, defval in inspect.signature(function).parameters.items():
            defval = defval.default
            if defval.__class__ is _REQ:
                # TODO: The below inference logic in cases where
                # there's a converter function declared is incorrect.
                # Theoretically, we could restructure the converter
                # function model so that we can check what type it
                # expects to be passed to make validation here
                # possible.

                vtype = self.get_standardized_argument_type(
                    function.__annotations__[vname])
                vname = defval.post_var_name  # type: ignore # See zerver/lib/request.py
                function_params.add((vname, vtype))

        diff = openapi_params - function_params
        if diff:  # nocoverage
            self.render_openapi_type_exception(function, openapi_params,
                                               function_params, diff)
コード例 #43
0
def accepts_one_argument(function: Callable):
    return len(signature(function).parameters) == 1
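
# Minimal usage sketch (assumes accepts_one_argument above and `from inspect import
# signature` are in scope): parameters with defaults still count toward the total.
print(accepts_one_argument(abs))                   # True: abs(x, /)
print(accepts_one_argument(lambda x, y=0: x + y))  # False: two parameters in the signature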
コード例 #44
0
ファイル: utils.py プロジェクト: gradio-app/gradio
def get_default_args(func: Callable) -> List[Any]:
    signature = inspect.signature(func)
    return [
        v.default if v.default is not inspect.Parameter.empty else None
        for v in signature.parameters.values()
    ]
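
# Minimal usage sketch (assumes get_default_args above is in scope): defaults come back
# in declaration order, with None standing in for parameters that have no default.
def example(a, b=2, *, c=3):
    pass

print(get_default_args(example))  # [None, 2, 3]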
コード例 #45
0
ファイル: orttrainer.py プロジェクト: miketartar/onnxruntime
    def __init__(self,
                 model,
                 model_desc,
                 optim_config,
                 loss_fn=None,
                 options=None):
        # Basic validation
        assert model is not None, "'model' is required and must be either a 'torch.nn.Module' or ONNX model"
        assert isinstance(model_desc, dict), "'model_desc' must be a 'dict'"
        assert isinstance(optim_config, optim._OptimizerConfig),\
            "'optim_config' is required and must be any of 'AdamConfig', 'LambConfig' or 'SGDConfig'"
        assert loss_fn is None or (callable(loss_fn) and len(signature(loss_fn).parameters) == 2),\
            "'loss_fn' must be either 'None' or a callable with two parameters"
        assert options is None or isinstance(options, ORTTrainerOptions),\
            "'options' must be either 'None' or 'ORTTrainerOptions'"

        #            Model + Loss validation
        #           Supported combinations are
        #    ----------------------------------------
        #   |   | Model            | Loss            |
        #    ----------------------------------------
        #   | 1 | torch.nn.Module  | None            |
        #   | 2 | torch.nn.Module  | torch.nn.Module |
        #   | 3 | ONNX             | None            |
        #    ----------------------------------------
        self._torch_model = None
        self._onnx_model = None
        if isinstance(model, torch.nn.Module):
            assert loss_fn is None or isinstance(loss_fn, torch.nn.Module),\
                "'loss_fn' must be either 'None' or 'torch.nn.Module'"
            self._torch_model = model
            self.loss_fn = loss_fn
            # TODO: Subject to change after checkpoint redesign
            self._torch_state_dict_keys = list(model.state_dict().keys())
        elif isinstance(model, onnx.ModelProto):
            assert loss_fn is None, "'loss_fn' must not be specified when 'model' is an ONNX model"
            self._onnx_model = model
            self.loss_fn = None
        else:
            raise ValueError(
                "'model' must be either 'torch.nn.Module' or 'onnx.ModelProto'"
            )

        self.model_desc = _ORTTrainerModelDesc(model_desc)
        self.optim_config = optim_config

        # ORTTrainerOptions
        if not options:
            options = ORTTrainerOptions()
        self.options = options
        if self.options.mixed_precision.enabled and not self.options.mixed_precision.loss_scaler:
            # TODO: Move this to model_desc_validation.py
            self.options.mixed_precision.loss_scaler = amp.loss_scaler.DynamicLossScaler(
            )

        # Post processing ONNX model given as input
        if self._onnx_model:
            if self.options._internal_use.enable_internal_postprocess:
                self._onnx_model = postprocess.run_postprocess(
                    self._onnx_model)
            if self.options._internal_use.extra_postprocess:
                self._onnx_model = self.options._internal_use.extra_postprocess(
                    self._onnx_model)
                assert isinstance(
                    self._onnx_model, onnx.ModelProto
                ), "'extra_postprocess' must return a ONNX model"

            # When input model is already ONNX (and not exported from Pytorch within ORTTrainer),
            # append 'dtype' from ONNX into model description's
            for idx_i, i_desc in enumerate(self.model_desc.inputs):
                dtype = None
                for onnx_input in self._onnx_model.graph.input:
                    if onnx_input.name == i_desc.name:
                        dtype = _utils.dtype_onnx_to_torch(
                            onnx_input.type.tensor_type.elem_type)
                        self.model_desc.add_type_to_input_description(
                            idx_i, dtype)
                        break
                assert dtype is not None, f"ONNX model with unknown input type ({i_desc.name})"
            for idx_o, o_desc in enumerate(self.model_desc.outputs):
                dtype = None
                for onnx_output in self._onnx_model.graph.output:
                    if onnx_output.name == o_desc.name:
                        dtype = _utils.dtype_onnx_to_torch(
                            onnx_output.type.tensor_type.elem_type)
                        self.model_desc.add_type_to_output_description(
                            idx_o, dtype)
                        break
                assert dtype is not None, f"ONNX model with unknown output type ({o_desc.name})"

        # Set GPU device and memory limit
        if 'cuda' in self.options.device.id.lower():
            mem_limit = self.options.device.mem_limit
            if mem_limit > 0:
                ort.set_cuda_mem_limit(self.options.device.mem_limit)
            ort.set_cuda_device_id(
                _utils.get_device_index(self.options.device.id))

        # TODO: Subject to change after checkpoint redesign
        self._state_dict = {}

        self._train_step_info = TrainStepInfo(self.optim_config)
        self._training_session = None
        self._init_session()
コード例 #46
0
ファイル: testing.py プロジェクト: wbm06/pygmt
    def decorator(func):
        import pytest

        os.makedirs(result_dir, exist_ok=True)
        old_sig = inspect.signature(func)

        @pytest.mark.parametrize("ext", extensions)
        def wrapper(*args, ext="png", request=None, **kwargs):
            if "ext" in old_sig.parameters:
                kwargs["ext"] = ext
            if "request" in old_sig.parameters:
                kwargs["request"] = request
            try:
                file_name = "".join(c for c in request.node.name
                                    if c in ALLOWED_CHARS)
            except AttributeError:  # 'NoneType' object has no attribute 'node'
                file_name = func.__name__
            try:
                fig_ref, fig_test = func(*args, **kwargs)
                ref_image_path = os.path.join(result_dir,
                                              f"{file_name}-expected.{ext}")
                test_image_path = os.path.join(result_dir,
                                               f"{file_name}.{ext}")
                fig_ref.savefig(ref_image_path)
                fig_test.savefig(test_image_path)

                # Code below is adapted for PyGMT, and is originally based on
                # matplotlib.testing.decorators._raise_on_image_difference
                err = compare_images(
                    expected=ref_image_path,
                    actual=test_image_path,
                    tol=tol,
                    in_decorator=True,
                )
                if err is None:  # Images are the same
                    os.remove(ref_image_path)
                    os.remove(test_image_path)
                else:  # Images are not the same
                    for key in ["actual", "expected", "diff"]:
                        err[key] = os.path.relpath(err[key])
                    raise GMTImageComparisonFailure(
                        "images not close (RMS %(rms).3f):\n\t%(actual)s\n\t%(expected)s "
                        % err)
            finally:
                del fig_ref
                del fig_test

        parameters = [
            param for param in old_sig.parameters.values()
            if param.name not in {"fig_test", "fig_ref"}
        ]
        if "ext" not in old_sig.parameters:
            parameters += [inspect.Parameter("ext", KEYWORD_ONLY)]
        if "request" not in old_sig.parameters:
            parameters += [inspect.Parameter("request", KEYWORD_ONLY)]
        new_sig = old_sig.replace(parameters=parameters)
        wrapper.__signature__ = new_sig

        # reach a bit into pytest internals to hoist the marks from
        # our wrapped function
        new_marks = getattr(func, "pytestmark", []) + wrapper.pytestmark
        wrapper.pytestmark = new_marks

        return wrapper
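
# Editorial sketch of the signature surgery used above: rewriting a wrapper's
# __signature__ makes introspection-based tools (such as pytest's fixture resolution)
# see extra keyword-only parameters that the original function never declared.
import inspect

def original(x):
    return x

old_sig = inspect.signature(original)
params = list(old_sig.parameters.values())
params.append(inspect.Parameter("ext", inspect.Parameter.KEYWORD_ONLY, default="png"))

def wrapper(*args, ext="png", **kwargs):
    return original(*args, **kwargs)

wrapper.__signature__ = old_sig.replace(parameters=params)
print(inspect.signature(wrapper))  # (x, *, ext='png')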
コード例 #47
0
ファイル: coroweb.py プロジェクト: looklzj/k3fbvj21s3v0rm3e
def has_var_kw_arg(fn):
    # Return True if fn declares a **kwargs (VAR_KEYWORD) parameter, else False.
    params = inspect.signature(fn).parameters
    for name, param in params.items():
        if param.kind == inspect.Parameter.VAR_KEYWORD:
            return True
    return False
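
# Minimal usage sketch (assumes has_var_kw_arg above is in scope): only a **kwargs
# catch-all counts as a VAR_KEYWORD parameter.
def handler_a(request, **extra):
    pass

def handler_b(request, *, page=1):
    pass

print(has_var_kw_arg(handler_a))  # True
print(has_var_kw_arg(handler_b))  # False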
コード例 #48
0
        try:
            src, ext = build[obj]
        except KeyError:
            return
        self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

    list(
        multiprocessing.pool.ThreadPool(multiprocessing.cpu_count()).imap(
            compile_one, objects))
    return objects


# Plant the parallel compile function.
if _check_env_flag('COMPILE_PARALLEL', default='1'):
    try:
        if (inspect.signature(distutils.ccompiler.CCompiler.compile) ==
                inspect.signature(_compile_parallel)):
            distutils.ccompiler.CCompiler.compile = _compile_parallel
    except:
        pass


class Clean(distutils.command.clean.clean):
    def run(self):
        import glob
        import re
        with open('.gitignore', 'r') as f:
            ignores = f.read()
            pat = re.compile(r'^#( BEGIN NOT-CLEAN-FILES )?')
            for wildcard in filter(None, ignores.split('\n')):
                match = pat.match(wildcard)
コード例 #49
0
ファイル: utils.py プロジェクト: patrickstuedi/ray
def get_function_args(callable):
    all_parameters = frozenset(signature(callable).parameters)
    return list(all_parameters)
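
# Editorial note: frozenset() above discards the declaration order of the parameters.
# Signature.parameters is an ordered mapping, so iterating it directly keeps the order
# when that matters. Hedged variant:
from inspect import signature

def get_function_args_ordered(callable_obj):
    return list(signature(callable_obj).parameters)

def f(b, a, *, c=1):
    pass

print(get_function_args_ordered(f))  # ['b', 'a', 'c']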
コード例 #50
0
ファイル: calibration.py プロジェクト: Asgardian8740/Django
    def fit(self, X, y, sample_weight=None):
        """Fit the calibrated model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.

        y : array-like of shape (n_samples,)
            Target values.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted.

        Returns
        -------
        self : object
            Returns an instance of self.
        """
        check_classification_targets(y)
        X, y = indexable(X, y)

        if self.base_estimator is None:
            # we want all classifiers that don't expose a random_state
            # to be deterministic (and we don't want to expose this one).
            base_estimator = LinearSVC(random_state=0)
        else:
            base_estimator = self.base_estimator

        self.calibrated_classifiers_ = []
        if self.cv == "prefit":
            # `classes_` and `n_features_in_` should be consistent with that
            # of base_estimator
            if isinstance(self.base_estimator, Pipeline):
                check_is_fitted(self.base_estimator[-1])
            else:
                check_is_fitted(self.base_estimator)
            with suppress(AttributeError):
                self.n_features_in_ = base_estimator.n_features_in_
            self.classes_ = self.base_estimator.classes_

            pred_method = _get_prediction_method(base_estimator)
            n_classes = len(self.classes_)
            predictions = _compute_predictions(pred_method, X, n_classes)

            calibrated_classifier = _fit_calibrator(
                base_estimator, predictions, y, self.classes_, self.method,
                sample_weight
            )
            self.calibrated_classifiers_.append(calibrated_classifier)
        else:
            X, y = self._validate_data(
                X, y, accept_sparse=['csc', 'csr', 'coo'],
                force_all_finite=False, allow_nd=True
            )
            # Set `classes_` using all `y`
            label_encoder_ = LabelEncoder().fit(y)
            self.classes_ = label_encoder_.classes_
            n_classes = len(self.classes_)

            # sample_weight checks
            fit_parameters = signature(base_estimator.fit).parameters
            supports_sw = "sample_weight" in fit_parameters
            if sample_weight is not None:
                sample_weight = _check_sample_weight(sample_weight, X)
                if not supports_sw:
                    estimator_name = type(base_estimator).__name__
                    warnings.warn(f"Since {estimator_name} does not support "
                                  "sample_weights, sample weights will only be"
                                  " used for the calibration itself.")

            # Check that each cross-validation fold can have at least one
            # example per class
            if isinstance(self.cv, int):
                n_folds = self.cv
            elif hasattr(self.cv, "n_splits"):
                n_folds = self.cv.n_splits
            else:
                n_folds = None
            if n_folds and np.any([np.sum(y == class_) < n_folds
                                   for class_ in self.classes_]):
                raise ValueError(f"Requesting {n_folds}-fold "
                                 "cross-validation but provided less than "
                                 f"{n_folds} examples for at least one class.")
            cv = check_cv(self.cv, y, classifier=True)

            if self.ensemble:
                parallel = Parallel(n_jobs=self.n_jobs)

                self.calibrated_classifiers_ = parallel(
                    delayed(_fit_classifier_calibrator_pair)(
                        clone(base_estimator), X, y, train=train, test=test,
                        method=self.method, classes=self.classes_,
                        supports_sw=supports_sw, sample_weight=sample_weight)
                    for train, test in cv.split(X, y)
                )
            else:
                this_estimator = clone(base_estimator)
                method_name = _get_prediction_method(this_estimator).__name__
                pred_method = partial(
                    cross_val_predict, estimator=this_estimator, X=X, y=y,
                    cv=cv, method=method_name, n_jobs=self.n_jobs
                )
                predictions = _compute_predictions(pred_method, X, n_classes)

                if sample_weight is not None and supports_sw:
                    this_estimator.fit(X, y, sample_weight)
                else:
                    this_estimator.fit(X, y)
                calibrated_classifier = _fit_calibrator(
                    this_estimator, predictions, y, self.classes_, self.method,
                    sample_weight
                )
                self.calibrated_classifiers_.append(calibrated_classifier)

        return self
コード例 #51
0
def test_instance_method():
    r"""Ensure instance methods' signature."""
    assert hasattr(ResSAttnRNNBlock, 'forward')
    assert inspect.signature(ResSAttnRNNBlock.forward) == Signature(
        parameters=[
            Parameter(
                name='self',
                kind=Parameter.POSITIONAL_OR_KEYWORD,
                default=Parameter.empty,
            ),
            Parameter(
                name='batch_tk_mask',
                kind=Parameter.POSITIONAL_OR_KEYWORD,
                annotation=torch.Tensor,
                default=Parameter.empty,
            ),
            Parameter(
                name='batch_tk_reps',
                kind=Parameter.POSITIONAL_OR_KEYWORD,
                annotation=torch.Tensor,
                default=Parameter.empty,
            ),
        ],
        return_annotation=torch.Tensor,
    )

    assert hasattr(ResSAttnRNNModel, '__init__')
    assert inspect.signature(ResSAttnRNNModel.__init__) == Signature(
        parameters=[
            Parameter(
                name='self',
                kind=Parameter.POSITIONAL_OR_KEYWORD,
                default=Parameter.empty,
            ),
            Parameter(
                name='d_emb',
                kind=Parameter.KEYWORD_ONLY,
                annotation=int,
                default=Parameter.empty,
            ),
            Parameter(
                name='d_hid',
                kind=Parameter.KEYWORD_ONLY,
                annotation=int,
                default=Parameter.empty,
            ),
            Parameter(
                name='n_hid_lyr',
                kind=Parameter.KEYWORD_ONLY,
                annotation=int,
                default=Parameter.empty,
            ),
            Parameter(
                name='n_post_hid_lyr',
                kind=Parameter.KEYWORD_ONLY,
                annotation=int,
                default=Parameter.empty,
            ),
            Parameter(
                name='n_pre_hid_lyr',
                kind=Parameter.KEYWORD_ONLY,
                annotation=int,
                default=Parameter.empty,
            ),
            Parameter(
                name='p_emb',
                kind=Parameter.KEYWORD_ONLY,
                annotation=float,
                default=Parameter.empty,
            ),
            Parameter(
                name='p_hid',
                kind=Parameter.KEYWORD_ONLY,
                annotation=float,
                default=Parameter.empty,
            ),
            Parameter(
                name='tknzr',
                kind=Parameter.KEYWORD_ONLY,
                annotation=BaseTknzr,
                default=Parameter.empty,
            ),
            Parameter(
                name='kwargs',
                kind=Parameter.VAR_KEYWORD,
                annotation=Optional[Dict],
            ),
        ],
        return_annotation=Signature.empty,
    )
コード例 #52
0
  def _create_workflow(
      self,
      pipeline_func: Callable,
      pipeline_name: Optional[Text] = None,
      pipeline_description: Optional[Text] = None,
      params_list: Optional[List[dsl.PipelineParam]] = None,
      pipeline_conf: Optional[dsl.PipelineConf] = None,
  ) -> Dict[Text, Any]:
    """ Internal implementation of create_workflow."""
    params_list = params_list or []

    # Create the arg list with no default values and call pipeline function.
    # Assign type information to the PipelineParam
    pipeline_meta = _extract_pipeline_metadata(pipeline_func)
    pipeline_meta.name = pipeline_name or pipeline_meta.name
    pipeline_meta.description = pipeline_description or pipeline_meta.description
    pipeline_name = sanitize_k8s_name(pipeline_meta.name)

    # Need to first clear the default value of dsl.PipelineParams. Otherwise, it
    # will be resolved immediately in place when being passed to each component.
    default_param_values = OrderedDict()

    if self._pipeline_root_param:
      params_list.append(self._pipeline_root_param)
    if self._pipeline_name_param:
      params_list.append(self._pipeline_name_param)

    for param in params_list:
      default_param_values[param.name] = param.value
      param.value = None

    args_list = []
    kwargs_dict = dict()
    signature = inspect.signature(pipeline_func)
    for arg_name, arg in signature.parameters.items():
      arg_type = None
      for input in pipeline_meta.inputs or []:
        if arg_name == input.name:
          arg_type = input.type
          break
      param = dsl.PipelineParam(sanitize_k8s_name(arg_name, True), param_type=arg_type)
      if arg.kind == inspect.Parameter.KEYWORD_ONLY:
        kwargs_dict[arg_name] = param
      else:
        args_list.append(param)

    with dsl.Pipeline(pipeline_name) as dsl_pipeline:
      pipeline_func(*args_list, **kwargs_dict)

    pipeline_conf = pipeline_conf or dsl_pipeline.conf # Configuration passed to the compiler takes precedence. Unfortunately, it's not trivial to detect whether the dsl_pipeline.conf was ever modified.

    self._validate_exit_handler(dsl_pipeline)
    self._sanitize_and_inject_artifact(dsl_pipeline, pipeline_conf)

    # Fill in the default values by merging two param lists.
    args_list_with_defaults = OrderedDict()
    if pipeline_meta.inputs:
      args_list_with_defaults = OrderedDict([
        (sanitize_k8s_name(input_spec.name, True), input_spec.default)
        for input_spec in pipeline_meta.inputs
      ])

    if params_list:
      # Or, if args are provided by params_list, fill in pipeline_meta.
      for k, v in default_param_values.items():
        args_list_with_defaults[k] = v

      pipeline_meta.inputs = pipeline_meta.inputs or []
      for param in params_list:
        pipeline_meta.inputs.append(
            InputSpec(
                name=param.name,
                type=param.param_type,
                default=default_param_values[param.name]))

    op_transformers = [add_pod_env]
    pod_labels = {_SDK_VERSION_LABEL: kfp.__version__, _SDK_ENV_LABEL:_SDK_ENV_DEFAULT}
    op_transformers.append(add_pod_labels(pod_labels))
    op_transformers.extend(pipeline_conf.op_transformers)

    if self._mode == dsl.PipelineExecutionMode.V2_COMPATIBLE:
      # Add self._pipeline_name_param and self._pipeline_root_param to ops inputs
      # if they don't exist already.
      for op in dsl_pipeline.ops.values():
        insert_pipeline_name_param = True
        insert_pipeline_root_param = True
        for param in op.inputs:
          if param.name == self._pipeline_name_param.name:
            insert_pipeline_name_param = False
          elif param.name == self._pipeline_root_param.name:
            insert_pipeline_root_param = False

        if insert_pipeline_name_param:
          op.inputs.append(self._pipeline_name_param)
        if insert_pipeline_root_param:
          op.inputs.append(self._pipeline_root_param)

    workflow = self._create_pipeline_workflow(
        args_list_with_defaults,
        dsl_pipeline,
        op_transformers,
        pipeline_conf,
    )

    from ._data_passing_rewriter import fix_big_data_passing
    workflow = fix_big_data_passing(workflow)

    workflow = _data_passing_rewriter.add_pod_name_passing(
        workflow, str(self._pipeline_root_param or None))

    if pipeline_conf and pipeline_conf.data_passing_method is not None:
      workflow = pipeline_conf.data_passing_method(workflow)

    metadata = workflow.setdefault('metadata', {})
    annotations = metadata.setdefault('annotations', {})
    labels = metadata.setdefault('labels', {})

    annotations[_SDK_VERSION_LABEL] = kfp.__version__
    annotations['pipelines.kubeflow.org/pipeline_compilation_time'] = datetime.datetime.now().isoformat()
    annotations['pipelines.kubeflow.org/pipeline_spec'] = json.dumps(pipeline_meta.to_dict(), sort_keys=True)

    if self._mode == dsl.PipelineExecutionMode.V2_COMPATIBLE:
      annotations['pipelines.kubeflow.org/v2_pipeline'] = "true"
      labels['pipelines.kubeflow.org/v2_pipeline'] = "true"


    # Labels might be logged better than annotations so adding some information here as well
    labels[_SDK_VERSION_LABEL] = kfp.__version__

    return workflow
コード例 #53
0
ファイル: api.py プロジェクト: vijoin/ibis
def udf(
    client,
    python_func,
    in_types,
    out_type,
    schema=None,
    replace=False,
    name=None,
):
    """Defines a UDF in the database

    Parameters
    ----------
    client: PostgreSQLClient
    python_func: python function
    in_types: List[DataType]
    out_type : DataType
    schema: str - optionally specify the schema in which to define the UDF
    replace: bool - replace UDF in database if already exists
    name: str - name for the UDF to be defined in database

    Returns
    -------
    Callable

        The ibis UDF object as a wrapped function
    """
    if name is None:
        internal_name = python_func.__name__
    else:
        internal_name = name
    signature = inspect.signature(python_func)
    parameter_names = signature.parameters.keys()
    replace_text = ' OR REPLACE ' if replace else ''
    schema_fragment = (schema + '.') if schema else ''
    template = """CREATE {replace} FUNCTION
{schema_fragment}{name}({signature})
RETURNS {return_type}
LANGUAGE plpythonu
AS $$
{func_definition}
return {internal_name}({args})
$$;
"""

    postgres_signature = ', '.join(
        '{name} {type}'.format(name=name, type=ibis_to_postgres_str(type_))
        for name, type_ in zip(parameter_names, in_types))
    return_type = ibis_to_postgres_str(out_type)
    # If function definition is indented extra,
    # Postgres UDF will fail with indentation error.
    func_definition = dedent(inspect.getsource(python_func))
    if func_definition.strip().startswith('@'):
        # The decorator syntax does not bind the bare function alone; the decorator
        # lines are therefore included in the string coming from
        # `inspect.getsource()`. Since those decorator objects are not defined
        # inside the UDF body, executing the UDF would raise a NameError.
        raise PostgresUDFError(
            'Use of decorators on a function to be turned into Postgres UDF '
            'is not supported. The body of the UDF must be wholly '
            'self-contained. ')

    formatted_sql = template.format(
        replace=replace_text,
        schema_fragment=schema_fragment,
        name=internal_name,
        signature=postgres_signature,
        return_type=return_type,
        func_definition=func_definition,
        # for internal_name, need to make sure this works if passing
        # name parameter
        internal_name=python_func.__name__,
        args=', '.join(parameter_names),
    )
    client.con.execute(formatted_sql)
    return existing_udf(
        name=internal_name,
        input_types=in_types,
        output_type=out_type,
        schema=schema,
        parameters=parameter_names,
    )
コード例 #54
0
ファイル: _airflow_op.py プロジェクト: rpatil524/pipelines
def _create_component_spec_from_airflow_op(
    op_class: type,
    base_image: str = _default_airflow_base_image,
    result_output_name: str = 'Result',
    variables_dict_output_name: str = 'Variables',
    xcoms_dict_output_name: str = 'XComs',
    variables_to_output: List[str] = None,
    xcoms_to_output: List[str] = None,
    modules_to_capture: List[str] = None,
):
    variables_output_names = variables_to_output or []
    xcoms_output_names = xcoms_to_output or []
    modules_to_capture = modules_to_capture or [op_class.__module__]
    modules_to_capture.append(_run_airflow_op.__module__)

    output_names = []
    if result_output_name is not None:
        output_names.append(result_output_name)
    if variables_dict_output_name is not None:
        output_names.append(variables_dict_output_name)
    if xcoms_dict_output_name is not None:
        output_names.append(xcoms_dict_output_name)
    output_names.extend(variables_output_names)
    output_names.extend(xcoms_output_names)

    from collections import namedtuple
    returnType = namedtuple('AirflowOpOutputs', output_names)

    def _run_airflow_op_closure(*op_args, **op_kwargs) -> returnType:
        (result, variables, xcoms) = _run_airflow_op(op_class, *op_args,
                                                     **op_kwargs)

        output_values = {}

        import json
        if result_output_name is not None:
            output_values[result_output_name] = str(result)
        if variables_dict_output_name is not None:
            output_values[variables_dict_output_name] = json.dumps(variables)
        if xcoms_dict_output_name is not None:
            output_values[xcoms_dict_output_name] = json.dumps(xcoms)
        for name in variables_output_names:
            output_values[name] = variables[name]
        for name in xcoms_output_names:
            output_values[name] = xcoms[name]

        return returnType(**output_values)

    # Hacking the function signature so that correct component interface is generated
    import inspect
    parameters = inspect.signature(op_class).parameters.values()
    #Filtering out `*args` and `**kwargs` parameters that some operators have
    parameters = [
        param for param in parameters
        if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
    ]
    sig = inspect.Signature(
        parameters=parameters,
        return_annotation=returnType,
    )
    _run_airflow_op_closure.__signature__ = sig
    _run_airflow_op_closure.__name__ = op_class.__name__

    return _func_to_component_spec(_run_airflow_op_closure,
                                   base_image=base_image,
                                   use_code_pickling=True,
                                   modules_to_capture=modules_to_capture)
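
# Editorial sketch of the "signature hacking" step above: build a Signature from another
# callable's positional-or-keyword parameters and attach it to a generic closure so that
# downstream introspection sees the intended interface. The Source class is hypothetical.
import inspect

class Source:
    def __init__(self, task_id, retries=0, *args, **kwargs):
        pass

def closure(*args, **kwargs):
    return (args, kwargs)

params = [
    p for p in inspect.signature(Source).parameters.values()
    if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
]
closure.__signature__ = inspect.Signature(parameters=params)
closure.__name__ = Source.__name__

print(inspect.signature(closure))  # (task_id, retries=0)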
コード例 #55
0
def test_inherent_method():
    r'''Ensure inherited methods' signatures are the same as in the base class.'''
    assert (
        inspect.signature(SAttnRNNModel.forward)
        ==
        inspect.signature(ResSAttnRNNModel.forward)
    )

    assert (
        inspect.signature(SAttnRNNModel.load)
        ==
        inspect.signature(ResSAttnRNNModel.load)
    )

    assert (
        inspect.signature(SAttnRNNModel.loss_fn)
        ==
        inspect.signature(ResSAttnRNNModel.loss_fn)
    )

    assert (
        inspect.signature(SAttnRNNModel.pred)
        ==
        inspect.signature(ResSAttnRNNModel.pred)
    )

    assert (
        inspect.signature(SAttnRNNModel.ppl)
        ==
        inspect.signature(ResSAttnRNNModel.ppl)
    )

    assert (
        inspect.signature(SAttnRNNModel.save)
        ==
        inspect.signature(ResSAttnRNNModel.save)
    )

    assert (
        inspect.signature(SAttnRNNModel.train_parser)
        ==
        inspect.signature(ResSAttnRNNModel.train_parser)
    )
コード例 #56
0
# -*- coding: utf-8 -*-
# @Time    : 2019/7/10 23:38
# @Author  : songxy
# @Email   : [email protected]
# @File    : 3.partial.py


from functools import partial
import inspect

#### partial fixes some of a function's arguments and returns a new function

def add(x, y):
    return x + y

newadd = partial(add, x=50)
print(newadd(y=100, x=2222))  # the x fixed by partial acts as a default and can be overridden
sig = inspect.signature(newadd)
params = sig.parameters.items()
print(params)
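
# Editorial note: on recent CPython the signature reported for newadd is (*, x=50, y).
# Binding x by keyword turns it (and the parameters after it) keyword-only, with 50 as
# its new, overridable default, while y stays required.
print(sig)  # (*, x=50, y)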
コード例 #57
0
ファイル: network.py プロジェクト: ideechy/stylegan2-ada
    def _init_graph(self) -> None:
        assert self._var_inits is not None
        assert self._input_templates is None
        assert self._output_templates is None
        assert self._own_vars is None

        # Initialize components.
        if self._components is None:
            self._components = util.EasyDict()

        # Choose build func kwargs.
        build_kwargs = dict(self.static_kwargs)
        build_kwargs["is_template_graph"] = True
        build_kwargs["components"] = self._components

        # Override scope and device, and ignore surrounding control dependencies.
        with tfutil.absolute_variable_scope(
                self.scope, reuse=False), tfutil.absolute_name_scope(
                    self.scope), tf.device(
                        self.device), tf.control_dependencies(None):
            assert tf.get_variable_scope().name == self.scope
            assert tf.get_default_graph().get_name_scope() == self.scope

            # Create input templates.
            self._input_templates = []
            for param in inspect.signature(
                    self._build_func).parameters.values():
                if param.kind == param.POSITIONAL_OR_KEYWORD and param.default is param.empty:
                    self._input_templates.append(
                        tf.placeholder(tf.float32, name=param.name))

            # Call build func.
            out_expr = self._build_func(*self._input_templates, **build_kwargs)

        # Collect output templates and variables.
        assert tfutil.is_tf_expression(out_expr) or isinstance(out_expr, tuple)
        self._output_templates = [
            out_expr
        ] if tfutil.is_tf_expression(out_expr) else list(out_expr)
        self._own_vars = OrderedDict(
            (var.name[len(self.scope) + 1:].split(":")[0], var)
            for var in tf.global_variables(self.scope + "/"))

        # Check for errors.
        if len(self._input_templates) == 0:
            raise ValueError("Network build func did not list any inputs.")
        if len(self._output_templates) == 0:
            raise ValueError("Network build func did not return any outputs.")
        if any(not tfutil.is_tf_expression(t) for t in self._output_templates):
            raise ValueError("Network outputs must be TensorFlow expressions.")
        if any(t.shape.ndims is None for t in self._input_templates):
            raise ValueError(
                "Network input shapes not defined. Please call x.set_shape() for each input."
            )
        if any(t.shape.ndims is None for t in self._output_templates):
            raise ValueError(
                "Network output shapes not defined. Please call x.set_shape() where applicable."
            )
        if any(not isinstance(comp, Network)
               for comp in self._components.values()):
            raise ValueError(
                "Components of a Network must be Networks themselves.")
        if len(self._components) != len(
                set(comp.name for comp in self._components.values())):
            raise ValueError("Components of a Network must have unique names.")

        # Initialize variables.
        if len(self._var_inits):
            tfutil.set_vars({
                self._get_vars()[name]: value
                for name, value in self._var_inits.items()
                if name in self._get_vars()
            })
        remaining_inits = [
            var.initializer for name, var in self._own_vars.items()
            if name not in self._var_inits
        ]
        if self._all_inits_known:
            assert len(remaining_inits) == 0
        else:
            tfutil.run(remaining_inits)
        self._var_inits = None
コード例 #58
0
 def default_attributes(cls) -> dict:
     init_signature = inspect.signature(cls)
     return {k: v.default for k, v in init_signature.parameters.items()}
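
# Editorial sketch of the same idea on a hypothetical class: constructor parameters
# without a default are reported as inspect.Parameter.empty.
import inspect

class Config:
    def __init__(self, host, port=8080, debug=False):
        self.host, self.port, self.debug = host, port, debug

print({k: v.default for k, v in inspect.signature(Config).parameters.items()})
# {'host': <class 'inspect._empty'>, 'port': 8080, 'debug': False}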
コード例 #59
0
ファイル: compat.py プロジェクト: Cecijvp/DjangoUdemyCourse
def _format_args(func: Callable[..., Any]) -> str:
    return str(signature(func))
コード例 #60
0
def model_setup(algorithm_name, env, policy, **kwargs):
    """
        This function takes in the algorithm name specified by the
        user in the CLI, the environment to train on, the policy, and
        finally an optional dictionary of parameters to set for the agent.

    Parameter(s):
    -------------
    algorithm_name: type(String)
        The name of the algorithm to be used. Must be supported in stable baselines.
    env: type(Gym)
        The environment to train the agent on. Must be of type openAI Gym.
    policy: type(String)
        The policy to train the agent with. Not all policies are compatible with
        all algorithms.
    kwargs: type(dict)
        Dictionary of algorithm variables that can be used instead of the default values.

    Returns:
    --------
    model: type(Object)
        A machine learning model
    """
    assert algorithm_name in list_of_algorithms, 'Algorithm must be supported by stable baselines'

    model = None

    if algorithm_name in [list_of_algorithms[0]]:
        # TD3 algorithm
        # Get the default values in case no user specifications
        signature = inspect.signature(TD3.__init__)

        g = kwargs.pop('g', signature.parameters['gamma'].default)
        bf = kwargs.pop('bf', signature.parameters['buffer_size'].default)
        nstd = kwargs.pop('nstd', signature.parameters['target_policy_noise'].default)
        lst = kwargs.pop('lst', signature.parameters['learning_starts'].default)
        bch = kwargs.pop('bch', signature.parameters['batch_size'].default)
        lr = kwargs.pop('lr', signature.parameters['learning_rate'].default)
        tf = kwargs.pop('tf', signature.parameters['train_freq'].default)
        grad = kwargs.pop('grad', signature.parameters['gradient_steps'].default)
        pkwargs = kwargs.pop('pkwargs', signature.parameters['policy_kwargs'].default)
        v = kwargs.pop('v', signature.parameters['verbose'].default)

        model = TD3(policy=policy,
                    env=env,
                    gamma=g,
                    buffer_size=bf,
                    target_policy_noise=nstd,
                    learning_starts=lst,
                    batch_size=bch,
                    learning_rate=lr,
                    train_freq=tf,
                    gradient_steps=grad,
                    verbose=v,
                    policy_kwargs=pkwargs)

    elif algorithm_name in [list_of_algorithms[1]]:
        # DDPG algorithm
        pass
    elif algorithm_name in [list_of_algorithms[2]]:
        # PPO2 algorithm
        # Get the default values in case no user specifications
        signature = inspect.signature(PPO2.__init__)

        lr = kwargs.pop('lr', signature.parameters['learning_rate'].default)
        nsteps = kwargs.pop('nsteps', signature.parameters['n_steps'].default)
        nbtch = kwargs.pop('nbtch', signature.parameters['nminibatches'].default)
        lbd = kwargs.pop('lbd', signature.parameters['lam'].default)
        g = kwargs.pop('g', signature.parameters['gamma'].default)
        nep = kwargs.pop('nep', signature.parameters['noptepochs'].default)
        ent = kwargs.pop('ent', signature.parameters['ent_coef'].default)
        cl = kwargs.pop('cl', signature.parameters['cliprange'].default)
        v = kwargs.pop('v', signature.parameters['verbose'].default)

        model = PPO2(policy=policy,
                     env=env,
                     learning_rate=lr,
                     verbose=v,
                     ent_coef=ent,
                     lam=lbd,
                     gamma=g,
                     n_steps=nsteps,
                     nminibatches=nbtch,
                     noptepochs=nep,
                     cliprange=cl)

    elif algorithm_name in [list_of_algorithms[3]]:
        # PPO1 algorithm
        pass
    elif algorithm_name in [list_of_algorithms[4]]:
        # A2C algorithm
        pass
    elif algorithm_name in [list_of_algorithms[5]]:
        # ACER algorithm
        pass
    elif algorithm_name in [list_of_algorithms[6]]:
        # ACKTR algorithm
        pass
    elif algorithm_name in [list_of_algorithms[7]]:
        # DQN algorithm
        pass
    elif algorithm_name in [list_of_algorithms[8]]:
        # GAIL algorithm
        pass
    elif algorithm_name in [list_of_algorithms[9]]:
        # HER algorithm
        pass
    elif algorithm_name in [list_of_algorithms[10]]:
        # SAC algorithm
        # Get the default values in case no user specifications
        signature = inspect.signature(SAC.__init__)

        lr = kwargs.pop('lr', signature.parameters['learning_rate'].default)
        bf = kwargs.pop('bf', signature.parameters['buffer_size'].default)
        bch = kwargs.pop('bch', signature.parameters['batch_size'].default)
        ent = kwargs.pop('ent', signature.parameters['ent_coef'].default)
        tf = kwargs.pop('tf', signature.parameters['train_freq'].default)
        grad = kwargs.pop('grad', signature.parameters['gradient_steps'].default)
        lst = kwargs.pop('lst', signature.parameters['learning_starts'].default)
        v = kwargs.pop('v', signature.parameters['verbose'].default)

        model = SAC(policy=policy,
                    env=env,
                    learning_rate=lr,
                    buffer_size=bf,
                    batch_size=bch,
                    ent_coef=ent,
                    train_freq=tf,
                    gradient_steps=grad,
                    learning_starts=lst,
                    verbose=v)

    elif algorithm_name in [list_of_algorithms[11]]:
        # TRPO algorithm
        pass

    return model