Example #1
    def __init__(self,data,model,prior=None,llh='poisson'):
        self.llh = llh
        self.prior = prior
        self.model = model
        self.data = data
        self.multi_counts = np.histogramdd(self.data,bins=self.model.bin_edges)[0]
        self.counts = self.multi_counts.flatten()

        params = inspect.getargspec(self.model.weighter).args
        params.remove('aux_data')

        fcn_str = "fcn = lambda "
        for param in params:
            fcn_str += param+","
        fcn_str = fcn_str[:-1] + ": self.__call__(**{"
        for param in params:
            fcn_str += "'"+param+"':"+param+","
        fcn_str = fcn_str[:-1] + "})"
        if not prior is None:
            fcn_str += " + self.prior(**{"
            for param in params:
                fcn_str += "'"+param+"':"+param+","
            fcn_str = fcn_str[:-1] + "})"        

        exec fcn_str in locals()

        self.minuit = minuit.Minuit2(fcn)

        self.seeds = dict(zip(params,inspect.getargspec(self.model.weighter).defaults))
        self.minuit.values = self.seeds.copy()
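
A minimal sketch (with a hypothetical weighter function) of what the constructor above pulls out of inspect.getargspec: the argument names drive the generated lambda string, and the trailing defaults become the Minuit seed values.

import inspect

def weighter(aux_data, norm=1.0, index=-2.7):
    # hypothetical model weighter: 'aux_data' is stripped, the rest are fit parameters
    return norm * aux_data ** index

params = inspect.getargspec(weighter).args        # ['aux_data', 'norm', 'index']
params.remove('aux_data')
defaults = inspect.getargspec(weighter).defaults  # (1.0, -2.7)
seeds = dict(zip(params, defaults))
assert seeds == {'norm': 1.0, 'index': -2.7}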
Example #2
        def _any(thing, fields=None):
            """
            Dispatch, all types are routed through here.
            """
            ret = None

            if isinstance(thing, QuerySet):
                ret = _qs(thing, fields)
            elif isinstance(thing, (tuple, list, set)):
                ret = _list(thing, fields)
            elif isinstance(thing, dict):
                ret = _dict(thing, fields)
            elif isinstance(thing, decimal.Decimal):
                ret = str(thing)
            elif isinstance(thing, Model):
                ret = _model(thing, fields)
            elif isinstance(thing, HttpResponse):
                raise HttpStatusCode(thing)
            elif inspect.isfunction(thing):
                if not inspect.getargspec(thing)[0]:
                    ret = _any(thing())
            elif hasattr(thing, '__emittable__'):
                f = thing.__emittable__
                if inspect.ismethod(f) and len(inspect.getargspec(f)[0]) == 1:
                    ret = _any(f())
            elif repr(thing).startswith("<django.db.models.fields.related.RelatedManager"):
                ret = _any(thing.all())
            else:
                ret = smart_unicode(thing, strings_only=True)

            return ret
Example #3
    def test_setupEnvironment(self):
        """
        L{UnixApplicationRunner.startApplication} calls
        L{UnixApplicationRunner.setupEnvironment} with the chroot, rundir,
        nodaemon, umask, and pidfile parameters from the configuration it is
        constructed with.
        """
        options = twistd.ServerOptions()
        options.parseOptions([
                '--nodaemon',
                '--umask', '0070',
                '--chroot', '/foo/chroot',
                '--rundir', '/foo/rundir',
                '--pidfile', '/foo/pidfile'])
        application = service.Application("test_setupEnvironment")
        self.runner = UnixApplicationRunner(options)

        args = []
        def fakeSetupEnvironment(self, chroot, rundir, nodaemon, umask, pidfile):
            args.extend((chroot, rundir, nodaemon, umask, pidfile))

        # Sanity check
        self.assertEqual(
            inspect.getargspec(self.runner.setupEnvironment),
            inspect.getargspec(fakeSetupEnvironment))

        self.patch(UnixApplicationRunner, 'setupEnvironment', fakeSetupEnvironment)
        self.patch(UnixApplicationRunner, 'shedPrivileges', lambda *a, **kw: None)
        self.patch(app, 'startApplication', lambda *a, **kw: None)
        self.runner.startApplication(application)

        self.assertEqual(
            args,
            ['/foo/chroot', '/foo/rundir', True, 56, '/foo/pidfile'])
Example #4
def getargspec(func):
    '''Get the argument specification of the `func`.
    
    `func` is a Python function, built-in function or bound method.
    
    It gets the argument specification by parsing the documentation of the
    function if `func` is a built-in function.
    
    .. versionchanged:: 0.1.4
       Remove `self` automatically if `func` is a method.

    .. versionadded:: 0.1.3'''

    if inspect.isfunction(func):
        return inspect.getargspec(func)

    if inspect.ismethod(func):
        argspec = inspect.getargspec(func)
        argspec[0].pop(0)
        return argspec

    def strbetween(s, a, b):
        return s[s.find(a): s.rfind(b)]

    argspecdoc = (inspect.getdoc(func) or '').split('\n')[0]
    argpart = strbetween(argspecdoc, '(', ')')
    args = argpart.split(',')
    args = [ arg.strip(' ()[]') for arg in args ]
    args = [ arg for arg in args if arg ]

    defaultpart = strbetween(argspecdoc, '[', ']')
    defaultcount = len([d for d in defaultpart.split(',') if d.strip('[]')])

    return (args or None, None, None, (None,) * defaultcount or None)
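
A hedged usage sketch for the getargspec defined above (assuming it is importable as-is): for ordinary functions it simply defers to inspect.getargspec, while the doc-parsing branch is only exercised by built-ins.

def plain(a, b=1):
    pass

# For ordinary functions this simply defers to inspect.getargspec.
assert getargspec(plain) == (['a', 'b'], None, None, (1,))

# For built-ins it falls back to parsing the first line of the docstring,
# so the result is only as precise as the documentation itself.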
Example #5
def identical_signature_wrapper(original_function, wrapped_function):
    '''
    Return a function with a signature identical to ``original_function``'s, which
    will call ``wrapped_function``.
    '''
    context = {'__wrapped__': wrapped_function}
    function_def = compile(
        'def {0}({1}):\n'
        '    return __wrapped__({2})'.format(
            # Keep the original function name
            original_function.__name__,
            # The function signature including defaults, i.e. 'timeout=1'
            inspect.formatargspec(
                *inspect.getargspec(original_function)
            )[1:-1],
            # The function signature without the defaults
            inspect.formatargspec(
                formatvalue=lambda val: '',
                *inspect.getargspec(original_function)
            )[1:-1]
        ),
        '<string>',
        'exec'
    )
    exec function_def in context
    return wraps(original_function)(context[original_function.__name__])
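
A small usage sketch (assuming identical_signature_wrapper above is available): the returned function calls the replacement but keeps the original's argspec, including defaults.

import inspect

def original(host, timeout=1):
    return (host, timeout)

def replacement(*args, **kwargs):
    return ('patched',) + args

wrapped = identical_signature_wrapper(original, replacement)
assert inspect.getargspec(wrapped) == inspect.getargspec(original)
assert wrapped('example.org') == ('patched', 'example.org', 1)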
Example #6
 def realShow(self):
     self.machine.displaying = True
     t = None
     for (k,v) in self.machine.types.iteritems():
         if self.machine.type == k:
             args = inspect.getargspec(v.__init__).args
             extras = {}
             if 'parent' in args:
                 extras['parent'] = self.machine.parent
             if 'time' in args:
                 extras['time'] = self.time
             if k == "libnotify" or k == "twmn":
                 t = v(self.title, self.msg, self.icon, **extras)
             else:
                 t = v(self.machine, self.title, self.msg, self.icon, **extras)
             # Use libnotify's urgency setting
             if k == "libnotify":
                 if self.importance < 0:
                     t.set_urgency(pynotify.URGENCY_CRITICAL)
                 elif self.importance == 0:
                     t.set_urgency(pynotify.URGENCY_NORMAL)
                 elif self.importance > 0:
                     t.set_urgency(pynotify.URGENCY_LOW)
             break
     if not t:
         if 'default' in self.machine.types:
             if 'parent' in inspect.getargspec(self.machine.types['default'].__init__).args:
                 t = self.machine.types['default'](self.machine, self.title, self.msg, self.icon, self.machine.parent)
             else:
                 t = self.machine.types['default'](self.machine, self.title, self.msg, self.icon)
         else:
             t = DefaultToast(self.title, self.msg, self.icon)
     t.show()
Example #7
    def getargs(self,moduleName,className,method) :
        '''
          This will return the list of arguments of a method in a Python module or class.
          It accepts a list of method names as an argument.
        '''
        print "Message : Argument list is being obtained for each method"
        methodArgsDict = {}
        if className == None:
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) -1], globals(), locals(), [moduleList[len(moduleList) -2]], -1)
                try :
                    names = vars(Module)[name]
                except KeyError:
                    print "Message : method '" + name + "'does not exists,Continued with including it. "
                    return False
                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]
        else :
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [className], -1)
                Class = getattr(Module, className)
                try :
                    names = vars(Class)[name]
                except KeyError :
                    print "Message : method '" + name + "'does not exists,Continued with include it."
                    return False

                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]

        return methodArgsDict
Example #8
def _ensure_4args(func):
    """ Conditionally wrap function to ensure 4 input arguments

    Parameters
    ----------
    func: callable
        with two, three or four positional arguments

    Returns
    -------
    callable which possibly ignores 0, 1 or 2 positional arguments

    """
    if func is None:
        return None
    if isinstance(func, _Blessed):  # inspect on __call__ is a hassle...
        return func

    self_arg = 1 if inspect.ismethod(func) else 0
    if len(inspect.getargspec(func)[0]) == 4 + self_arg:
        return func
    if len(inspect.getargspec(func)[0]) == 3 + self_arg:
        return lambda x, y, p=(), backend=math: func(x, y, p)
    elif len(inspect.getargspec(func)[0]) == 2 + self_arg:
        return lambda x, y, p=(), backend=math: func(x, y)
    else:
        raise ValueError("Incorrect numer of arguments")
Example #9
    def new_handler(self, acl, handler_cls, *args, **kwargs):
        '''
        Instantiates a new handler object, which is called remotely
        by others. The user can control the effect of the call by
        implementing the remote method in the local endpoint class. The
        returned reference can be called locally and will behave as a
        regular instance.

        Arguments:
            acl -- Access control list (see ACL class)
            handler_cls -- The local (duck) type.
            *args -- Constructor arguments for the local type.
            **kwargs -- Constructor keyworded arguments for the local
                type.
        '''
        argspec = inspect.getargspec(handler_cls.__init__)
        base_argspec = inspect.getargspec(Endpoint.__init__)
        if(argspec == base_argspec):
            result = handler_cls(self, self._client.boundjid.full)
        else:
            result = handler_cls(*args, **kwargs)
            Endpoint.__init__(result, self, self._client.boundjid.full)
        method_dict = result.get_methods()
        for method_name, method in method_dict.iteritems():
            #!!! self._client.plugin['xep_0009'].register_call(result.FQN(), method, method_name)
            self._register_call(result.FQN(), method, method_name)
        self._register_acl(result.FQN(), acl)
        return result
Example #10
 def superdo(self, *args, **kwargs):
     """Like calling :meth:`super()` with the right arguments
     
     ??? check if it works on multiple levels"""
     frame = sys._getframe(1)
     superObj = super(self.__class__, self)
     selector = frame.f_code.co_name
     selectorMethod = getattr(superObj, selector, None)
     if selectorMethod:
         if not(args or kwargs):
             srcargname, srcargs, srckwargs, vlocals = inspect.getargvalues(frame)
             srcdefaults = inspect.getargspec(getattr(self, selector))[3]
             if not srcdefaults: srcdefaults = []
             nargs = len(srcargname) - len(srcdefaults)
             args = [vlocals[key] for key in srcargname[1:nargs]]
             if srcargs: args.extend(vlocals[srcargs])
             kwargs = dict([(key, vlocals[key]) for key in srcargname[nargs:]])
             if  srckwargs: kwargs.update(vlocals[srckwargs])
             dstargname, dstargs, dstkwargs, dstdefaults = inspect.getargspec(selectorMethod)
             if not dstdefaults: dstdefaults = []
             nargs = len(dstargname) - len(dstdefaults) - 1
             if not dstargs: args = args[:nargs]
             if not dstkwargs:
                 dstkw = dstargname[-len(dstdefaults):]
                 kwargs = dict([(key, value) for key, value in kwargs.items() if key in dstkw])
         return selectorMethod(*args, **kwargs)
Example #11
    def docroutine(self, object, name, mod=None, funcs={}, classes={}, methods={}, cl=None):
        """Produce HTML documentation for a function or method object."""

        anchor = (cl and cl.__name__ or "") + "-" + name
        note = ""

        title = '<a name="%s"><strong>%s</strong></a>' % (self.escape(anchor), self.escape(name))

        if inspect.ismethod(object):
            args, varargs, varkw, defaults = inspect.getargspec(object)
            # exclude the argument bound to the instance, it will be
            # confusing to the non-Python user
            argspec = inspect.formatargspec(args[1:], varargs, varkw, defaults, formatvalue=self.formatvalue)
        elif inspect.isfunction(object):
            args, varargs, varkw, defaults = inspect.getargspec(object)
            argspec = inspect.formatargspec(args, varargs, varkw, defaults, formatvalue=self.formatvalue)
        else:
            argspec = "(...)"

        if isinstance(object, tuple):
            argspec = object[0] or argspec
            docstring = object[1] or ""
        else:
            docstring = pydoc.getdoc(object)

        decl = title + argspec + (note and self.grey('<font face="helvetica, arial">%s</font>' % note))

        doc = self.markup(docstring, self.preformat, funcs, classes, methods)
        doc = doc and "<dd><tt>%s</tt></dd>" % doc
        return "<dl><dt>%s</dt>%s</dl>\n" % (decl, doc)
Example #12
    def serialize(self, obj):
        """
        Convert any object into a serializable representation.
        """

        if isinstance(obj, (dict, models.Model)):
            # Model instances & dictionaries
            return self.serialize_model(obj)
        elif isinstance(obj, (tuple, list, set, QuerySet, types.GeneratorType)):
            # basic iterables
            return self.serialize_iter(obj)
        elif isinstance(obj, models.Manager):
            # Manager objects
            return self.serialize_manager(obj)
        elif inspect.isfunction(obj) and not inspect.getargspec(obj)[0]:
            # function with no args
            return self.serialize_func(obj)
        elif inspect.ismethod(obj) and len(inspect.getargspec(obj)[0]) <= 1:
            # bound method
            return self.serialize_func(obj)

        # Protected types are passed through as is.
        # (i.e. Primitives like None, numbers, dates, and Decimals.)
        if is_protected_type(obj):
            return obj

        # All other values are converted to string.
        return self.serialize_fallback(obj)
Example #13
def try_run(obj, names):
    """Given a list of possible method names, try to run them with the
    provided object. Keep going until something works. Used to run
    setup/teardown methods for module, package, and function tests.
    """
    for name in names:
        func = getattr(obj, name, None)
        if func is not None:
            if type(obj) == types.ModuleType:
                # py.test compatibility
                if isinstance(func, types.FunctionType):
                    args, varargs, varkw, defaults = \
                        inspect.getargspec(func)
                else:
                    # Not a function. If it's callable, call it anyway
                    if hasattr(func, '__call__') and not inspect.ismethod(func):
                        func = func.__call__
                    try:
                        args, varargs, varkw, defaults = \
                            inspect.getargspec(func)
                        args.pop(0) # pop the self off
                    except TypeError:
                        raise TypeError("Attribute %s of %r is not a python "
                                        "function. Only functions or callables"
                                        " may be used as fixtures." %
                                        (name, obj))
                if len(args):
                    log.debug("call fixture %s.%s(%s)", obj, name, obj)
                    return func(obj)
            log.debug("call fixture %s.%s", obj, name)
            return func()
Example #14
def function_check(func1, func2):
    """
        Function param tester
    """
    func1_args = inspect.getargspec(func1)
    func2_args = inspect.getargspec(func2)
    return func1_args == func2_args
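
A minimal illustration of function_check above: the comparison covers argument names, *args/**kwargs and default values, so differing defaults already make it fail.

def a(x, y=1): pass
def b(x, y=1): pass
def c(x, y=2): pass

assert function_check(a, b)
assert not function_check(a, c)   # same names, different default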
Example #15
def extract_tb(tb, limit = None):
    list = []
    n = 0
    while tb is not None and (limit is None or n < limit):
        f = tb.tb_frame
        lineno = tb.tb_lineno
        co = f.f_code
        filename = co.co_filename
        name = co.co_name
        linecache.checkcache(filename)
        line = ""
        if '__file__' in f.f_globals:
            for global_name, x in f.f_globals.items():
                if global_name.startswith('_'):
                    continue
                   
                if inspect.isfunction(x):
                    if global_name == name and x.__code__ == co:
                        args, varargs, varkw, defaults = inspect.getargspec(x)
                        name += inspect.formatargspec(args, varargs, varkw, defaults)
                elif inspect.isclass(x):
                    method = find_method_in_class(name,co,x)
                    if not method is None:
                        args, varargs, varkw, defaults = inspect.getargspec(method)
                        name += inspect.formatargspec(args, varargs, varkw, defaults)
                        name = x.__name__ + '.' + name
                            
        if line:
            line = line.strip()
        else: 
            line = None
        list.append((filename, lineno, name, line))
        tb = tb.tb_next
        n = n+1
    return list
Example #16
def advanced_wrap(f, wrapper):
    """
    Wrap a decorated function while keeping the same keyword arguments
    """
    f_sig = list(inspect.getargspec(f))
    wrap_sig = list(inspect.getargspec(wrapper))

    # Update the keyword arguments of the wrapper
    if f_sig[3] is None or f_sig[3] == []:
        f_sig[3], f_kwargs = [], []
    else:
        f_kwargs = f_sig[0][-len(f_sig[3]):]

    for key, default in zip(f_kwargs, f_sig[3]):
        wrap_sig[0].append(key)
        wrap_sig[3] = wrap_sig[3] + (default, )

    wrap_sig[2] = None  # Remove kwargs
    src = "lambda %s: " % (inspect.formatargspec(*wrap_sig)[1:-1])
    new_args = inspect.formatargspec(
        wrap_sig[0], wrap_sig[1], wrap_sig[2], f_kwargs,
        formatvalue=lambda x: '=' + x)
    src += 'wrapper%s\n' % new_args

    decorated = eval(src, locals())
    return update_wrapper(decorated, f)
Example #17
File: neo.py Project: vtphan/neo
    def dispatch(self, controller, u_args, qs, json_out):
        def build_context(context=None):
            if type(context)==str: return context
            new_context = dict(url=self.url, quote=self.quote, escape=escape,
                               markup=markup, message=cookie.message)
            new_context.update(self.internal)
            if context: new_context.update(context)
            return new_context

        con, tmpl = self.route[controller]
        f = getattr(con, request.method.lower())
        args = getargspec(f).args
        if len(u_args) > len(args): response.error('too many parameters','500')
        def_args = getargspec(f).defaults
        nondef_args = args[:-len(def_args)] if def_args else args
        params = {args[i]:v for i,v in enumerate(u_args)}
        params.update({ k:v for k,v in qs.items() if k in args })
        missing_args = [k for k in nondef_args if k not in params]
        try:
            if missing_args:
                if 'HTTP_REFERER' in request.env:
                    flash(', '.join(missing_args)+' missing')
                    self.redirect(request.env['HTTP_REFERER'])
                else: response.error(', '.join(missing_args)+' missing', '500')
            else:
                ret = f(**params) if params else f()
                if json_out: response.out = json.dumps(ret)
                else:
                    cxt = build_context(ret)
                    response.out = tmpl.render(cxt) if type(cxt)==dict else cxt 
        except Exception as err:
            if err.args and err.args[0] == 'NeoRedirect': return
            else: response.error(traceback.format_exc(), '500', False)
Example #18
File: macro.py Project: csimag/rose
def transform_config(config, meta_config, transformer_macro, modules,
                     macro_info_tuples, opt_non_interactive=False):
    """Run transformer custom macros on the config and return problems."""
    macro_change_dict = {}
    for module_name, class_name, method, help in macro_info_tuples:
        if method != TRANSFORM_METHOD:
            continue
        macro_name = ".".join([module_name, class_name])
        if macro_name != transformer_macro:
            continue
        for module in modules:
            if module.__name__ == module_name:
                macro_inst = getattr(module, class_name)()
                macro_method = getattr(macro_inst, method)
                break
        res = {}
        if not opt_non_interactive:
            arglist = inspect.getargspec(macro_method).args
            defaultlist = inspect.getargspec(macro_method).defaults
            optionals = {}
            while defaultlist is not None and len(defaultlist) > 0:
                if arglist[-1] not in ["self", "config", "meta_config"]:
                    optionals[arglist[-1]] = defaultlist[-1]
                    arglist = arglist[0:-1]
                    defaultlist = defaultlist[0:-1]
                else:
                    break
            if optionals:
                res = get_user_values(optionals)
        return macro_method(config, meta_config, **res)
    return config, []
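
The optional-argument harvesting used above, sketched in isolation with a hypothetical transform method: trailing defaults are paired with trailing argument names until one of the protected names is reached.

import inspect

def transform(self, config, meta_config, indent=4, dry_run=False):
    pass

arglist = inspect.getargspec(transform).args
defaultlist = inspect.getargspec(transform).defaults
optionals = {}
while defaultlist is not None and len(defaultlist) > 0:
    if arglist[-1] not in ["self", "config", "meta_config"]:
        optionals[arglist[-1]] = defaultlist[-1]
        arglist = arglist[0:-1]
        defaultlist = defaultlist[0:-1]
    else:
        break

assert optionals == {'indent': 4, 'dry_run': False}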
Example #19
def get_spec(func):
    """Returns (args, kwargs) tuple for a function
    >>> import re
    >>> get_spec(re.match)
    (['pattern', 'string'], {'flags': 0})

    >>> class Test(object):
    ...     def __call__(self, val):
    ...         pass
    ...     def method(self, val, flags=0):
    ...         pass

    >>> get_spec(Test)
    (['self', 'val'], {})

    >>> get_spec(Test.method)
    (['self', 'val'], {'flags': 0})

    >>> get_spec(Test().method)
    (['self', 'val'], {'flags': 0})
    """

    if inspect.isfunction(func) or inspect.ismethod(func):
        spec = inspect.getargspec(func)
    elif hasattr(func, '__call__'):
        spec = inspect.getargspec(func.__call__)
    else:
        raise TypeError('%s is not callable' % type(func))

    defaults = spec.defaults or []

    firstdefault = len(spec.args) - len(defaults)
    args = spec.args[:firstdefault]
    kwargs = dict(zip(spec.args[firstdefault:], defaults))
    return args, kwargs
Example #20
def is_tree(tree):
    try:
        tree_data = inspect.getargspec(tree)
        assert tree_data == inspect.getargspec(lambda dispatch: None)
        return all([is_tree(branch) for branch in branches(tree)])
    except:
        return False
Example #21
File: macro.py Project: csimag/rose
def validate_config(app_config, meta_config, run_macro_list, modules,
                    macro_info_tuples, opt_non_interactive=False):
    """Run validator custom macros on the config and return problems."""
    macro_problem_dict = {}
    for module_name, class_name, method, help in macro_info_tuples:
        macro_name = ".".join([module_name, class_name])
        if macro_name in run_macro_list and method == VALIDATE_METHOD:
            for module in modules:
                if module.__name__ == module_name:
                    macro_inst = getattr(module, class_name)()
                    macro_meth = getattr(macro_inst, method)
                    break
            res = {}
            if not opt_non_interactive:
                arglist = inspect.getargspec(macro_meth).args
                defaultlist = inspect.getargspec(macro_meth).defaults
                optionals = {}
                while defaultlist is not None and len(defaultlist) > 0:
                    if arglist[-1] not in ["self", "config", "meta_config"]:
                        optionals[arglist[-1]] = defaultlist[-1]
                        arglist = arglist[0:-1]
                        defaultlist = defaultlist[0:-1]
                    else:
                        break
                if optionals:
                    res = get_user_values(optionals)
            problem_list = macro_meth(app_config, meta_config, **res)
            if not isinstance(problem_list, list):
                raise ValueError(ERROR_RETURN_VALUE.format(macro_name))
            if problem_list:
                macro_problem_dict.update({macro_name: problem_list})
    return macro_problem_dict
Example #22
    def assertPublicAPISignatures(self, baseinst, inst):
        def get_public_apis(inst):
            methods = {}

            def findmethods(object):
                return inspect.ismethod(object) or inspect.isfunction(object)

            for (name, value) in inspect.getmembers(inst, findmethods):
                if name.startswith("_"):
                    continue
                methods[name] = value
            return methods

        baseclass = baseinst.__class__.__name__
        basemethods = get_public_apis(baseinst)
        implmethods = get_public_apis(inst)

        extranames = []
        for name in sorted(implmethods.keys()):
            if name not in basemethods:
                extranames.append(name)

        self.assertEqual([], extranames,
                         "public APIs not listed in base class %s" %
                         baseclass)

        for name in sorted(implmethods.keys()):
            baseargs = inspect.getargspec(basemethods[name])
            implargs = inspect.getargspec(implmethods[name])

            self.assertEqual(baseargs, implargs,
                             "%s args don't match base class %s" %
                             (name, baseclass))
Example #23
def is_simple_callable(obj):
    """
    True if the object is a callable that takes no arguments.
    """
    return (inspect.isfunction(obj) and not inspect.getargspec(obj)[0]) or (
        inspect.ismethod(obj) and len(inspect.getargspec(obj)[0]) <= 1
    )
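
A quick check of is_simple_callable above: zero-argument functions and bound methods that take only self qualify, anything that needs arguments does not.

def no_args():
    return 1

def needs_arg(x):
    return x

class Thing(object):
    def value(self):
        return 2

assert is_simple_callable(no_args)
assert not is_simple_callable(needs_arg)
assert is_simple_callable(Thing().value)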
Example #24
def test_api_methods_params(method_name):
  cluster = create_autospec(spec=Cluster, instance=True)
  # cant use mock here; need to inspect methods
  api = HookedAuroraClientAPI(cluster=cluster, user_agent="test-client")

  hooked_method = getattr(api, method_name)
  nonhooked_method = getattr(super(HookedAuroraClientAPI, api), method_name)
  api_method = getattr(super(NonHookedAuroraClientAPI, api), method_name)

  if method_name in API_METHODS_WITH_CONFIG_PARAM_ADDED:
    assert api_method != nonhooked_method
  assert nonhooked_method != hooked_method

  api_argspec = getargspec(api_method)
  hooked_argspec = getargspec(hooked_method)
  nonhooked_argspec = getargspec(nonhooked_method)

  if method_name in API_METHODS_WITH_CONFIG_PARAM_ADDED:
    assert api_argspec.varargs == nonhooked_argspec.varargs
    assert api_argspec.keywords == nonhooked_argspec.keywords
    assert len(api_argspec.args) + 1 == len(nonhooked_argspec.args)
    assert nonhooked_argspec.args[len(api_argspec.args)] == 'config'
    if api_argspec.defaults is None:
      assert len(nonhooked_argspec.defaults) == 1
      assert nonhooked_argspec.defaults[0] is None
    else:
      assert len(api_argspec.defaults) + 1 == len(nonhooked_argspec.defaults)
      assert nonhooked_argspec.defaults[len(api_argspec.defaults)] is None
  else:
    assert nonhooked_argspec == hooked_argspec
  assert nonhooked_argspec == nonhooked_argspec
Example #25
        def cached_solver(solver, neuron_type, gain, bias, x, targets,
                          rng=None, E=None):
            try:
                args, _, _, defaults = inspect.getargspec(solver)
            except TypeError:
                args, _, _, defaults = inspect.getargspec(solver.__call__)
            args = args[-len(defaults):]
            if rng is None and 'rng' in args:
                rng = defaults[args.index('rng')]
            if E is None and 'E' in args:
                E = defaults[args.index('E')]

            key = self._get_cache_key(
                solver_fn, solver, neuron_type, gain, bias, x, targets, rng, E)
            try:
                path, start, end = self._index[key]
                if self._fd is not None:
                    self._fd.flush()
                with open(path, 'rb') as f:
                    f.seek(start)
                    solver_info, decoders = nco.read(f)
            except:
                logger.debug("Cache miss [%s].", key)
                decoders, solver_info = solver_fn(
                    solver, neuron_type, gain, bias, x, targets, rng=rng, E=E)
                if not self.readonly:
                    fd = self._get_fd()
                    start = fd.tell()
                    nco.write(fd, solver_info, decoders)
                    end = fd.tell()
                    self._index[key] = (fd.name, start, end)
            else:
                logger.debug("Cache hit [%s]: Loaded stored decoders.", key)
            return decoders, solver_info
Example #26
def report_on_item_ourselves(name, value):
    lines = []
    if isfunction(value):
        lines.append(wrapargs('FUNCTION %s%s'%(name, formatargspec(*getargspec(value)))))
        lines.extend(report__doc__(value))
    elif ismethod(value):
        lines.append(wrapargs('METHOD %s%s'%(name, formatargspec(*getargspec(value)))))
        lines.extend(report__doc__(value))
    elif isgenerator(value):
        lines.append(wrapargs('GENERATOR %s%s'%(name, formatargspec(*getargspec(value)))))
        lines.extend(report__doc__(value))
    elif isclass(value):
        mro = getmro(value)
        supers = []
        for c in mro[1:]:
            supers.append(c.__name__)
        lines.append('CLASS %s(%s)'%(name, ', '.join(supers)))
        lines.extend(report__doc__(value))
        lines.append('')
        lines.append(wrapargs('METHOD %s.__init__%s'%(name, formatargspec(*getargspec(value.__init__)))))
        lines.extend(report__doc__(value.__init__))
        lines.append('')
        lines.extend(describe_contents(name, value))
    elif ismodule(value):
        lines.append('MODULE %s'%name)
        lines.extend(report__doc__(value))
        lines.append('')
        lines.extend(describe_contents(name, value))
    else:
        lines.append('SOMETHING ELSE: %s'%name)
        lines.append(value)
        lines.append('')
        lines.extend(describe_contents(name, value))
    return lines
Example #27
def curry(f):
    """
    Decorator to autocurry a function.

    >>> @curry
    ... def f(x,y,z):
    ...   return x+y+z

    >>> 6 ==  f(1,2,3) == f(1)(2,3) == f(1)(2)(3) == f(1)()(2)()(3)
    True
    """
    if isinstance(f, partial):
        num_args = len(inspect.getargspec(f.func)[0]) - len(f.args)
        args = f.args
        f = f.func
    else:
        num_args = len(inspect.getargspec(f)[0])
        args = []

    @wraps(f)
    def curry_(*a, **k):
        if len(a) == num_args:
            return f(*chain(args, a), **k)
        else:
            return curry(partial(f, *chain(args, a), **k))
    return curry_
Example #28
    def _runTask(self, *argv, **kwargv):

        """ 
        The method to run the decorated function _taskFun(). It is called through __call__() of
        the PypeTask object and it should never be called directly

        TODO: the arg processing is still a mess, need to find a better way to do this
        """
        
        if PYTHONVERSION == (2,5):
            (args, varargs, varkw, defaults)  = inspect.getargspec(self._taskFun)
            #print  (args, varargs, varkw, defaults)
        else:
            argspec = inspect.getargspec(self._taskFun)
            (args, varargs, varkw, defaults) = argspec.args, argspec.varargs, argspec.keywords, argspec.defaults

        if varkw != None:
            return self._taskFun(self, *argv, **kwargv)
        elif varargs != None:
            return self._taskFun(self, *argv)
        elif len(args) != 0:
            nkwarg = {}
            if defaults != None:
                defaultArg = args[-len(defaults):]
                for a in defaultArg:
                    nkwarg[a] = kwargv[a]
                return self._taskFun(self, *argv, **nkwarg)
            else:
                return self._taskFun(self)
        else:
            return self._taskFun(self)
Example #29
    def on_dropped(self, drag_result):
        """ Called when the data has been dropped. """

        if self.handler is not None:
            if hasattr(self.handler, 'on_dropped'):
                # For backward compatibility we accept handler functions
                # with either 1 or 3 args, including self.  If there are
                # 3 args then we pass the data and the drag_result.
                args = inspect.getargspec(self.handler.on_dropped)[0]
                if len(args) == 3:
                    self.handler.on_dropped(clipboard.data, drag_result)
                else:
                    self.handler.on_dropped()
            else:
                #print self.handler

                # In this case we assume handler is a function.
                # For backward compatibility we accept handler functions
                # with either 0 or 2 args.  If there are 2 args then
                # we pass the data and drag_result
                args = inspect.getargspec(self.handler)[0]
                if len(args)==2:
                    self.handler(clipboard.data, drag_result)
                else:
                    self.handler()

        return
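
The argument-counting compatibility trick used in on_dropped above, shown in isolation (the handler names here are hypothetical): count the declared parameters to decide which call form a handler supports.

import inspect

def old_style():
    return 'no data'

def new_style(data, drag_result):
    return (data, drag_result)

def dispatch(handler, data, drag_result):
    if len(inspect.getargspec(handler)[0]) == 2:
        return handler(data, drag_result)
    return handler()

assert dispatch(old_style, 'x', 1) == 'no data'
assert dispatch(new_style, 'x', 1) == ('x', 1)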
Example #30
def get_function_signature(function, method=True):
    wrapped = getattr(function, '_original_function', None)
    if wrapped is None:
        signature = inspect.getargspec(function)
    else:
        signature = inspect.getargspec(wrapped)
    defaults = signature.defaults
    if method:
        args = signature.args[1:]
    else:
        args = signature.args
    if defaults:
        kwargs = zip(args[-len(defaults):], defaults)
        args = args[:-len(defaults)]
    else:
        kwargs = []
    st = '%s.%s(' % (function.__module__, function.__name__)

    for a in args:
        st += str(a) + ', '
    for a, v in kwargs:
        if isinstance(v, str):
            v = '\'' + v + '\''
        st += str(a) + '=' + str(v) + ', '
    if kwargs or args:
        signature = st[:-2] + ')'
    else:
        signature = st + ')'

    if not method:
        # Prepend the module name.
        signature = function.__module__ + '.' + signature
    return post_process_signature(signature)
Example #31
def _validate_func_with_n_args(nargs, func):
    p = inspect.getargspec(func)
    if len(p.args) != nargs:
        raise ValueError("Expected func {x} to have {n} args. Got {y}".format(x=func.__name__, n=nargs, y=len(p.args)))
    return func
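
Usage sketch for _validate_func_with_n_args above: the function is returned unchanged when the arity matches, otherwise a ValueError is raised.

def add(a, b):
    return a + b

assert _validate_func_with_n_args(2, add) is add

try:
    _validate_func_with_n_args(3, add)
except ValueError:
    pass  # expected: add declares 2 arguments, not 3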
Example #32
def _is_path_fn(func):
    return (inspect.getargspec(func)[0] or [None])[0] == 'path'
Example #33
def _make_dispatcher(dispatcher_cls, func, params_arity):
    argspec = inspect.getargspec(func)
    wrapper = functools.wraps(func)
    dispatcher = wrapper(dispatcher_cls(argspec, params_arity))
    return dispatcher
Example #34
def CommandProvider(cls):
    """Class decorator to denote that it provides subcommands for Mach.

    When this decorator is present, mach looks for commands being defined by
    methods inside the class.
    """

    # The implementation of this decorator relies on the parse-time behavior of
    # decorators. When the module is imported, the method decorators (like
    # @Command and @CommandArgument) are called *before* this class decorator.
    # The side-effect of the method decorators is to store specifically-named
    # attributes on the function types. We just scan over all functions in the
    # class looking for the side-effects of the method decorators.

    # Tell mach driver whether to pass context argument to __init__.
    pass_context = False

    if inspect.ismethod(cls.__init__):
        spec = inspect.getargspec(cls.__init__)

        if len(spec.args) > 2:
            msg = 'Mach @CommandProvider class %s implemented incorrectly. ' + \
                  '__init__() must take 1 or 2 arguments. From %s'
            msg = msg % (cls.__name__, inspect.getsourcefile(cls))
            raise MachError(msg)

        if len(spec.args) == 2:
            pass_context = True

    seen_commands = set()

    # We scan __dict__ because we only care about the classes own attributes,
    # not inherited ones. If we did inherited attributes, we could potentially
    # define commands multiple times. We also sort keys so commands defined in
    # the same class are grouped in a sane order.
    for attr in sorted(cls.__dict__.keys()):
        value = cls.__dict__[attr]

        if not isinstance(value, types.FunctionType):
            continue

        command = getattr(value, '_mach_command', None)
        if not command:
            continue

        # Ignore subcommands for now: we handle them later.
        if command.subcommand:
            continue

        seen_commands.add(command.name)

        if not command.conditions and Registrar.require_conditions:
            continue

        msg = 'Mach command \'%s\' implemented incorrectly. ' + \
              'Conditions argument must take a list ' + \
              'of functions. Found %s instead.'

        if not isinstance(command.conditions, collections.Iterable):
            msg = msg % (command.name, type(command.conditions))
            raise MachError(msg)

        for c in command.conditions:
            if not hasattr(c, '__call__'):
                msg = msg % (command.name, type(c))
                raise MachError(msg)

        command.cls = cls
        command.method = attr
        command.pass_context = pass_context

        Registrar.register_command_handler(command)

    # Now do another pass to get sub-commands. We do this in two passes so
    # we can check the parent command existence without having to hold
    # state and reconcile after traversal.
    for attr in sorted(cls.__dict__.keys()):
        value = cls.__dict__[attr]

        if not isinstance(value, types.FunctionType):
            continue

        command = getattr(value, '_mach_command', None)
        if not command:
            continue

        # It is a regular command.
        if not command.subcommand:
            continue

        if command.name not in seen_commands:
            raise MachError('Command referenced by sub-command does not '
                            'exist: %s' % command.name)

        if command.name not in Registrar.command_handlers:
            continue

        command.cls = cls
        command.method = attr
        command.pass_context = pass_context
        parent = Registrar.command_handlers[command.name]

        if command.subcommand in parent.subcommand_handlers:
            raise MachError('sub-command already defined: %s' %
                            command.subcommand)

        parent.subcommand_handlers[command.subcommand] = command

    return cls
Example #35
def _Promote(arg, klass):
    """Wrap an argument in an object of the specified class.

  This is used to e.g.: promote numbers or strings to Images and arrays
  to Collections.

  Args:
    arg: The object to promote.
    klass: The expected type.

  Returns:
    The argument promoted if the class is recognized, otherwise the
    original argument.
  """
    if arg is None:
        return arg

    if klass == 'Image':
        return Image(arg)
    elif klass == 'Feature':
        if isinstance(arg, Collection):
            # TODO(user): Decide whether we want to leave this in. It can be
            #              quite dangerous on large collections.
            return ApiFunction.call_(
                'Feature', ApiFunction.call_('Collection.geometry', arg))
        else:
            return Feature(arg)
    elif klass == 'Element':
        if isinstance(arg, Element):
            # Already an Element.
            return arg
        elif isinstance(arg, Geometry):
            # Geometries get promoted to Features.
            return Feature(arg)
        elif isinstance(arg, ComputedObject):
            # Try a cast.
            return Element(arg.func, arg.args, arg.varName)
        else:
            # No way to convert.
            raise EEException('Cannot convert {0} to Element.'.format(arg))
    elif klass == 'Geometry':
        if isinstance(arg, Collection):
            return ApiFunction.call_('Collection.geometry', arg)
        else:
            return Geometry(arg)
    elif klass in ('FeatureCollection', 'Collection'):
        # For now Collection is synonymous with FeatureCollection.
        if isinstance(arg, Collection):
            return arg
        else:
            return FeatureCollection(arg)
    elif klass == 'ImageCollection':
        return ImageCollection(arg)
    elif klass == 'Filter':
        return Filter(arg)
    elif klass == 'Algorithm':
        if isinstance(arg, six.string_types):
            # An API function name.
            return ApiFunction.lookup(arg)
        elif callable(arg):
            # A native function that needs to be wrapped.
            args_count = len(inspect.getargspec(arg).args)
            return CustomFunction.create(arg, 'Object',
                                         ['Object'] * args_count)
        elif isinstance(arg, Encodable):
            # An ee.Function or a computed function like the return value of
            # Image.parseExpression().
            return arg
        else:
            raise EEException('Argument is not a function: {0}'.format(arg))
    elif klass == 'Dictionary':
        if isinstance(arg, dict):
            return arg
        else:
            return Dictionary(arg)
    elif klass == 'String':
        if (types.isString(arg) or isinstance(arg, ComputedObject)
                or isinstance(arg, String)):
            return String(arg)
        else:
            return arg
    elif klass == 'List':
        return List(arg)
    elif klass in ('Number', 'Float', 'Long', 'Integer', 'Short', 'Byte'):
        return Number(arg)
    elif klass in globals():
        cls = globals()[klass]
        ctor = ApiFunction.lookupInternal(klass)
        # Handle dynamically created classes.
        if isinstance(arg, cls):
            # Return unchanged.
            return arg
        elif ctor:
            # The client-side constructor will call the server-side constructor.
            return cls(arg)
        elif isinstance(arg, six.string_types):
            if hasattr(cls, arg):
                # arg is the name of a method in klass.
                return getattr(cls, arg)()
            else:
                raise EEException('Unknown algorithm: {0}.{1}'.format(
                    klass, arg))
        else:
            # Client-side cast.
            return cls(arg)
    else:
        return arg
Example #36
 def functionArgs(self,funcName):
     """ finds the arguments of a function from the extended class """
     func = getattr(self,funcName)
     args = inspect.getargspec(func)[0]
     del args[0] # removes 'self'
     return args
Example #37
 def numberOfFunctionArgs(self,funcName):
     """ finds the number of arguments of a function from the extended class """
     func = getattr(self,funcName)
     return len(inspect.getargspec(func)[0])-1
Example #38
 def __argspec(fn):
     # Support for Python 3 type hints requires inspect.getfullargspec
     if hasattr(inspect, 'getfullargspec'):
         return inspect.getfullargspec(fn)
     else:
         return inspect.getargspec(fn)
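
The same compatibility pattern as __argspec above, written as a standalone sketch: prefer getfullargspec where it exists (it also understands keyword-only arguments and annotations) and fall back to getargspec on old interpreters.

import inspect

def _argspec(fn):
    if hasattr(inspect, 'getfullargspec'):
        return inspect.getfullargspec(fn)
    return inspect.getargspec(fn)

def greet(name, greeting='hello'):
    return '%s, %s' % (greeting, name)

spec = _argspec(greet)
assert spec.args == ['name', 'greeting']
assert spec.defaults == ('hello',)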
Example #39
import inspect

from locale import getlocale, LC_TIME

try:
    import babel
except ImportError:
    babel = None
else:
    from babel import Locale
    from babel.core import LOCALE_ALIASES, UnknownLocaleError
    from babel.dates import (format_datetime as babel_format_datetime,
                             format_date as babel_format_date, format_time as
                             babel_format_time, get_datetime_format,
                             get_date_format, get_time_format, get_month_names,
                             get_period_names as babel_get_period_names,
                             get_day_names)
    if 'context' in inspect.getargspec(babel_get_period_names)[0]:

        def get_period_names(locale=None):
            return babel_get_period_names(context='format', locale=locale)
    else:
        get_period_names = babel_get_period_names

from trac.core import TracError
from trac.util.text import to_unicode, getpreferredencoding
from trac.util.translation import _, ngettext

Example #40
 def hasParameterless(func_):
     argspec = _inspect.getargspec(func_)
     return any(argspec)
Example #41
 def __init__(self, func):
     self.func = func
     self.multiple_args = len(inspect.getargspec(func).args) > 1
Example #42
def function_args(fun):
    """Returns the name of :obj:`fun` arguments."""
    if hasattr(inspect, "getfullargspec"):
        return inspect.getfullargspec(fun).args
    return inspect.getargspec(fun).args  # pylint: disable=deprecated-method
Example #43
    def dec(func):
        params, varargs, varkw, defaults = getargspec(func)

        class InclusionNode(TagHelperNode):
            def render(self, context):
                resolved_args, resolved_kwargs = self.get_resolved_arguments(
                    context)

                if takes_context:
                    args = [context] + resolved_args
                else:
                    args = resolved_args

                if cache_key_func:
                    cache_key = cache_key_func(*args)
                else:
                    cache_key = None

                if cache_key != None:
                    retVal = cache.get(cache_key)
                    if retVal:
                        return retVal

                _dict = func(*resolved_args, **resolved_kwargs)

                if not getattr(self, 'nodelist', False):
                    from django.template.loader import get_template, select_template
                    if isinstance(file_name, Template):
                        t = file_name
                    elif not isinstance(file_name,
                                        basestring) and is_iterable(file_name):
                        t = select_template(file_name)
                    else:
                        t = get_template(file_name)
                    self.nodelist = t.nodelist
                new_context = context_class(
                    _dict, **{
                        'autoescape': context.autoescape,
                        'current_app': context.current_app,
                        'use_l10n': context.use_l10n,
                        'use_tz': context.use_tz,
                    })
                # Copy across the CSRF token, if present, because
                # inclusion tags are often used for forms, and we need
                # instructions for using CSRF protection to be as simple
                # as possible.
                csrf_token = context.get('csrf_token', None)
                if csrf_token is not None:
                    new_context['csrf_token'] = csrf_token

                retVal = self.nodelist.render(new_context)
                if cache_key != None:
                    cache.set(cache_key, retVal, cache_time)
                return retVal

        function_name = (name or getattr(func, '_decorated_function',
                                         func).__name__)
        compile_func = partial(generic_tag_compiler,
                               params=params,
                               varargs=varargs,
                               varkw=varkw,
                               defaults=defaults,
                               name=function_name,
                               takes_context=takes_context,
                               node_class=InclusionNode)
        compile_func.__doc__ = func.__doc__
        self.tag(function_name, compile_func)
        return func
Example #44
def _argc(func):
    if IS_PY3:
        return len(inspect.signature(func).parameters)
    else:
        return len(inspect.getargspec(func).args)
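
A quick check of _argc above (IS_PY3 is assumed to be defined elsewhere in its module): it counts every declared parameter, including those with defaults.

def f(a, b, c=3):
    return a + b + c

assert _argc(f) == 3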
Example #45
 def signature_or_spec(func):
     try:
         return inspect.getargspec(func)
     except TypeError:
         return None
Example #46
def getNumArgs(func):
    (args, varargs, varkw, defaults) = inspect.getargspec(func)
    return len(args)
Example #47
    def __init__(self, endpoint_):
        if self.var_pat is None:
            Route.var_pat = self.var_pat = re.compile(r'{(.+?)}')
        self.endpoint = endpoint_
        del endpoint_
        if not self.endpoint.route.startswith('/'):
            raise RouteError('A route must start with /, %s does not' % self.endpoint.route)
        parts = list(filter(None, self.endpoint.route.split('/')))
        matchers = self.matchers = []
        self.defaults = {}
        found_optional_part = False
        self.soak_up_extra = False
        self.type_checkers = self.endpoint.types.copy()

        def route_error(msg):
            return RouteError('%s is not valid: %s' % (self.endpoint.route, msg))

        for i, p in enumerate(parts):
            if p[0] == '{':
                if p[-1] != '}':
                    raise route_error('Invalid route, variable components must be in a {}')
                name = p[1:-1]
                is_sponge = name.startswith('+')
                if is_sponge:
                    if p is not parts[-1]:
                        raise route_error('Can only specify + in the last component')
                    name = name[1:]

                if '=' in name:
                    found_optional_part = i
                    name, default = name.partition('=')[::2]
                    if '{' in default or '}' in default:
                        raise route_error('The characters {} are not allowed in default values')
                    default = self.defaults[name] = eval(default)
                    if isinstance(default, numbers.Number):
                        self.type_checkers[name] = type(default)
                    if is_sponge and not isinstance(default, type('')):
                        raise route_error('Soak up path component must have a default value of string type')
                else:
                    if found_optional_part is not False:
                        raise route_error('Cannot have non-optional path components after optional ones')
                if is_sponge:
                    self.soak_up_extra = name
                matchers.append((name, True))
            else:
                if found_optional_part is not False:
                    raise route_error('Cannot have non-optional path components after optional ones')
                matchers.append((None, p.__eq__))
        self.names = [n for n, m in matchers if n is not None]
        self.all_names = frozenset(self.names)
        self.required_names = self.all_names - frozenset(self.defaults)
        if ispy3:
            argspec = inspect.getfullargspec(self.endpoint)
        else:
            argspec = inspect.getargspec(self.endpoint)
        if len(self.names) + 2 != len(argspec.args) - len(argspec.defaults or ()):
            raise route_error('Function must take %d non-default arguments' % (len(self.names) + 2))
        if argspec.args[2:len(self.names)+2] != self.names:
            raise route_error('Function\'s argument names do not match the variable names in the route')
        if not frozenset(self.type_checkers).issubset(frozenset(self.names)):
            raise route_error('There exist type checkers that do not correspond to route variables: %r' % (set(self.type_checkers) - set(self.names)))
        self.min_size = found_optional_part if found_optional_part is not False else len(matchers)
        self.max_size = sys.maxsize if self.soak_up_extra else len(matchers)
Example #48
def extract_runnable_dict(funwidget_list):
    """
    Extract a dictionary from a FunctionWidget list in the appropriate format to save as a python runnable.

    Parameters
    ----------
    funwidget_list : list of FunctionWidgets
        list of FunctionWidgets exposed in the UI workflow pipeline

    Returns
    -------
    dict
        dictionary specifying the workflow pipeline and important parameters
    """
    center_functions = {
        'find_center_pc': {
            'proj1': 'tomo[0]',
            'proj2': 'tomo[-1]'
        }
    }

    d = OrderedDict()
    func_dict = OrderedDict()
    subfuncs = OrderedDict()
    count = 1
    for f in funwidget_list:
        keywords = {}
        if not f.enabled or 'Reader' in f.name:
            continue

        func = "{}.{}".format(f.package, f._function.func_name)
        if 'xicam' in func:
            func = func.split(".")[-1]
        fpartial = f.partial
        for key, val in fpartial.keywords.iteritems():
            keywords[key] = val
        for arg in inspect.getargspec(f._function)[0]:
            if arg not in f.partial.keywords.iterkeys() or 'center' in arg:
                keywords[arg] = arg

        # get rid of degenerate keyword arguments
        if 'arr' in keywords and 'tomo' in keywords:
            keywords['tomo'] = keywords['arr']
            keywords.pop('arr', None)

        # special cases for the 'write' function
        if 'start' in keywords:
            keywords['start'] = 'start'
        if 'Write' in f.name:
            keywords.pop('parent folder', None)
            keywords.pop('folder name', None)
            keywords.pop('file name', None)

        if 'Reconstruction' in f.name:
            for param, ipf in f.input_functions.iteritems():
                if 'theta' in param or 'center' in param:
                    subfunc = "{}.{}(".format(ipf.package,
                                              ipf._function.func_name)
                    for key, val in ipf.partial.keywords.iteritems():
                        subfunc += "{}={},".format(key, val) if not isinstance(val, str) \
                            else '{}=\'{}\','.format(key, val)
                    for cor_func in center_functions.iterkeys():
                        if ipf._function.func_name in cor_func:
                            for k, v in center_functions[cor_func].iteritems():
                                subfunc += "{}={},".format(k, v)
                    subfunc += ")"
                    subfuncs[param] = subfunc
            if 'astra' in keywords['algorithm']:
                keywords['algorithm'] = 'tomopy.astra'

        func_dict[str(count) + ". " + func] = keywords
        count += 1

    d['func'] = func_dict
    d['subfunc'] = subfuncs
    return d
Example #49
    def build(func, node_class):
        """
        Build a Process from the given function.

        All function arguments will be assigned as process inputs. If keyword arguments are specified then
        these will also become inputs.

        :param func: The function to build a process from
        :type func: callable

        :param node_class: Provide a custom node class to be used; it has to be constructible with no arguments and
            must be a subclass of `ProcessNode` and the mixin :class:`~aiida.orm.utils.mixins.FunctionCalculationMixin`.
        :type node_class: :class:`aiida.orm.nodes.process.process.ProcessNode`

        :return: A Process class that represents the function
        :rtype: :class:`FunctionProcess`
        """
        from aiida import orm
        from aiida.orm import ProcessNode
        from aiida.orm.utils.mixins import FunctionCalculationMixin

        if not issubclass(node_class, ProcessNode) or not issubclass(
                node_class, FunctionCalculationMixin):
            raise TypeError(
                'the node_class should be a sub class of `ProcessNode` and `FunctionCalculationMixin`'
            )

        if PY2:
            args, varargs, keywords, defaults = inspect.getargspec(func)  # pylint: disable=deprecated-method
        else:
            args, varargs, keywords, defaults, _, _, _ = inspect.getfullargspec(
                func)  # pylint: disable=no-member
        nargs = len(args)
        ndefaults = len(defaults) if defaults else 0
        first_default_pos = nargs - ndefaults

        if varargs is not None:
            raise ValueError('variadic arguments are not supported')

        def _define(cls, spec):
            """Define the spec dynamically"""
            super(FunctionProcess, cls).define(spec)

            for i, arg in enumerate(args):
                default = ()
                if i >= first_default_pos:
                    default = defaults[i - first_default_pos]

                # If the keyword was already specified, simply override the default
                if spec.has_input(arg):
                    spec.inputs[arg].default = default
                else:
                    # If the default is `None` make sure that the port also accepts a `NoneType`
                    # Note that we cannot use `None` because the validation will call `isinstance` which does not work
                    # when passing `None`, but it does work with `NoneType` which is returned by calling `type(None)`
                    if default is None:
                        valid_type = (orm.Data, type(None))
                    else:
                        valid_type = (orm.Data, )

                    spec.input(arg, valid_type=valid_type, default=default)

            # Set defaults for label and description based on function name and docstring, if not explicitly defined
            port_label = spec.inputs['metadata']['label']

            if not port_label.has_default():
                port_label.default = func.__name__

            # If the function supports kwargs then allow dynamic inputs, otherwise disallow
            spec.inputs.dynamic = keywords is not None

            # Function processes must have a dynamic output namespace since we do not know beforehand what outputs
            # will be returned and the valid types for the value should be `Data` nodes as well as a dictionary because
            # the output namespace can be nested.
            spec.outputs.valid_type = (orm.Data, dict)

        return type(
            func.__name__, (FunctionProcess, ), {
                '__module__': func.__module__,
                '__name__': func.__name__,
                '_func': staticmethod(func),
                Process.define.__name__: classmethod(_define),
                '_func_args': args,
                '_node_class': node_class
            })
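The `first_default_pos` arithmetic in `_define` above lines up the trailing `defaults` tuple with the argument names. A minimal sketch of the same pairing outside of aiida (the `add` function is a made-up example):

import inspect

def pair_defaults(func):
    spec = inspect.getfullargspec(func)
    ndefaults = len(spec.defaults) if spec.defaults else 0
    first_default_pos = len(spec.args) - ndefaults
    pairs = {}
    for i, arg in enumerate(spec.args):
        # Arguments before first_default_pos have no declared default.
        pairs[arg] = spec.defaults[i - first_default_pos] if i >= first_default_pos else None
    return pairs

def add(x, y, scale=1, offset=0):
    return (x + y) * scale + offset

print(pair_defaults(add))  # {'x': None, 'y': None, 'scale': 1, 'offset': 0}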
Example #50
0
 def get_args_list(cls, method):
     '''Get argument list from wrapped class.'''
     return inspect.getargspec(method).args
def train_PG(
        exp_name='',
        env_name='CartPole-v0',
        n_iter=100,
        gamma=1.0,
        min_timesteps_per_batch=1000,
        max_path_length=None,
        learning_rate=5e-3,
        reward_to_go=True,
        animate=True,
        logdir=None,
        normalize_advantages=True,
        nn_baseline=False,
        seed=0,
        # network arguments
        n_layers=1,
        size=32):

    start = time.time()

    # Configure output directory for logging
    logz.configure_output_dir(logdir)

    # Log experimental parameters
    args = inspect.getargspec(train_PG)[0]
    locals_ = locals()
    params = {k: locals_[k] if k in locals_ else None for k in args}
    logz.save_params(params)

    # Set random seeds
    tf.set_random_seed(seed)
    np.random.seed(seed)

    # Make the gym environment
    env = gym.make(env_name)

    # Is this env continuous, or discrete?
    discrete = isinstance(env.action_space, gym.spaces.Discrete)

    # Maximum length for episodes
    max_path_length = max_path_length or env.spec.max_episode_steps

    #========================================================================================#
    # Notes on notation:
    #
    # Symbolic variables have the prefix sy_, to distinguish them from the numerical values
    # that are computed later in the function
    #
    # Prefixes and suffixes:
    # ob - observation
    # ac - action
    # _no - this tensor should have shape (batch size /n/, observation dim)
    # _na - this tensor should have shape (batch size /n/, action dim)
    # _n  - this tensor should have shape (batch size /n/)
    #
    # Note: batch size /n/ is defined at runtime, and until then, the shape for that axis
    # is None
    #========================================================================================#

    # Observation and action sizes
    ob_dim = env.observation_space.shape[0]
    ac_dim = env.action_space.n if discrete else env.action_space.shape[0]

    #========================================================================================#
    #                           ----------SECTION 4----------
    # Placeholders
    #
    # Need these for batch observations / actions / advantages in policy gradient loss function.
    #========================================================================================#

    sy_ob_no = tf.placeholder(shape=[None, ob_dim],
                              name="ob",
                              dtype=tf.float32)
    if discrete:
        sy_ac_na = tf.placeholder(shape=[None], name="ac", dtype=tf.int32)
    else:
        sy_ac_na = tf.placeholder(shape=[None, ac_dim],
                                  name="ac",
                                  dtype=tf.float32)

    # Define a placeholder for advantages
    sy_adv_n = tf.placeholder(shape=[None], name="adv", dtype=tf.float32)

    #========================================================================================#
    #                           ----------SECTION 4----------
    # Networks
    #
    # Make symbolic operations for
    #   1. Policy network outputs which describe the policy distribution.
    #       a. For the discrete case, just logits for each action.
    #
    #       b. For the continuous case, the mean / log std of a Gaussian distribution over
    #          actions.
    #
    #      Hint: use the 'build_mlp' function you defined in utilities.
    #
    #      Note: these ops should be functions of the placeholder 'sy_ob_no'
    #
    #   2. Producing samples stochastically from the policy distribution.
    #       a. For the discrete case, an op that takes in logits and produces actions.
    #
    #          Should have shape [None]
    #
    #       b. For the continuous case, use the reparameterization trick:
    #          The output from a Gaussian distribution with mean 'mu' and std 'sigma' is
    #
    #               mu + sigma * z,         z ~ N(0, I)
    #
    #          This reduces the problem to just sampling z. (Hint: use tf.random_normal!)
    #
    #          Should have shape [None, ac_dim]
    #
    #      Note: these ops should be functions of the policy network output ops.
    #
    #   3. Computing the log probability of a set of actions that were actually taken,
    #      according to the policy.
    #
    #      Note: these ops should be functions of the placeholder 'sy_ac_na', and the
    #      policy network output ops.
    #
    #========================================================================================#

    if discrete:
        sy_logits_na = build_mlp(
            input_placeholder=sy_ob_no,
            output_size=ac_dim,
            scope="build_nn",
            n_layers=n_layers,
            size=size,
            activation=tf.nn.relu)  # Avoid softmax on output layer,
        # as we are going to use softmax_cross_entropy for logprob.
        # See https://www.tensorflow.org/api_docs/python/tf/nn/
        # softmax_cross_entropy_with_logits
        # [Learn] Use tf.multinomial
        # [Learn] Use tf.squeeze to get rid of dimension=1
        sy_sampled_ac = tf.squeeze(tf.multinomial(sy_logits_na, 1), axis=[1])
        # [Learn] sparse_softmax_cross_entropy_with_logits
        #   skip one hot encoding in softmax_cross_entropy
        sy_logprob_n = tf.nn.sparse_softmax_cross_entropy_with_logits(
            labels=sy_ac_na, logits=sy_logits_na)

    else:
        sy_mean = build_mlp(input_placeholder=sy_ob_no,
                            output_size=ac_dim,
                            scope="build_nn",
                            n_layers=n_layers,
                            size=size,
                            activation=tf.nn.relu)

        # logstd should just be a trainable variable, not a network output??
        sy_logstd = tf.get_variable("logstd", shape=[ac_dim])
        sy_sampled_ac = sy_mean + tf.multiply(
            tf.exp(sy_logstd), tf.random_normal(tf.shape(sy_mean)))
        # Hint: Use the log probability under a multivariate gaussian.
        # Learned from github.com/mwhittaker
        # [Learn] MultivariateNormalDiag in tensorflow library
        dist = tf.contrib.distributions.MultivariateNormalDiag(
            loc=sy_mean, scale_diag=tf.exp(sy_logstd))
        sy_logprob_n = -dist.log_prob(sy_ac_na)
        # [Learn] Negating here works;
        #   negating in loss / weighted_negative_likelihood does not.

    #========================================================================================#
    #                           ----------SECTION 4----------
    # Loss Function and Training Operation
    #========================================================================================#

    # [Learn] Multiply log by Q value, in differentiation, get \nabla log * Q.
    weighted_negative_likelihood = tf.multiply(sy_logprob_n, sy_adv_n)
    loss = tf.reduce_mean(weighted_negative_likelihood)
    update_op = tf.train.AdamOptimizer(learning_rate).minimize(loss)

    #========================================================================================#
    #                           ----------SECTION 5----------
    # Optional Baseline
    #========================================================================================#

    if nn_baseline:
        baseline_prediction = tf.squeeze(
            build_mlp(input_placeholder=sy_ob_no,
                      output_size=1,
                      scope="nn_baseline",
                      n_layers=n_layers,
                      size=size))
        # Define placeholders for targets, a loss function and an update op for fitting a
        # neural network baseline. These will be used to fit the neural network baseline.
        baseline_target = tf.placeholder(shape=[None], dtype=tf.float32)
        baseline_loss = tf.losses.mean_squared_error(
            predictions=baseline_prediction, labels=baseline_target)
        baseline_update_op = tf.train.AdamOptimizer(learning_rate).minimize(
            baseline_loss)

    #========================================================================================#
    # Tensorflow Engineering: Config, Session, Variable initialization
    #========================================================================================#

    tf_config = tf.ConfigProto(inter_op_parallelism_threads=1,
                               intra_op_parallelism_threads=1)

    sess = tf.Session(config=tf_config)
    sess.__enter__()  # equivalent to `with sess:`
    tf.global_variables_initializer().run()  #pylint: disable=E1101

    #========================================================================================#
    # Training Loop
    #========================================================================================#

    total_timesteps = 0

    for itr in range(n_iter):
        print("********** Iteration %i ************" % itr)

        # Collect paths until we have enough timesteps
        timesteps_this_batch = 0
        paths = []
        while True:
            ob = env.reset()
            obs, acs, rewards = [], [], []
            animate_this_episode = (len(paths) == 0 and (itr % 10 == 0)
                                    and animate)
            steps = 0
            while True:
                if animate_this_episode:
                    env.render()
                    time.sleep(0.05)
                obs.append(ob)
                ac = sess.run(sy_sampled_ac, feed_dict={sy_ob_no: ob[None]})
                ac = ac[0]
                acs.append(ac)
                ob, rew, done, _ = env.step(ac)
                rewards.append(rew)
                steps += 1
                if done or steps > max_path_length:
                    break
            path = {
                "observation": np.array(obs),
                "reward": np.array(rewards),
                "action": np.array(acs)
            }
            paths.append(path)
            timesteps_this_batch += pathlength(path)
            if timesteps_this_batch > min_timesteps_per_batch:
                break
        total_timesteps += timesteps_this_batch

        # Build arrays for observation, action for the policy gradient update by concatenating
        # across paths
        ob_no = np.concatenate([path["observation"] for path in paths])
        ac_na = np.concatenate([path["action"] for path in paths])

        #====================================================================================#
        #                           ----------SECTION 4----------
        # Computing Q-values
        #
        # Your code should construct numpy arrays for Q-values which will be used to compute
        # advantages (which will in turn be fed to the placeholder you defined above).
        #
        # Recall that the expression for the policy gradient PG is
        #
        #       PG = E_{tau} [sum_{t=0}^T grad log pi(a_t|s_t) * (Q_t - b_t )]
        #
        # where
        #
        #       tau=(s_0, a_0, ...) is a trajectory,
        #       Q_t is the Q-value at time t, Q^{pi}(s_t, a_t),
        #       and b_t is a baseline which may depend on s_t.
        #
        # You will write code for two cases, controlled by the flag 'reward_to_go':
        #
        #   Case 1: trajectory-based PG
        #
        #       (reward_to_go = False)
        #
        #       Instead of Q^{pi}(s_t, a_t), we use the total discounted reward summed over
        #       entire trajectory (regardless of which time step the Q-value should be for).
        #
        #       For this case, the policy gradient estimator is
        #
        #           E_{tau} [sum_{t=0}^T grad log pi(a_t|s_t) * Ret(tau)]
        #
        #       where
        #
        #           Ret(tau) = sum_{t'=0}^T gamma^t' r_{t'}.
        #
        #       Thus, you should compute
        #
        #           Q_t = Ret(tau)
        #
        #   Case 2: reward-to-go PG
        #
        #       (reward_to_go = True)
        #
        #       Here, you estimate Q^{pi}(s_t, a_t) by the discounted sum of rewards starting
        #       from time step t. Thus, you should compute
        #
        #           Q_t = sum_{t'=t}^T gamma^(t'-t) * r_{t'}
        #
        #
        # Store the Q-values for all timesteps and all trajectories in a variable 'q_n',
        # like the 'ob_no' and 'ac_na' above.
        #
        #====================================================================================#

        def discount_rewards_to_go(rewards, gamma):
            res = []
            future_reward = 0
            for r in reversed(rewards):
                future_reward = future_reward * gamma + r
                res.append(future_reward)
            return res[::-1]

        def sum_discount_rewards(rewards, gamma):
            return sum((gamma**i) * rewards[i] for i in range(len(rewards)))

        q_n = []
        if reward_to_go:
            q_n = np.concatenate([
                discount_rewards_to_go(path["reward"], gamma) for path in paths
            ])
        else:
            q_n = np.concatenate(
                [[sum_discount_rewards(path["reward"], gamma)] *
                 pathlength(path) for path in paths])

        #====================================================================================#
        #                           ----------SECTION 5----------
        # Computing Baselines
        #====================================================================================#
        if nn_baseline and itr > 0:
            # If nn_baseline is True, use your neural network to predict reward-to-go
            # at each timestep for each trajectory, and save the result in a variable 'b_n'
            # like 'ob_no', 'ac_na', and 'q_n'.
            #
            # Hint #bl1: rescale the output from the nn_baseline to match the statistics
            # (mean and std) of the current or previous batch of Q-values. (Goes with Hint
            # #bl2 below.)
            b_n = sess.run(baseline_prediction, feed_dict={sy_ob_no: ob_no})
            b_n = (b_n -
                   np.mean(b_n)) / np.std(b_n) * np.std(q_n) + np.mean(q_n)
            adv_n = q_n - b_n
        else:
            adv_n = q_n.copy()

        #====================================================================================#
        #                           ----------SECTION 4----------
        # Advantage Normalization
        #====================================================================================#

        if normalize_advantages:
            # On the next line, implement a trick which is known empirically to reduce variance
            # in policy gradient methods: normalize adv_n to have mean zero and std=1.
            adv_n = (adv_n - np.mean(adv_n)) / np.std(adv_n)

        #====================================================================================#
        #                           ----------SECTION 5----------
        # Optimizing Neural Network Baseline
        #====================================================================================#
        if nn_baseline:
            # ----------SECTION 5----------
            # If a neural network baseline is used, set up the targets and the inputs for the
            # baseline.
            #
            # Fit it to the current batch in order to use for the next iteration. Use the
            # baseline_update_op you defined earlier.
            #
            # Hint #bl2: Instead of trying to target raw Q-values directly, rescale the
            # targets to have mean zero and std=1. (Goes with Hint #bl1 above.)
            scaled_q = (q_n - np.mean(q_n)) / np.std(q_n)
            _ = sess.run(baseline_update_op,
                         feed_dict={
                             sy_ob_no: ob_no,
                             baseline_target: scaled_q
                         })

        #====================================================================================#
        #                           ----------SECTION 4----------
        # Performing the Policy Update
        #====================================================================================#

        # Call the update operation necessary to perform the policy gradient update based on
        # the current batch of rollouts.
        #
        # For debug purposes, you may wish to save the value of the loss function before
        # and after an update, and then log them below.

        # [Learn] Tensorflow fetches in session.run
        # https://www.tensorflow.org/versions/r0.12/api_docs/python/client/session_management#Session.run
        _, loss_value = sess.run([update_op, loss],
                                 feed_dict={
                                     sy_ob_no: ob_no,
                                     sy_ac_na: ac_na,
                                     sy_adv_n: adv_n
                                 })

        # Log diagnostics
        returns = [path["reward"].sum() for path in paths]
        ep_lengths = [pathlength(path) for path in paths]
        logz.log_tabular("Time", time.time() - start)
        logz.log_tabular("Iteration", itr)
        logz.log_tabular("AverageReturn", np.mean(returns))
        logz.log_tabular("StdReturn", np.std(returns))
        logz.log_tabular("MaxReturn", np.max(returns))
        logz.log_tabular("MinReturn", np.min(returns))
        logz.log_tabular("EpLenMean", np.mean(ep_lengths))
        logz.log_tabular("EpLenStd", np.std(ep_lengths))
        logz.log_tabular("TimestepsThisBatch", timesteps_this_batch)
        logz.log_tabular("TimestepsSoFar", total_timesteps)
        logz.log_tabular("Loss", loss_value)
        logz.dump_tabular()
        logz.pickle_tf_vars()
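The parameter logging near the top of `train_PG` reads the function's own argument names with `inspect.getargspec` and looks the values up in `locals()`. That trick works for any entry point; a hedged, framework-free sketch (`configure_run` is invented, and `getfullargspec` is used so it also runs on current Python 3):

import inspect

def configure_run(env_name='CartPole-v0', n_iter=100, gamma=1.0, seed=0):
    # Collect every argument name/value pair without listing them by hand.
    arg_names = inspect.getfullargspec(configure_run).args
    local_values = locals()
    return {name: local_values.get(name) for name in arg_names}

print(configure_run(n_iter=50))
# {'env_name': 'CartPole-v0', 'n_iter': 50, 'gamma': 1.0, 'seed': 0}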
Example #52
0
def _ParseTransformDocString(func):
    """Parses the doc string for func.

  Args:
    func: The doc string will be parsed from this function.

  Returns:
    A (description, prototype, args, example) tuple:
      description - The function description.
      prototype - The function prototype string.
      args - A list of (name, description) tuples, one tuple for each arg.
      example - The example section lines, if any.

  Example transform function docstring parsed by this method:
    '''Transform description. Example sections optional.

    These lines are skipped.
    Another skipped line.

    Args:
      r: The resource arg is always specified but omitted from the docs.
      arg-2-name[=default-2]: The description for arg-2-name.
      arg-N-name[=default-N]: The description for arg-N-name.
      kwargs: Omitted from the description.

    Example:
      One or more example lines for the 'For example:' section.
    '''
  """
    hidden_args = ('kwargs', 'projection', 'r')
    if not func.__doc__:
        return '', '', '', ''
    description, _, doc = func.__doc__.partition('\n')
    collect = _DOC_MAIN
    arg = None
    descriptions = [description]
    example = []
    args = []
    arg_description = []
    paragraph = False
    for line in textwrap.dedent(doc).split('\n'):
        if not line:
            paragraph = True
        elif line == 'Args:':
            # Now collecting Args: section lines.
            collect = _DOC_ARGS
            paragraph = False
        elif line == 'Example:':
            # Now collecting Example: section lines.
            collect = _DOC_EXAMPLE
            paragraph = False
        elif collect == _DOC_SKIP:
            # Not interested in this line.
            continue
        elif collect == _DOC_MAIN:
            # The main description line.
            paragraph = _AppendLine(descriptions, line, paragraph)
        elif collect == _DOC_ARGS and line.startswith('    '):
            # An arg description line.
            paragraph = _AppendLine(arg_description, line, paragraph)
        elif collect == _DOC_EXAMPLE and line.startswith('  '):
            # An example description line.
            paragraph = _AppendLine(example, line[2:], paragraph)
        else:
            # The current arg description is done.
            if arg:
                arg = _StripUnusedNotation(arg)
            if arg and arg not in hidden_args:
                args.append((arg, ' '.join(arg_description)))
            if not line.startswith(' ') and line.endswith(':'):
                # The start of a new section.
                collect = _DOC_SKIP
                continue
            # A new arg description.
            arg, _, text = line.partition(':')
            arg = arg.strip()
            arg = arg.lstrip('*')
            arg_description = [text.strip()]

    # Collect the formal arg list with defaults for the function prototype.
    import inspect  # pylint: disable=g-import-not-at-top, For startup efficiency.
    argspec = inspect.getargspec(func)
    default_index_start = len(argspec.args) - len(argspec.defaults or [])
    formals = []
    for formal_index, formal in enumerate(argspec.args):
        if formal:
            formal = _StripUnusedNotation(formal)
        if formal in hidden_args:
            continue
        default_index = formal_index - default_index_start
        default = argspec.defaults[
            default_index] if default_index >= 0 else None
        if default is not None:
            default_display = repr(default).replace("'", '"')
            # Trim off the unicode 'u'.
            if default_display.startswith('u"'):
                default_display = default_display[1:]
            if default_display == 'False':
                default_display = 'false'
            elif default_display == 'True':
                default_display = 'true'
            formals.append('{formal}={default_display}'.format(
                formal=formal, default_display=default_display))
        else:
            formals.append(formal)
    if argspec.varargs:
        formals.append(argspec.varargs)
    prototype = '({formals})'.format(formals=', '.join(formals))

    return ''.join(descriptions), prototype, args, example
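The tail of `_ParseTransformDocString` turns an argspec into a call prototype, formatting defaults and appending `*varargs`. A stripped-down sketch of that rendering, without the gcloud-specific hidden-arg and quoting rules (the `date` function below is just a sample):

import inspect

def prototype(func):
    spec = inspect.getfullargspec(func)
    n_required = len(spec.args) - len(spec.defaults or ())
    formals = []
    for i, name in enumerate(spec.args):
        if i < n_required:
            formals.append(name)
        else:
            formals.append('{}={!r}'.format(name, spec.defaults[i - n_required]))
    if spec.varargs:
        formals.append('*' + spec.varargs)
    return '({})'.format(', '.join(formals))

def date(value, format='%Y-%m-%dT%H:%M:%S', undefined='', *args):
    pass

print(prototype(date))  # (value, format='%Y-%m-%dT%H:%M:%S', undefined='', *args)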
Example #53
0
    def RunCmd(
        self,
        command,
        compReg=True,
        env=None,
        skipInterface=False,
        onDone=None,
        onPrepare=None,
        userData=None,
        addLayer=None,
        notification=Notification.MAKE_VISIBLE,
    ):
        """Run command typed into console command prompt (GPrompt).

        .. todo::
            Document the other event.
        .. todo::
            Solve problem with the other event (now uses gOutputText
            event but there is no text, use onPrepare handler instead?)
        .. todo::
            Skip interface is ignored and always determined automatically.

        Posts event EVT_IGNORED_CMD_RUN when command which should be ignored
        (according to ignoredCmdPattern) is run.
        For example, see layer manager which handles d.* on its own.

        :param command: command given as a list (produced e.g. by utils.split())
        :param compReg: True use computation region
        :param notification: form of notification
        :param bool skipInterface: True to not launch the GRASS interface
                                   parser when the command is given without
                                   arguments
        :param onDone: function to be called when command is finished
        :param onPrepare: function to be called before command is launched
        :param addLayer: to be passed in the mapCreated signal
        :param userData: data defined for the command
        """
        if len(command) == 0:
            Debug.msg(2, "GPrompt:RunCmd(): empty command")
            return

        # update history file
        self.UpdateHistoryFile(" ".join(command))

        if command[0] in globalvar.grassCmd:
            # send GRASS command without arguments to GUI command interface
            # except ignored commands (event is emitted)
            if (
                self._ignoredCmdPattern
                and re.compile(self._ignoredCmdPattern).search(" ".join(command))
                and "--help" not in command
                and "--ui" not in command
            ):
                event = gIgnoredCmdRun(cmd=command)
                wx.PostEvent(self, event)
                return

            else:
                # other GRASS commands (r|v|g|...)
                try:
                    task = GUI(show=None).ParseCommand(command)
                except GException as e:
                    GError(parent=self._guiparent, message=str(e), showTraceback=False)
                    return

                hasParams = False
                if task:
                    options = task.get_options()
                    hasParams = options["params"] and options["flags"]
                    # check for <input>=-
                    for p in options["params"]:
                        if (
                            p.get("prompt", "") == "input"
                            and p.get("element", "") == "file"
                            and p.get("age", "new") == "old"
                            and p.get("value", "") == "-"
                        ):
                            GError(
                                parent=self._guiparent,
                                message=_(
                                    "Unable to run command:\n%(cmd)s\n\n"
                                    "Option <%(opt)s>: read from standard input is not "
                                    "supported by wxGUI"
                                )
                                % {"cmd": " ".join(command), "opt": p.get("name", "")},
                            )
                            return

                if len(command) == 1:
                    if command[0].startswith("g.gui."):
                        import imp
                        import inspect

                        pyFile = command[0]
                        if sys.platform == "win32":
                            pyFile += ".py"
                        pyPath = os.path.join(os.environ["GISBASE"], "scripts", pyFile)
                        if not os.path.exists(pyPath):
                            pyPath = os.path.join(
                                os.environ["GRASS_ADDON_BASE"], "scripts", pyFile
                            )
                        if not os.path.exists(pyPath):
                            GError(
                                parent=self._guiparent,
                                message=_("Module <%s> not found.") % command[0],
                            )
                        pymodule = imp.load_source(command[0].replace(".", "_"), pyPath)
                        try:  # PY3
                            pymain = inspect.getfullargspec(pymodule.main)
                        except AttributeError:
                            pymain = inspect.getargspec(pymodule.main)
                        if pymain and "giface" in pymain.args:
                            pymodule.main(self._giface)
                            return

                    # no arguments given
                    if hasParams and not isinstance(self._guiparent, FormNotebook):
                        # also parent must be checked, see #3135 for details
                        try:
                            GUI(
                                parent=self._guiparent, giface=self._giface
                            ).ParseCommand(command)
                        except GException as e:
                            print(e, file=sys.stderr)

                        return

                if env:
                    env = env.copy()
                else:
                    env = os.environ.copy()
                # activate computational region (set with g.region)
                # for all non-display commands.
                if compReg and "GRASS_REGION" in env:
                    del env["GRASS_REGION"]

                # process GRASS command with argument
                self.cmdThread.RunCmd(
                    command,
                    stdout=self.cmdStdOut,
                    stderr=self.cmdStdErr,
                    onDone=onDone,
                    onPrepare=onPrepare,
                    userData=userData,
                    addLayer=addLayer,
                    env=env,
                    notification=notification,
                )
                self.cmdOutputTimer.Start(50)

                # we don't need to change computational region settings
                # because we work on a copy
        else:
            # Send any other command to the shell. Send output to
            # console output window
            #
            # Check if the script has an interface (avoid double-launching
            # of the script)

            # check if we ignore the command (similar to grass commands part)
            if self._ignoredCmdPattern and re.compile(self._ignoredCmdPattern).search(
                " ".join(command)
            ):
                event = gIgnoredCmdRun(cmd=command)
                wx.PostEvent(self, event)
                return

            skipInterface = True
            if os.path.splitext(command[0])[1] in (".py", ".sh"):
                try:
                    sfile = open(command[0], "r")
                    for line in sfile.readlines():
                        if len(line) < 2:
                            continue
                        if line[0] == "#" and line[1] == "%":
                            skipInterface = False
                            break
                    sfile.close()
                except IOError:
                    pass

            if len(command) == 1 and not skipInterface:
                try:
                    task = gtask.parse_interface(command[0])
                except:
                    task = None
            else:
                task = None

            if task:
                # process GRASS command without argument
                GUI(parent=self._guiparent, giface=self._giface).ParseCommand(command)
            else:
                self.cmdThread.RunCmd(
                    command,
                    stdout=self.cmdStdOut,
                    stderr=self.cmdStdErr,
                    onDone=onDone,
                    onPrepare=onPrepare,
                    userData=userData,
                    addLayer=addLayer,
                    env=env,
                    notification=notification,
                )
            self.cmdOutputTimer.Start(50)
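The `g.gui.*` branch above loads a module and checks whether its `main()` accepts a `giface` argument, trying `getfullargspec` first and falling back to `getargspec` on Python 2. The same accepts-this-parameter probe, reduced to a hedged standalone helper (`main` is a dummy stand-in):

import inspect

def accepts_argument(func, name):
    try:  # Python 3
        spec = inspect.getfullargspec(func)
    except AttributeError:  # Python 2
        spec = inspect.getargspec(func)
    return name in spec.args

def main(giface=None):
    pass

print(accepts_argument(main, 'giface'))   # True
print(accepts_argument(main, 'options'))  # False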
#!/usr/bin/env python3
"""Print information about the arguments to a method.
"""

#end_pymotw_header

import inspect
import example

spec = inspect.getargspec(example.module_level_function)
print(spec)
print(inspect.formatargspec(spec))
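The script above needs pymotw's `example` module on the path. A self-contained variant with a local function shows roughly the same information; note that `getargspec` and `formatargspec` are deprecated and removed in Python 3.11, so on current interpreters `getfullargspec` or `inspect.signature` are the safer calls:

import inspect

def module_level_function(arg1, arg2='default', *args, **kwargs):
    pass

spec = inspect.getfullargspec(module_level_function)
print(spec.args)      # ['arg1', 'arg2']
print(spec.varargs)   # args
print(spec.varkw)     # kwargs
print(spec.defaults)  # ('default',)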
Example #55
0
    def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
        """
        Connect receiver to sender for signal.

        Arguments:

            receiver
                A function or an instance method which is to receive signals.
                Receivers must be hashable objects.

                if weak is True, then receiver must be weak-referencable (more
                precisely saferef.safeRef() must be able to create a reference
                to the receiver).

                Receivers must be able to accept keyword arguments.

                If receivers have a dispatch_uid attribute, the receiver will
                not be added if another receiver already exists with that
                dispatch_uid.

            sender
                The sender to which the receiver should respond. Must either be
                of type Signal, or None to receive events from any sender.

            weak
                Whether to use weak references to the receiver. By default, the
                module will attempt to use weak references to the receiver
                objects. If this parameter is false, then strong references will
                be used.

            dispatch_uid
                An identifier used to uniquely identify a particular instance of
                a receiver. This will usually be a string, though it may be
                anything hashable.
        """
        # If debugging is on, check that we got a good receiver
        if self._debugging:
            import inspect
            assert callable(receiver), "Signal receivers must be callable."

            # Check for **kwargs
            # Not all callables are inspectable with getargspec, so we'll
            # try a couple different ways but in the end fall back on assuming
            # it is -- we don't want to prevent registration of valid but weird
            # callables.
            try:
                argspec = inspect.getargspec(receiver)
            except TypeError:
                try:
                    argspec = inspect.getargspec(receiver.__call__)
                except (TypeError, AttributeError):
                    argspec = None
            if argspec:
                assert argspec[2] is not None, \
                    "Signal receivers must accept keyword arguments (**kwargs)."

        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        if weak:
            receiver = saferef.safeRef(receiver, onDelete=self._remove_receiver)

        for r_key, _ in self.receivers:
            if r_key == lookup_key:
                break
        else:
            self.receivers.append((lookup_key, receiver))
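The debugging branch of `connect` checks `argspec[2]` (the `**kwargs` slot) to make sure a receiver can swallow arbitrary keyword arguments. A hedged sketch of the same test for an arbitrary callable, including the `__call__` fallback for instances:

import inspect

def accepts_kwargs(receiver):
    try:
        spec = inspect.getfullargspec(receiver)
    except TypeError:
        try:
            spec = inspect.getfullargspec(receiver.__call__)
        except (TypeError, AttributeError):
            return True  # Not inspectable; assume it is fine, as the signal code does.
    return spec.varkw is not None

def good_receiver(sender, **kwargs):
    pass

def bad_receiver(sender):
    pass

print(accepts_kwargs(good_receiver))  # True
print(accepts_kwargs(bad_receiver))   # False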
Example #56
0
def _do_inv_for_realhost(host_config, sources, multi_host_sections, hostname,
                         ipaddress, inventory_tree, status_data_tree):
    # type: (config.HostConfig, data_sources.DataSources, Optional[data_sources.MultiHostSections], HostName, Optional[HostAddress], StructuredDataTree, StructuredDataTree) -> None
    for source in sources.get_data_sources():
        if isinstance(source, data_sources.SNMPDataSource):
            source.set_on_error("raise")
            source.set_do_snmp_scan(True)
            source.disable_data_source_cache()
            source.set_use_snmpwalk_cache(False)
            source.set_ignore_check_interval(True)
            source.set_check_plugin_name_filter(
                _gather_snmp_check_plugin_names_inventory)
            if multi_host_sections is not None:
                # Status data inventory already provides filled multi_host_sections object.
                # SNMP data source: If 'do_status_data_inv' is enabled there may be
                # sections for inventory plugins which were not fetched yet.
                source.enforce_check_plugin_names(None)
                host_sections = multi_host_sections.add_or_get_host_sections(
                    hostname, ipaddress, deflt=SNMPHostSections())
                source.set_fetched_check_plugin_names(
                    set(host_sections.sections.keys()))
                host_sections_from_source = source.run()
                host_sections.update(host_sections_from_source)

    if multi_host_sections is None:
        multi_host_sections = sources.get_host_sections()

    console.step("Executing inventory plugins")
    import cmk.base.inventory_plugins as inventory_plugins
    console.verbose("Plugins:")
    for section_name, plugin in inventory_plugins.sorted_inventory_plugins():
        section_content = multi_host_sections.get_section_content(
            hostname, ipaddress, section_name, for_discovery=False)
        # TODO: Don't we need to take config.check_info[check_plugin_name]["handle_empty_info"]:
        #       like it is done in checking.execute_check()? Standardize this!
        if not section_content:  # section not present (None or [])
            # Note: this also excludes existing sections without info.
            continue

        if all([x in [[], {}, None] for x in section_content]):
            # Inventory plugins which get parsed info from related
            # check plugin may have more than one return value, eg
            # parse function of oracle_tablespaces returns ({}, {})
            continue

        console.verbose(" %s%s%s%s" %
                        (tty.green, tty.bold, section_name, tty.normal))

        # Inventory functions can optionally have a second argument: parameters.
        # These are configured via rule sets (much like check parameters).
        inv_function = plugin["inv_function"]
        inv_function_args = inspect.getargspec(inv_function).args

        kwargs = {}
        for dynamic_arg_name, dynamic_arg_value in [
            ("inventory_tree", inventory_tree),
            ("status_data_tree", status_data_tree),
        ]:
            if dynamic_arg_name in inv_function_args:
                inv_function_args.remove(dynamic_arg_name)
                kwargs[dynamic_arg_name] = dynamic_arg_value

        if len(inv_function_args) == 2:
            params = host_config.inventory_parameters(section_name)
            args = [section_content, params]
        else:
            args = [section_content]
        inv_function(*args, **kwargs)
    console.verbose("\n")
Example #57
0
def get_arguments(func):
    args = inspect.getargspec(func)
    return args[0]
Example #58
0
def find_MAP(
    start=None,
    vars=None,
    method="L-BFGS-B",
    return_raw=False,
    include_transformed=True,
    progressbar=True,
    maxeval=5000,
    model=None,
    *args,
    **kwargs
):
    """
    Finds the local maximum a posteriori point given a model.

    find_MAP should not be used to initialize the NUTS sampler. Simply call pymc3.sample() and it will automatically initialize NUTS in a better way.

    Parameters
    ----------
    start : `dict` of parameter values (Defaults to `model.test_point`)
    vars : list
        List of variables to optimize and set to optimum (Defaults to all continuous).
    method : string or callable
        Optimization algorithm (Defaults to 'L-BFGS-B' unless
        discrete variables are specified in `vars`, then
        `Powell` which will perform better).  For instructions on use of a callable,
        refer to SciPy's documentation of `optimize.minimize`.
    return_raw : bool
        Whether to return the full output of scipy.optimize.minimize (Defaults to `False`)
    include_transformed : bool
        Flag for reporting automatically transformed variables in addition
        to original variables (defaults to True).
    progressbar : bool
        Whether or not to display a progress bar in the command line.
    maxeval : int
        The maximum number of times the posterior distribution is evaluated.
    model : Model (optional if in `with` context)
    *args, **kwargs
        Extra args passed to scipy.optimize.minimize

    Notes
    -----
    Older code examples used find_MAP() to initialize the NUTS sampler,
    but this is not an effective way of choosing starting values for sampling.
    As a result, we have greatly enhanced the initialization of NUTS and
    wrapped it inside pymc3.sample() and you should thus avoid this method.
    """
    model = modelcontext(model)
    if start is None:
        start = model.test_point
    else:
        update_start_vals(start, model.test_point, model)

    if not set(start.keys()).issubset(model.named_vars.keys()):
        extra_keys = ", ".join(set(start.keys()) - set(model.named_vars.keys()))
        valid_keys = ", ".join(model.named_vars.keys())
        raise KeyError(
            "Some start parameters do not appear in the model!\n"
            "Valid keys are: {}, but {} was supplied".format(valid_keys, extra_keys)
        )

    if vars is None:
        vars = model.cont_vars
    vars = inputvars(vars)
    disc_vars = list(typefilter(vars, discrete_types))
    allinmodel(vars, model)

    start = Point(start, model=model)
    bij = DictToArrayBijection(ArrayOrdering(vars), start)
    logp_func = bij.mapf(model.fastlogp_nojac)
    x0 = bij.map(start)

    try:
        dlogp_func = bij.mapf(model.fastdlogp_nojac(vars))
        compute_gradient = True
    except (AttributeError, NotImplementedError, tg.NullTypeGradError):
        compute_gradient = False

    if disc_vars or not compute_gradient:
        pm._log.warning(
            "Warning: gradient not available."
            + "(E.g. vars contains discrete variables). MAP "
            + "estimates may not be accurate for the default "
            + "parameters. Defaulting to non-gradient minimization "
            + "'Powell'."
        )
        method = "Powell"

    if "fmin" in kwargs:
        fmin = kwargs.pop("fmin")
        warnings.warn(
            "In future versions, set the optimization algorithm with a string. "
            'For example, use `method="L-BFGS-B"` instead of '
            '`fmin=sp.optimize.fmin_l_bfgs_b`.'
        )

        cost_func = CostFuncWrapper(maxeval, progressbar, logp_func)

        # Check to see if minimization function actually uses the gradient
        if "fprime" in getargspec(fmin).args:

            def grad_logp(point):
                return nan_to_num(-dlogp_func(point))

            opt_result = fmin(cost_func, bij.map(start), fprime=grad_logp, *args, **kwargs)
        else:
            # Check to see if minimization function uses a starting value
            if "x0" in getargspec(fmin).args:
                opt_result = fmin(cost_func, bij.map(start), *args, **kwargs)
            else:
                opt_result = fmin(cost_func, *args, **kwargs)

        if isinstance(opt_result, tuple):
            mx0 = opt_result[0]
        else:
            mx0 = opt_result
    else:
        # remove 'if' part, keep just this 'else' block after version change
        if compute_gradient:
            cost_func = CostFuncWrapper(maxeval, progressbar, logp_func, dlogp_func)
        else:
            cost_func = CostFuncWrapper(maxeval, progressbar, logp_func)

        try:
            opt_result = minimize(
                cost_func, x0, method=method, jac=compute_gradient, *args, **kwargs
            )
            mx0 = opt_result["x"]  # r -> opt_result
        except (KeyboardInterrupt, StopIteration) as e:
            mx0, opt_result = cost_func.previous_x, None
            if isinstance(e, StopIteration):
                pm._log.info(e)
        finally:
            last_v = cost_func.n_eval
            cost_func.progress.total = last_v
            cost_func.progress.update(last_v)

    vars = get_default_varnames(model.unobserved_RVs, include_transformed)
    mx = {var.name: value for var, value in zip(vars, model.fastfn(vars)(bij.rmap(mx0)))}

    if return_raw:
        return mx, opt_result
    else:
        return mx
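The deprecated `fmin=` branch of `find_MAP` probes the optimizer's signature for `fprime` and `x0` before deciding how to call it. A hedged sketch of that capability probe with stand-in optimizers (the `fake_fmin_*` functions only mimic the shape of SciPy's legacy `fmin_*` signatures):

from inspect import getfullargspec

def fake_fmin_bfgs(f, x0, fprime=None, maxiter=1000):
    pass

def fake_fmin_powell(f, x0, maxiter=1000):
    pass

def wants(optimizer, name):
    return name in getfullargspec(optimizer).args

print(wants(fake_fmin_bfgs, 'fprime'))    # True: pass the gradient as fprime
print(wants(fake_fmin_powell, 'fprime'))  # False: call it gradient-free
print(wants(fake_fmin_powell, 'x0'))      # True: takes a starting point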
Example #59
0
# FOR A PARTICULAR PURPOSE.
#
##############################################################################

"""
Change default behaviour of MailHost to send mails immediately.
In ERP5, we have Activity Tool to postpone mail delivery.
"""

from inspect import getargspec, isfunction
from Products.MailHost.MailHost import MailBase
import six

for f in six.itervalues(MailBase.__dict__):
  if isfunction(f):
    args, _, _, defaults = getargspec(f)
    try:
      i = args.index('immediate') - len(args)
    except ValueError:
      continue
    f.__defaults__ = defaults[:i] + (True,) + defaults[i+1 or len(args):]

from App.special_dtml import DTMLFile
MailBase.manage = MailBase.manage_main = DTMLFile('dtml/manageMailHost', globals())
MailBase.smtp_socket_timeout = 16.

from functools import partial
MailBase__makeMailer = MailBase._makeMailer
def _makeMailer(self):
  """ Create a SMTPMailer """
  smtp_mailer = MailBase__makeMailer(self)
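The monkey-patch loop above finds the `immediate` parameter with `getargspec` and rewrites the matching slot of `__defaults__`. A hedged, generic sketch of flipping one default by name (`send` stands in for the MailHost methods being patched):

from inspect import getfullargspec

def override_default(func, name, value):
    spec = getfullargspec(func)
    defaults = list(func.__defaults__ or ())
    # Defaults line up with the tail of the positional argument list.
    position = spec.args.index(name) - (len(spec.args) - len(defaults))
    defaults[position] = value
    func.__defaults__ = tuple(defaults)

def send(message, immediate=False, encode=None):
    return immediate

override_default(send, 'immediate', True)
print(send('hello'))  # True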
Example #60
0
    def goto(self, nodename, raw_string):
        """
        Run a node by name

        Args:
            nodename (str or callable): Name of node or a callable
                to be called as `function(caller, raw_string)` or `function(caller)`
                to return the actual goto string.
            raw_string (str): The raw default string entered on the
                previous node (only used if the node accepts it as an
                argument)

        """
        if callable(nodename):
            try:
                if len(getargspec(nodename).args) > 1:
                    # callable accepting raw_string
                    nodename = nodename(self.caller, raw_string)
                else:
                    nodename = nodename(self.caller)
            except Exception:
                self.caller.msg(_ERR_GENERAL.format(nodename=nodename),
                                self._session)
                raise
        try:
            # execute the node, make use of the returns.
            nodetext, options = self._execute_node(nodename, raw_string)
        except EvMenuError:
            return

        if self._persistent:
            self.caller.attributes.add("_menutree_saved_startnode",
                                       (nodename, raw_string))

        # validation of the node return values
        helptext = ""
        if hasattr(nodetext, "__iter__"):
            if len(nodetext) > 1:
                nodetext, helptext = nodetext[:2]
            else:
                nodetext = nodetext[0]
        nodetext = "" if nodetext is None else str(nodetext)
        options = [options] if isinstance(options, dict) else options

        # this will be displayed in the given order
        display_options = []
        # this is used for lookup
        self.options = {}
        self.default = None
        if options:
            for inum, dic in enumerate(options):
                # fix up the option dicts
                keys = make_iter(dic.get("key"))
                if "_default" in keys:
                    keys = [key for key in keys if key != "_default"]
                    desc = dic.get(
                        "desc",
                        dic.get("text", _ERR_NO_OPTION_DESC).strip())
                    goto, execute = dic.get("goto",
                                            None), dic.get("exec", None)
                    self.default = (goto, execute)
                else:
                    keys = list(
                        make_iter(dic.get("key",
                                          str(inum + 1).strip())))
                    desc = dic.get(
                        "desc",
                        dic.get("text", _ERR_NO_OPTION_DESC).strip())
                    goto, execute = dic.get("goto",
                                            None), dic.get("exec", None)
                if keys:
                    display_options.append((keys[0], desc))
                    for key in keys:
                        if goto or execute:
                            self.options[strip_ansi(key).strip().lower()] = (
                                goto, execute)

        self.nodetext = self._format_node(nodetext, display_options)

        # handle the helptext
        if helptext:
            self.helptext = helptext
        elif options:
            self.helptext = _HELP_FULL if self.auto_quit else _HELP_NO_QUIT
        else:
            self.helptext = _HELP_NO_OPTIONS if self.auto_quit else _HELP_NO_OPTIONS_NO_QUIT

        self.display_nodetext()
        if not options:
            self.close_menu()
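`goto` above decides between calling `nodename(caller, raw_string)` and `nodename(caller)` by counting the callable's declared arguments. A minimal hedged sketch of that arity-based dispatch (the node functions are invented):

from inspect import getfullargspec

def run_node(node, caller, raw_string):
    # Pass raw_string only if the node declares a second parameter.
    if len(getfullargspec(node).args) > 1:
        return node(caller, raw_string)
    return node(caller)

def node_simple(caller):
    return 'start'

def node_echo(caller, raw_string):
    return 'you typed {!r}'.format(raw_string)

print(run_node(node_simple, 'caller', 'look'))  # start
print(run_node(node_echo, 'caller', 'look'))    # you typed 'look'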