Ejemplo n.º 1
0
def test_better_wraps():
    """Check getargspec() resolves the original signature through both
    a plain @wraps decorator and datalad's @better_wraps decorator."""
    def wraps_decorator(func):
        # stock functools.wraps-based pass-through decorator
        @wraps(func)
        def _inner(*args, **kwargs):
            return func(*args, **kwargs)
        return _inner

    def better_decorator(func):
        # datalad's better_wraps-based pass-through decorator
        @better_wraps(func)
        def _inner(*args, **kwargs):
            return func(*args, **kwargs)
        return _inner

    @wraps_decorator
    def function1(a, b, c):
        return "function1"

    @better_decorator
    def function2(a, b, c):
        return "function2"

    # calls go through the wrappers unchanged
    eq_("function1", function1(1, 2, 3))
    # getargspec shim now can handle @wraps'ed functions just fine
    eq_(getargspec(function1)[0], ['a', 'b', 'c'])
    eq_("function2", function2(1, 2, 3))
    eq_(getargspec(function2)[0], ['a', 'b', 'c'])
Ejemplo n.º 2
0
def test_better_wraps():
    """In this version @wraps hides the signature from getargspec (empty
    args list), while @better_wraps preserves it."""
    from functools import wraps
    from datalad.utils import getargspec

    def wraps_decorator(func):
        # stock functools.wraps-based pass-through decorator
        @wraps(func)
        def _inner(*args, **kwargs):
            return func(*args, **kwargs)
        return _inner

    def better_decorator(func):
        # datalad's signature-preserving variant
        @better_wraps(func)
        def _inner(*args, **kwargs):
            return func(*args, **kwargs)
        return _inner

    @wraps_decorator
    def function1(a, b, c):
        return "function1"

    @better_decorator
    def function2(a, b, c):
        return "function2"

    eq_("function1", function1(1, 2, 3))
    # the @wraps-ed wrapper exposes only *args/**kwargs, hence no argnames
    eq_(getargspec(function1)[0], [])
    eq_("function2", function2(1, 2, 3))
    eq_(getargspec(function2)[0], ['a', 'b', 'c'])
Ejemplo n.º 3
0
    def eq_argspec(f, expected, has_kwonlyargs=False):
        """A helper to centralize testing of getargspec on original and wrapped function

        has_kwonlyargs instructs whether the function has kwonly args, so we do
        not try to compare to inspect.get*spec functions, which would barf
        ValueError if attempted on a function with kwonlys. It is also passed
        through as include_kwonlyargs to our getargspec.
        """
        # first make sure that `expected` itself is correct
        if has_kwonlyargs:
            # inspect.getargspec barfs on kwonly args
            assert_raises(ValueError, inspect.getargspec, f)
            inspect.getfullargspec(f)  # doesn't barf
        else:
            eq_(inspect.getargspec(f), expected)
            # and getfullargspec[:4] wouldn't provide a full picture
            eq_(inspect.getfullargspec(f)[:4], expected)
        eq_(getargspec(f, include_kwonlyargs=has_kwonlyargs), expected)

        # and now on a wrapped one -- only ours can do the right thing
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):  # pragma: no cover
                return func(*args, **kwargs)
            return wrapper

        wrapped = decorator(f)
        if has_kwonlyargs:
            # We barf ValueError similarly to inspect.getargspec, unless
            # explicitly requested to include kwonlyargs
            assert_raises(ValueError, getargspec, wrapped)
        eq_(getargspec(wrapped, include_kwonlyargs=has_kwonlyargs), expected)
Ejemplo n.º 4
0
    def call_from_parser(cls, args):
        """Invoke ``cls.__call__`` with kwargs mapped from parsed cmdline args.

        Parameters
        ----------
        args :
          Parsed command line arguments -- presumably an argparse.Namespace
          (accessed only via dir()/getattr/hasattr here) -- TODO confirm.

        Returns
        -------
        Whatever ``cls.__call__`` returns; a generator result is exhausted
        into a list before returning.
        """
        # XXX needs safety check for name collisions
        from datalad.utils import getargspec
        argspec = getargspec(cls.__call__)
        if argspec[2] is None:
            # no **kwargs in the call receiver, pull argnames from signature
            argnames = getargspec(cls.__call__)[0]
        else:
            # common options
            # XXX define or better get from elsewhere
            common_opts = ('change_path', 'common_debug', 'common_idebug', 'func',
                           'help', 'log_level', 'logger', 'pbs_runner',
                           'result_renderer', 'subparser')
            argnames = [name for name in dir(args)
                        if not (name.startswith('_') or name in common_opts)]
        kwargs = {k: getattr(args, k)
                  for k in argnames
                  # some arguments might be Python-only and do not appear in the
                  # parser Namespace
                  if hasattr(args, k) and is_api_arg(k)}
        # we are coming from the entry point, this is the toplevel command,
        # let it run like generator so we can act on partial results quicker
        # TODO remove following condition test when transition is complete and
        # run indented code unconditionally
        if _has_eval_results_call(cls):
            # set all common args explicitly  to override class defaults
            # that are tailored towards the the Python API
            kwargs['return_type'] = 'generator'
            kwargs['result_xfm'] = None
            # allow commands to override the default, unless something other than
            # default is requested
            kwargs['result_renderer'] = \
                args.common_output_format if args.common_output_format != 'tailored' \
                else getattr(cls, 'result_renderer', 'default')
            if '{' in args.common_output_format:
                # stupid hack, could and should become more powerful
                kwargs['result_renderer'] = DefaultOutputRenderer(args.common_output_format)

            if args.common_on_failure:
                kwargs['on_failure'] = args.common_on_failure
            # compose filter function from to be invented cmdline options
            res_filter = cls._get_result_filter(args)
            if res_filter is not None:
                # Don't add result_filter if it's None because then
                # eval_results can't distinguish between --report-{status,type}
                # not specified via the CLI and None passed via the Python API.
                kwargs['result_filter'] = res_filter
        try:
            ret = cls.__call__(**kwargs)
            if inspect.isgenerator(ret):
                # exhaust the generator now so any errors surface here
                ret = list(ret)
            return ret
        except KeyboardInterrupt as exc:
            ui.error("\nInterrupted by user while doing magic: %s" % exc_str(exc))
            if cls._interrupted_exit_code is not None:
                sys.exit(cls._interrupted_exit_code)
            else:
                raise
Ejemplo n.º 5
0
def test_eval_results_plus_build_doc():
    """Integration test of eval_results-decorated commands: docstring
    generation, class detection, result counts, kwargs side effects and
    the signature reported by the getargspec shim."""

    # test docs

    # docstring was build already:
    with swallow_logs(new_level=logging.DEBUG) as cml:
        TestUtils().__call__(1)
        assert_not_in("Building doc for", cml.out)
    # docstring accessible both ways:
    doc1 = Dataset.fake_command.__doc__
    doc2 = TestUtils().__call__.__doc__

    # docstring was built from Test_Util's definition:
    assert_equal(doc1, doc2)
    assert_in("TestUtil's fake command", doc1)
    assert_in("Parameters", doc1)
    assert_in("It's a number", doc1)

    # docstring shows correct override values of defaults in eval_params
    assert_in("Default: 'tailored'", doc1)
    assert_in("Default: 'item-or-list'", doc1)

    # docstring also contains eval_result's parameters:
    assert_in("result_filter", doc1)
    assert_in("return_type", doc1)
    assert_in("list", doc1)
    assert_in("None", doc1)
    assert_in("return value behavior", doc1)
    assert_in("dictionary is passed", doc1)

    # test eval_results is able to determine the call, a method of which it is
    # decorating:
    with swallow_logs(new_level=logging.DEBUG) as cml:
        Dataset('/does/not/matter').fake_command(3)
        assert_in(
            "Determined class of decorated function: {}"
            "".format(TestUtils().__class__), cml.out)

    # test results:
    result = TestUtils().__call__(2)
    assert_equal(len(list(result)), 2)
    result = Dataset('/does/not/matter').fake_command(3)
    assert_equal(len(list(result)), 3)

    # test absent side-effect of popping eval_defaults
    kwargs = dict(return_type='list')
    TestUtils().__call__(2, **kwargs)
    # the passed-in dict must not have been mutated by the call
    assert_equal(list(kwargs), ['return_type'])

    # test signature:
    from datalad.utils import getargspec
    assert_equal(
        getargspec(Dataset.fake_command)[0],
        ['number', 'dataset', 'result_fn'])
    assert_equal(
        getargspec(TestUtils.__call__)[0], ['number', 'dataset', 'result_fn'])
Ejemplo n.º 6
0
def get_allargs_as_kwargs(call, args, kwargs):
    """Generate a kwargs dict from a call signature and *args, **kwargs

    Basically resolving the argnames for all positional arguments, and
    resolving the defaults for all kwargs that are not given in a kwargs
    dict

    Parameters
    ----------
    call : callable
      Callable whose signature is inspected via datalad's getargspec shim.
    args : sequence
      Positional arguments of the (intended) invocation.
    kwargs : dict
      Keyword arguments of the (intended) invocation; these take precedence
      over signature defaults.

    Returns
    -------
    OrderedDict
      Argument name -> value for all named positional args, all defaults
      that were not overridden, and all provided keyword args.
    """
    from datalad.utils import getargspec
    argspec = getargspec(call)
    # getargspec() reports None (not an empty tuple) for a callable without
    # any defaults -- normalize so len()/slicing below cannot blow up
    defaults = argspec.defaults or tuple()
    nargs = len(argspec.args)
    assert nargs >= len(defaults)
    # map any args to their name
    kwargs_ = OrderedDict(zip(argspec.args[:len(args)], args))
    # map defaults of kwargs to their names (update below); defaults always
    # belong to the trailing argument names
    if defaults:
        for k, v in zip(argspec.args[-len(defaults):], defaults):
            # do not clobber a value already bound positionally
            kwargs_.setdefault(k, v)
    # update with provided kwarg args
    kwargs_.update(kwargs)
    # XXX we cannot assert the following, because our own highlevel
    # API commands support more kwargs than what is discoverable
    # from their signature...
    #assert (nargs == len(kwargs_))
    return kwargs_
Ejemplo n.º 7
0
def _test_consistent_order_of_args(intf, spec_posargs):
    """Check that `spec_posargs` come first in the signature of intf.__call__."""
    call = getattr(intf, '__call__')
    signature_args = getargspec(call)[0]
    # now verify that those spec_posargs are first among args
    if not spec_posargs:
        raise SkipTest("no positional args")  # print intf, "skipped"


#    else:
#        print intf, spec_posargs
    name = intf.__name__
    if name == 'Save':
        # it makes sense there to have most common argument first
        # -- the message. But we don't enforce it on cmdline so it is
        # optional
        spec_posargs.add('message')
    elif name == 'ExtractMetadata':
        # MIH I was never sure what this test enforces and it takes
        # me ages each time to try to wrap my head around it
        # I am confident that I do not want to change the API of this
        # command now that it is a command and no longer a plugin
        # hence this exception
        eq_(spec_posargs, spec_posargs)
        return

    eq_(set(signature_args[:len(spec_posargs)]), spec_posargs)
Ejemplo n.º 8
0
def setup_parser_for_interface(parser, cls, completing=False):
    """Add one parser argument per exposed parameter of ``cls.__call__``."""
    # XXX needs safety check for name collisions
    # XXX allow for parser kwargs customization
    # get the signature, order of arguments is taken from it
    args, varargs, varkw, defaults = getargspec(cls.__call__,
                                                include_kwonlyargs=True)
    ndefaults = len(defaults) if defaults is not None else 0
    # offset such that (offset + i) >= 0 exactly for args having a default
    default_offset = ndefaults - len(args)
    prefix_chars = parser.prefix_chars
    for idx, arg in enumerate(args):
        if not is_api_arg(arg):
            continue

        param = cls._params_[arg]
        if param.cmd_args == tuple():
            # explicitly provided an empty sequence of argument names
            # this shall not appear in the parser
            continue

        # set up the parameter
        setup_parserarg_for_interface(parser,
                                      arg,
                                      param,
                                      default_offset + idx,
                                      prefix_chars,
                                      defaults,
                                      completing=completing)
Ejemplo n.º 9
0
    def apply_func(wrapped, instance, args, kwargs):
        # Translate a bound-method invocation into a plain keyword call on the
        # original function, injecting the instance as the dataset argument.
        #
        # Note
        # ----
        # This wrapper is NOT returned by the decorator, but only used to bind
        # the function `f` to the Dataset class.

        from datalad.utils import getargspec
        kwargs = kwargs.copy()
        orig_pos = getargspec(f).args

        # If bound function is used with wrong signature (especially by
        # explicitly passing a dataset), raise a proper exception instead
        # of a 'list index out of range' that is not very telling to the user.
        if len(args) >= len(orig_pos):
            raise TypeError(
                "{0}() takes at most {1} arguments ({2} given):"
                " {3}".format(name, len(orig_pos), len(args), ['self'] +
                              [a for a in orig_pos if a != dataset_argname]))
        if dataset_argname in kwargs:
            raise TypeError("{}() got an unexpected keyword argument {}"
                            "".format(name, dataset_argname))
        kwargs[dataset_argname] = instance
        ds_index = orig_pos.index(dataset_argname)
        # map each positional value to its parameter name, skipping over the
        # slot occupied by the dataset argument
        for pos, value in enumerate(args):
            target = pos if pos < ds_index else pos + 1
            kwargs[orig_pos[target]] = value
        return f(**kwargs)
Ejemplo n.º 10
0
def _test_consistent_order_of_args(intf, spec_posargs):
    """Check that `spec_posargs` come first in the signature of intf.__call__."""
    call = getattr(intf, '__call__')
    signature_args = getargspec(call)[0]
    # now verify that those spec_posargs are first among args
    if not spec_posargs:
        raise SkipTest("no positional args")  # print intf, "skipped"


#    else:
#        print intf, spec_posargs
    name = intf.__name__
    if name == 'Save':
        # it makes sense there to have most common argument first
        # -- the message. But we don't enforce it on cmdline so it is
        # optional
        spec_posargs.add('message')
    elif name in ('AddReadme', 'Addurls', 'ExportArchive',
                  'ExportToFigshare', 'ExtractMetadata'):
        if name != 'ExtractMetadata':
            # ex-plugins had 'dataset' as the first positional argument
            # and ExtractMetadata has 'types' as the first positional arg
            eq_(signature_args[0], 'dataset')
        eq_(spec_posargs, spec_posargs)
        return

    eq_(set(signature_args[:len(spec_posargs)]), spec_posargs)
Ejemplo n.º 11
0
def get_result_filter(fx):
    """Wrap a filter into a helper to be able to accept additional
    arguments, if the filter doesn't support it already"""
    if not fx or getargspec(fx).keywords:
        # no filter given, or it already accepts **kwargs -- use as-is
        return fx

    def _fx(res, **kwargs):
        # swallow extra keyword args the original filter cannot take
        return fx(res)
    return _fx
Ejemplo n.º 12
0
def update_docstring_with_parameters(func,
                                     params,
                                     prefix=None,
                                     suffix=None,
                                     add_args=None):
    """Generate a useful docstring from a parameter spec

    Amends any existing docstring of a callable with a textual
    description of its parameters. The Parameter spec needs to match
    the number and names of the callables arguments.

    Parameters
    ----------
    func : callable
      Callable whose ``__doc__`` is replaced (modified in place and also
      returned).
    params : dict
      Maps each API argument name of `func` to its Parameter spec; a missing
      entry raises ValueError.
    prefix : str, optional
      Text to put before the generated parameter listing.
    suffix : str, optional
      Text to append after the generated parameter listing.
    add_args : dict, optional
      Extra argname -> default pairs to document in addition to the
      signature's own arguments.

    Returns
    -------
    callable
      `func`, with its ``__doc__`` rewritten.
    """
    from datalad.utils import getargspec
    # get the signature
    args, varargs, varkw, defaults = getargspec(func, include_kwonlyargs=True)
    defaults = defaults or tuple()
    if add_args:
        # sorted for deterministic ordering; their defaults are appended so
        # the trailing-defaults alignment below still holds
        add_argnames = sorted(add_args.keys())
        args.extend(add_argnames)
        defaults = defaults + tuple(add_args[k] for k in add_argnames)
    ndefaults = len(defaults)
    # start documentation with what the callable brings with it
    doc = prefix if prefix else u''
    if len(args) > 1:
        if len(doc):
            # ensure a blank line between the prefix and the listing
            if not doc.endswith('\n'):
                doc += '\n'
            doc += '\n'
        doc += "Parameters\n----------\n"
        for i, arg in enumerate(args):
            if not is_api_arg(arg):
                continue
            # we need a parameter spec for each argument
            if not arg in params:
                raise ValueError(
                    "function has argument '%s' not described as a parameter" %
                    arg)
            param = params[arg]
            # validate the default -- to make sure that the parameter description is
            # somewhat OK
            # defaults belong to the trailing args; index is >= 0 exactly for
            # arguments that do have a default
            defaults_idx = ndefaults - len(args) + i
            if defaults_idx >= 0:
                if param.constraints is not None:
                    param.constraints(defaults[defaults_idx])
            # temporarily swap in the API-flavored doc text for rendering
            orig_docs = param._doc
            param._doc = alter_interface_docs_for_api(param._doc)
            doc += param.get_autodoc(
                arg,
                default=defaults[defaults_idx] if defaults_idx >= 0 else None,
                has_default=defaults_idx >= 0)
            param._doc = orig_docs
            doc += '\n'
    doc += suffix if suffix else u""
    # assign the amended docs
    func.__doc__ = doc
    return func
Ejemplo n.º 13
0
def get_function_nargs(f):
    """Return the number of positional arguments of method `f` (excluding
    ``self``), or -1 if it accepts a variable number via *args."""
    # follow the chain of .wrapped attributes down to the innermost function
    while hasattr(f, 'wrapped'):
        f = f.wrapped
    spec = getargspec(f)
    assert not spec.keywords, \
        "ATM we have none defined with keywords, so disabling having them"
    if spec.varargs:
        # Variable number of arguments
        return -1
    assert spec.args, "ATM no static methods"
    assert spec.args[0] == "self"
    return len(spec.args) - 1
Ejemplo n.º 14
0
 def setup_parser(cls, parser):
     """Populate `parser` with one argument per exposed parameter of
     ``cls.__call__``, deriving names, defaults, types and help text from
     the class' ``_params_`` specs."""
     # XXX needs safety check for name collisions
     # XXX allow for parser kwargs customization
     parser_kwargs = {}
     from datalad.utils import getargspec
     # get the signature
     ndefaults = 0
     args, varargs, varkw, defaults = getargspec(cls.__call__)
     if not defaults is None:
         ndefaults = len(defaults)
     for i, arg in enumerate(args):
         if not is_api_arg(arg):
             continue
         param = cls._params_[arg]
         # defaults align with the trailing args: index is >= 0 exactly when
         # `arg` has a default value
         defaults_idx = ndefaults - len(args) + i
         cmd_args = param.cmd_args
         if cmd_args == tuple():
             # explicitly provided an empty sequence of argument names
             # this shall not appear in the parser
             continue
         elif cmd_args is None:
             cmd_args = []
         if not len(cmd_args):
             if defaults_idx >= 0:
                 # dealing with a kwarg
                 template = '--%s'
             else:
                 # positional arg
                 template = '%s'
             # use parameter name as default argument name
             parser_args = (template % arg.replace('_', '-'), )
         else:
             parser_args = [c.replace('_', '-') for c in cmd_args]
         parser_kwargs = param.cmd_kwargs
         if defaults_idx >= 0:
             parser_kwargs['default'] = defaults[defaults_idx]
         help = alter_interface_docs_for_cmdline(param._doc)
         if help and help.rstrip()[-1] != '.':
             help = help.rstrip() + '.'
         if param.constraints is not None:
             parser_kwargs['type'] = param.constraints
             # include value constraint description and default
             # into the help string
             cdoc = alter_interface_docs_for_cmdline(
                 param.constraints.long_description())
             if cdoc[0] == '(' and cdoc[-1] == ')':
                 cdoc = cdoc[1:-1]
             help += '  Constraints: %s' % cdoc
             if 'metavar' not in parser_kwargs and \
                     isinstance(param.constraints, EnsureChoice):
                 parser_kwargs['metavar'] = \
                     '{%s}' % '|'.join(
                         # don't use short_description(), because
                         # it also needs to give valid output for
                         # Python syntax (quotes...), but here we
                         # can simplify to shell syntax where everything
                         # is a string
                         p for p in param.constraints._allowed
                         # in the cmdline None pretty much means
                         # don't give the options, so listing it
                         # doesn't make sense. Moreover, any non-string
                         # value cannot be given and very likely only
                         # serves a special purpose in the Python API
                         # or implementation details
                         if isinstance(p, str))
         if defaults_idx >= 0:
             # if it is a flag, in commandline it makes little sense to show
             # showing the Default: (likely boolean).
             #   See https://github.com/datalad/datalad/issues/3203
             if not parser_kwargs.get('action', '').startswith('store_'):
                 # [Default: None] also makes little sense for cmdline
                 if defaults[defaults_idx] is not None:
                     help += " [Default: %r]" % (defaults[defaults_idx], )
         # create the parameter, using the constraint instance for type
         # conversion
         parser.add_argument(*parser_args, help=help, **parser_kwargs)
Ejemplo n.º 15
0
    def setup_parser(cls, parser):
        """Populate `parser` with one argument per exposed parameter of
        ``cls.__call__``, deriving names, defaults, types and help text from
        the class' ``_params_`` specs."""
        # XXX needs safety check for name collisions
        # XXX allow for parser kwargs customization
        parser_kwargs = {}
        from datalad.utils import getargspec
        # get the signature
        ndefaults = 0
        args, varargs, varkw, defaults = getargspec(cls.__call__)
        if defaults is not None:
            ndefaults = len(defaults)
        # defaults align with the trailing args: (default_offset + i) >= 0
        # exactly for arguments that do have a default
        default_offset = ndefaults - len(args)
        prefix_chars = parser.prefix_chars
        for i, arg in enumerate(args):
            if not is_api_arg(arg):
                continue
            param = cls._params_[arg]
            defaults_idx = default_offset + i
            cmd_args = param.cmd_args
            if cmd_args == tuple():
                # explicitly provided an empty sequence of argument names
                # this shall not appear in the parser
                continue

            parser_kwargs = param.cmd_kwargs
            has_default = defaults_idx >= 0
            if cmd_args:
                if cmd_args[0][0] in prefix_chars:
                    # TODO: All the Paramteter(args=...) values in this code
                    # base use hyphens, so there is no point in the below
                    # conversion. If it looks like no extensions rely on this
                    # behavior either, this could be dropped.
                    parser_args = [c.replace('_', '-') for c in cmd_args]
                else:
                    # Argparse will not convert dashes to underscores for
                    # arguments that don't start with a prefix character, so
                    # the above substitution must be avoided so that
                    # call_from_parser() can find the corresponding parameter.
                    parser_args = cmd_args
            elif has_default:
                # Construct the option from the Python parameter name.
                parser_args = ("--{}".format(arg.replace("_", "-")),)
            else:
                # If args= wasn't given and its a positional argument in the
                # function, add a positional argument to argparse. If `dest` is
                # specified, we need to remove it from the keyword arguments
                # because add_argument() expects it as the first argument. Note
                # that `arg` shouldn't have a dash here, but `metavar` can be
                # used if a dash is preferred for the command-line help.
                parser_args = (parser_kwargs.pop("dest", arg),)

            if has_default:
                parser_kwargs['default'] = defaults[defaults_idx]
            help = alter_interface_docs_for_cmdline(param._doc)
            if help and help.rstrip()[-1] != '.':
                help = help.rstrip() + '.'
            if param.constraints is not None:
                parser_kwargs['type'] = param.constraints
                # include value constraint description and default
                # into the help string
                cdoc = alter_interface_docs_for_cmdline(
                    param.constraints.long_description())
                if cdoc[0] == '(' and cdoc[-1] == ')':
                    cdoc = cdoc[1:-1]
                help += '  Constraints: %s' % cdoc
                if 'metavar' not in parser_kwargs and \
                        isinstance(param.constraints, EnsureChoice):
                    parser_kwargs['metavar'] = \
                        '{%s}' % '|'.join(
                            # don't use short_description(), because
                            # it also needs to give valid output for
                            # Python syntax (quotes...), but here we
                            # can simplify to shell syntax where everything
                            # is a string
                            p for p in param.constraints._allowed
                            # in the cmdline None pretty much means
                            # don't give the options, so listing it
                            # doesn't make sense. Moreover, any non-string
                            # value cannot be given and very likely only
                            # serves a special purpose in the Python API
                            # or implementation details
                            if isinstance(p, str))
            if defaults_idx >= 0:
                # if it is a flag, in commandline it makes little sense to show
                # showing the Default: (likely boolean).
                #   See https://github.com/datalad/datalad/issues/3203
                if not parser_kwargs.get('action', '').startswith('store_'):
                    # [Default: None] also makes little sense for cmdline
                    if defaults[defaults_idx] is not None:
                        help += " [Default: %r]" % (defaults[defaults_idx],)
            # create the parameter, using the constraint instance for type
            # conversion
            parser.add_argument(*parser_args, help=help,
                                **parser_kwargs)
Ejemplo n.º 16
0
class Parameter(object):
    """This class shall serve as a representation of a parameter.
    """

    # Known keyword arguments which we want to allow to pass over into
    # argparser.add_argument . Mentioned explicitly, since otherwise
    # are not verified while working in Python-only API
    _KNOWN_ARGS = getargspec(argparse.Action.__init__)[0] + ['action']

    def __init__(self, constraints=None, doc=None, args=None, **kwargs):
        """Add constraints (validator) specifications and a docstring for
        a parameter.

        Parameters
        ----------
        constraints : callable
          A functor that takes any input value, performs checks or type
          conversions and finally returns a value that is appropriate for a
          parameter or raises an exception. This will also be used to set up
          the ``type`` functionality of argparse.add_argument.
        doc : str
          Documentation about the purpose of this parameter.
        args : tuple or None
          Any additional positional args for argparser.add_argument. This is
          most useful for assigned multiple alternative argument names or
          create positional arguments.
        **kwargs :
          Any additional keyword args for argparser.add_argument.

        Raises
        ------
        ValueError
          If any keyword argument is not a known argparse.add_argument
          keyword (see ``_KNOWN_ARGS``).

        Examples
        --------
        Ensure a parameter is a float
        >>> from datalad.support.param import Parameter
        >>> from datalad.support.constraints import (EnsureFloat, EnsureRange,
        ...                              AltConstraints, Constraints)
        >>> C = Parameter(constraints=EnsureFloat())

        Ensure a parameter is of type float or None:
        >>> C = Parameter(constraints=AltConstraints(EnsureFloat(), None))

        Ensure a parameter is None or of type float and lies in the inclusive
        range (7.0,44.0):
        >>> C = Parameter(
        ...         AltConstraints(
        ...             Constraints(EnsureFloat(),
        ...                         EnsureRange(min=7.0, max=44.0)),
        ...             None))
        """
        # normalized constraint functor (or None)
        self.constraints = expand_constraint_spec(constraints)
        # raw documentation text, rendered by get_autodoc()
        self._doc = doc
        # positional args destined for argparse.add_argument
        self.cmd_args = args

        # Verify that no mistyped kwargs present
        unknown_args = set(kwargs).difference(self._KNOWN_ARGS)
        if unknown_args:
            raise ValueError(
                "Detected unknown argument(s) for the Parameter: %s.  Known are: %s"
                % (', '.join(unknown_args), ', '.join(self._KNOWN_ARGS))
            )
        # keyword args destined for argparse.add_argument
        self.cmd_kwargs = kwargs

    def get_autodoc(self, name, indent="  ", width=70, default=None, has_default=False):
        """Docstring for the parameter to be used in lists of parameters

        Returns
        -------
        string or list of strings (if indent is None)
        """
        # header line: "<name> : <type description>[, optional]"
        paramsdoc = '%s' % name
        sdoc = None
        if self.constraints is not None:
            sdoc = self.constraints.short_description()
        elif 'action' in self.cmd_kwargs \
                and self.cmd_kwargs['action'] in ("store_true", "store_false"):
            # flag-style parameters are booleans even without a constraint
            sdoc = 'bool'
        if sdoc is not None:
            # strip redundant wrapping parentheses from the description
            if sdoc[0] == '(' and sdoc[-1] == ')':
                sdoc = sdoc[1:-1]
            # reflect argparse nargs/action multiplicity in the type text
            nargs = self.cmd_kwargs.get('nargs', '')
            if isinstance(nargs, int):
                sdoc = '{}-item sequence of {}'.format(nargs, sdoc)
            elif nargs == '+':
                sdoc = 'non-empty sequence of {}'.format(sdoc)
            elif nargs == '*':
                sdoc = 'sequence of {}'.format(sdoc)
            if self.cmd_kwargs.get('action', None) == 'append':
                sdoc = 'list of {}'.format(sdoc)
            paramsdoc += " : %s" % sdoc
            if has_default:
                paramsdoc += ", optional"
        paramsdoc = [paramsdoc]

        # body: cleaned-up description, wrapped and indented under the header
        doc = self._doc
        if doc is None:
            doc = ''
        doc = doc.strip()
        if len(doc) and not doc.endswith('.'):
            doc += '.'
        if has_default:
            doc += " [Default: %r]" % (default,)
        # Explicitly deal with multiple spaces, for some reason
        # replace_whitespace is non-effective
        doc = _whitespace_re.sub(' ', doc)
        paramsdoc += [indent + x
                      for x in textwrap.wrap(doc, width=width - len(indent),
                                             replace_whitespace=True)]
        return '\n'.join(paramsdoc)