Code example #1
File: static_autogen.py  Project: Zheaoli/ubelt
def _static_parse_imports(modpath, imports=None, use_all=True):
    # from ubelt.meta import static_analysis as static
    # TODO: port some of this functionality over
    import builtins
    from xdoctest import static_analysis as static
    modname = static.modpath_to_modname(modpath)
    if imports is not None:
        import_paths = {
            m: static.modname_to_modpath(modname + '.' + m, hide_init=False)
            for m in imports
        }
    else:
        imports = []
        import_paths = {}
        for sub_modpath in static.package_modpaths(modpath,
                                                   with_pkg=True,
                                                   recursive=False):
            # print('sub_modpath = {!r}'.format(sub_modpath))
            sub_modname = static.modpath_to_modname(sub_modpath)
            rel_modname = sub_modname[len(modname) + 1:]
            if rel_modname.startswith('_'):
                continue
            if not rel_modname:
                continue
            import_paths[rel_modname] = sub_modpath
            imports.append(rel_modname)
        imports = sorted(imports)

    from_imports = []
    for rel_modname in imports:
        sub_modpath = import_paths[rel_modname]
        with open(sub_modpath, 'r') as file:
            source = file.read()
        valid_callnames = None
        if use_all:
            try:
                valid_callnames = static.parse_static_value('__all__', source)
            except NameError:
                pass
        if valid_callnames is None:
            # The __all__ variable is not specified or we don't care
            top_level = static.TopLevelVisitor.parse(source)
            attrnames = list(top_level.assignments) + list(
                top_level.calldefs.keys())
            # list of names we won't export by default
            invalid_callnames = dir(builtins)
            valid_callnames = []
            for attr in attrnames:
                if '.' in attr or attr.startswith('_'):
                    continue
                if attr in invalid_callnames:
                    continue
                valid_callnames.append(attr)
        from_imports.append((rel_modname, sorted(valid_callnames)))
    return modname, imports, from_imports
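
A minimal usage sketch for code example #1, assuming a hypothetical package path; it turns the returned (modname, imports, from_imports) triple into the kind of import lines an auto-generated __init__.py would contain (the exact formatting is illustrative, not ubelt's actual output):

# '/path/to/mypkg' is a hypothetical stand-in for a real package directory.
modname, imports, from_imports = _static_parse_imports('/path/to/mypkg', use_all=True)

lines = ['from {} import {}'.format(modname, m) for m in imports]
for rel_modname, callnames in from_imports:
    lines.append('from {}.{} import ({},)'.format(
        modname, rel_modname, ', '.join(callnames)))
print('\n'.join(lines))
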
Code example #2
def test_package_submodules():
    """
    CommandLine:
        pytest testing/test_static.py::test_package_submodules -s
        pass

    Ignore:
        import sys
        sys.path.append('/home/joncrall/code/xdoctest/testing')
        from test_static import *
        temp = utils.TempDir()
        temp.__enter__()
        sys.path.append(temp.dpath)

        temp.__exit__(None, None, None)
    """
    with utils.TempDir() as temp:
        dpath = temp.dpath

        # Create a dummy package hierarchy
        root = utils.ensuredir((dpath, '_tmproot'))
        sub1 = utils.ensuredir((root, 'sub1'))
        sub2 = utils.ensuredir((sub1, 'sub2'))

        root_init = touch((root, '__init__.py'))
        sub1_init = touch((sub1, '__init__.py'))
        sub2_init = touch((sub2, '__init__.py'))

        mod0 = touch((root, 'mod0.py'))
        mod1 = touch((sub1, 'mod1.py'))
        mod2 = touch((sub2, 'mod2.py'))

        root_main = touch((root, '__main__.py'))
        sub2_main = touch((sub2, '__main__.py'))

        bad1 = utils.ensuredir((root, 'bad1'))
        bad2 = utils.ensuredir((sub1, 'bad2'))
        b0 = touch((bad1, 'b0.py'))
        b1 = touch((bad2, 'b1.py'))

        with utils.PythonPathContext(dpath):
            subpaths = sorted(static.package_modpaths(root, with_pkg=True))

            # should only return files not directories
            assert root_init in subpaths
            assert sub1_init in subpaths
            assert sub2_init in subpaths
            assert root not in subpaths
            assert sub1 not in subpaths
            assert sub2 not in subpaths

            assert root_main in subpaths
            assert sub2_main in subpaths

            assert mod0 in subpaths
            assert mod1 in subpaths
            assert mod2 in subpaths

            assert bad1 not in subpaths
            assert b0 not in subpaths
            assert b1 not in subpaths

            assert '_tmproot' not in sys.modules
            assert '_tmproot.mod0' not in sys.modules
            assert '_tmproot.sub1' not in sys.modules
            assert '_tmproot.sub1.mod1' not in sys.modules
            assert '_tmproot.sub1.sub2' not in sys.modules
            assert '_tmproot.sub1.sub2.mod2' not in sys.modules
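
The temporary-package test above exercises static.package_modpaths; a small sketch run against an installed package instead (assuming xdoctest itself is importable), using only functions that already appear in these examples:

from os.path import dirname
from xdoctest import static_analysis as static

# Resolve the on-disk directory of an importable package.
pkgpath = dirname(static.modname_to_modpath('xdoctest', hide_init=False))

# with_pkg=True includes the __init__.py files, as the assertions above expect.
for sub_modpath in sorted(static.package_modpaths(pkgpath, with_pkg=True)):
    print(static.modpath_to_modname(sub_modpath))
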
Code example #3
def package_calldefs(pkg_identifier,
                     exclude=[],
                     ignore_syntax_errors=True,
                     analysis='auto'):
    """
    Statically generates all callable definitions in a module or package

    Args:
        pkg_identifier (str | Module): path to or name of the module to be
            tested (or the live module itself, which is not recommended)

        exclude (List[str]): glob-patterns of file names to exclude

        ignore_syntax_errors (bool, default=True):
            if False raise an error when syntax errors occur in a doctest

        analysis (str, default='auto'):
            if 'static', only static analysis is used to parse call
            definitions. If 'auto', uses dynamic analysis for compiled python
            extensions, but static analysis elsewhere. If 'dynamic', then
            dynamic analysis is used to parse all calldefs.

    Yields:
        Tuple[Dict[str, CallDefNode], str | Module] -
            * item[0]: the mapping of callnames-to-calldefs
            * item[1]: the path to the file containing the doctest
              (usually a module) or the module itself

    Example:
        >>> pkg_identifier = 'xdoctest.core'
        >>> testables = list(package_calldefs(pkg_identifier))
        >>> assert len(testables) == 1
        >>> calldefs, modpath = testables[0]
        >>> assert util_import.modpath_to_modname(modpath) == pkg_identifier
        >>> assert 'package_calldefs' in calldefs
    """
    if DEBUG:
        print('Find package calldefs: pkg_identifier = {!r}'.format(
            pkg_identifier))

    if isinstance(pkg_identifier, types.ModuleType):
        # Case where we are forced to use a live module
        identifiers = [pkg_identifier]
    else:
        pkgpath = _rectify_to_modpath(pkg_identifier)
        identifiers = list(
            static_analysis.package_modpaths(pkgpath,
                                             with_pkg=True,
                                             with_libs=True))

    for module_identifier in identifiers:
        if isinstance(module_identifier, six.string_types):
            modpath = module_identifier
            modname = util_import.modpath_to_modname(modpath)
            if any(fnmatch(modname, pat) for pat in exclude):
                continue
            if not exists(modpath):
                warnings.warn('Module {} does not exist. '
                              'Is it an old pyc file?'.format(modname))
                continue
        try:
            calldefs = parse_calldefs(module_identifier, analysis=analysis)
            if calldefs is not None:
                yield calldefs, module_identifier
        except SyntaxError as ex:
            # Handle error due to the actual code containing errors
            msg = 'Cannot parse module={}.\nCaused by: {}'
            msg = msg.format(module_identifier, ex)
            if ignore_syntax_errors:
                warnings.warn(msg)  # real code or docstr contained errors
            else:
                raise SyntaxError(msg)
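
A short consumer sketch for this variant, mirroring the docstring Example but walking every yielded module; it assumes the function is importable as xdoctest.core.package_calldefs and only prints the callname keys, since the attributes of CallDefNode are not shown in this excerpt:

from xdoctest import core

for calldefs, modpath in core.package_calldefs('xdoctest.core', analysis='auto'):
    for callname in sorted(calldefs):
        print('{}: {}'.format(modpath, callname))
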
Code example #4
File: core.py  Project: MatsLanGoH/xdoctest
def package_calldefs(modpath_or_name, exclude=[], ignore_syntax_errors=True):
    """
    Statically generates all callable definitions in a module or package

    Args:
        modpath_or_name (str): path to or name of the module to be tested
        exclude (list): glob-patterns of file names to exclude
        ignore_syntax_errors (bool): if False raise an error when syntax errors
            occur in a doctest (default True)

    Example:
        >>> modpath_or_name = 'xdoctest.core'
        >>> testables = list(package_calldefs(modpath_or_name))
        >>> assert len(testables) == 1
        >>> calldefs, modpath = testables[0]
        >>> assert static.modpath_to_modname(modpath) == modpath_or_name
        >>> assert 'package_calldefs' in calldefs
    """
    pkgpath = _rectify_to_modpath(modpath_or_name)

    modpaths = static.package_modpaths(pkgpath, with_pkg=True, with_libs=True)
    modpaths = list(modpaths)
    for modpath in modpaths:
        modname = static.modpath_to_modname(modpath)
        if any(fnmatch(modname, pat) for pat in exclude):
            continue
        if not exists(modpath):
            warnings.warn('Module {} does not exist. '
                          'Is it an old pyc file?'.format(modname))
            continue

        FORCE_DYNAMIC = '--xdoc-force-dynamic' in sys.argv
        # if false just skip extension modules
        ALLOW_DYNAMIC = '--no-xdoc-dynamic' not in sys.argv

        if FORCE_DYNAMIC:
            # Force dynamic parsing for everything
            do_dynamic = True
        else:
            # Some modules can only be parsed dynamically
            needs_dynamic = modpath.endswith(static._platform_pylib_exts())
            do_dynamic = needs_dynamic and ALLOW_DYNAMIC

        if do_dynamic:
            try:
                calldefs = dynamic.parse_dynamic_calldefs(modpath)
            except ImportError as ex:
                # Some modules are just c modules
                msg = 'Cannot dynamically parse module={} at path={}.\nCaused by: {}'
                msg = msg.format(modname, modpath, ex)
                warnings.warn(msg)  # real code contained errors
            except Exception as ex:
                msg = 'Cannot dynamically parse module={} at path={}.\nCaused by: {}'
                msg = msg.format(modname, modpath, ex)
                warnings.warn(msg)  # real code contained errors
                raise
            else:
                yield calldefs, modpath
        else:
            try:
                calldefs = static.parse_calldefs(fpath=modpath)
            except SyntaxError as ex:
                # Handle error due to the actual code containing errors
                msg = 'Cannot parse module={} at path={}.\nCaused by: {}'
                msg = msg.format(modname, modpath, ex)
                if ignore_syntax_errors:
                    warnings.warn(msg)  # real code contained errors
                    continue
                else:
                    raise SyntaxError(msg)
            else:
                yield calldefs, modpath
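
In this older variant the static/dynamic choice is driven purely by sys.argv flags; a self-contained sketch of the same decision logic (the helper name is invented for illustration, and pylib_exts stands in for static._platform_pylib_exts()):

def _choose_parsing_mode(modpath, argv, pylib_exts=('.so', '.pyd')):
    # Mirror the FORCE_DYNAMIC / ALLOW_DYNAMIC logic used above.
    force_dynamic = '--xdoc-force-dynamic' in argv
    allow_dynamic = '--no-xdoc-dynamic' not in argv
    if force_dynamic:
        return 'dynamic'
    # Compiled extension modules can only be parsed dynamically.
    needs_dynamic = modpath.endswith(pylib_exts)
    return 'dynamic' if needs_dynamic and allow_dynamic else 'static'

print(_choose_parsing_mode('mymod.py', []))                     # static
print(_choose_parsing_mode('mymod.so', []))                     # dynamic
print(_choose_parsing_mode('mymod.so', ['--no-xdoc-dynamic']))  # static
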
Code example #5
def generate_typed_stubs(modpath):
    """
    Attempt to use google-style docstrings, xdoctest, and mypy to generate
    typed stub files.

    Does not overwrite anything by itself.

    Args:
        modpath (PathLike): path to the module to generate types for

    Returns:
        Dict[PathLike, str]:
            A dictionary mapping the path of each file to write to the text to
            be written.

    Notes:
        FIXME: This currently requires my hacked version of mypy

    Example:
        >>> # xdoctest: +SKIP
        >>> # xdoctest: +REQUIRES(module:mypy)
        >>> # xdoctest: +REQUIRES(--hacked)
        >>> from xdev.cli.docstr_stubgen import *  # NOQA
        >>> import xdev
        >>> import ubelt as ub
        >>> from xdev.cli import docstr_stubgen
        >>> modpath = ub.Path(docstr_stubgen.__file__)
        >>> generated = generate_typed_stubs(modpath)
        >>> text = generated[ub.peek(generated.keys())]
        >>> assert 'PathLike' in text
        >>> assert 'Dict' in text
        >>> print(text)

    Ignore:
        pyfile mypy.stubgen
        # Delete compiled versions so we can hack it

        # ls $VIRTUAL_ENV/lib/*/site-packages/mypy/*.so
        # rm $VIRTUAL_ENV/lib/*/site-packages/mypy/*.so
        # rm ~/.pyenv/versions/3.8.6/envs/pyenv3.8.6/lib/python3.8/site-packages/mypy/*.cpython-38-x86_64-linux-gnu.so

        # This works I think?
        if [[ ! -e "$HOME/code/mypy" ]];  then
            git clone https://github.com/python/mypy.git $HOME/code/mypy
        fi
        (cd $HOME/code/mypy && git pull)
        pip install -e $HOME/code/mypy


        pip install MonkeyType

        monkeytype run run_tests.py
        monkeytype stub ubelt.util_dict

        from typing import TypeVar
        from mypy.applytype import get_target_type
        z = TypeVar('Iterable')
        get_target_type(z)

        from mypy.expandtype import expand_type
        expand_type(z, env={})

        from mypy.types import get_proper_type
        get_proper_type(z)
        get_proper_type(dict)
        import typing
        get_proper_type(typing.Iterable)

        from mypy.types import deserialize_type, UnboundType
        import mypy.types as mypy_types
        z = UnboundType('Iterable')
        get_proper_type(dict)

        from mypy.fastparse import parse_type_string
        parse_type_string('dict', 'dict', 0, 0)
        z = parse_type_string('typing.Iterator', 'Any', 0, 0)
        get_proper_type(z)

    """
    # import pathlib
    # import ubelt as ub
    import os
    import sys
    from mypy import stubgen
    from mypy import defaults
    from xdoctest import static_analysis
    from os.path import join
    import ubelt as ub

    # modname = 'scriptconfig'
    # module = ub.import_module_from_name(modname)
    # modpath = ub.Path(module.__file__).parent

    # for p in pathlib.Path(modpath).glob('*.pyi'):
    #     p.unlink()
    modpath = ub.Path(modpath)

    files = list(
        static_analysis.package_modpaths(modpath,
                                         recursive=True,
                                         with_libs=1,
                                         with_pkg=0))

    # files = [f for f in files if 'deprecated' not in f]
    # files = [join(ubelt_dpath, 'util_dict.py')]

    if modpath.is_file():
        output_dir = modpath.parent.parent
    else:
        output_dir = modpath.parent

    options = stubgen.Options(pyversion=defaults.PYTHON3_VERSION,
                              no_import=True,
                              doc_dir='',
                              search_path=[],
                              interpreter=sys.executable,
                              ignore_errors=False,
                              parse_only=True,
                              include_private=False,
                              output_dir=output_dir,
                              modules=[],
                              packages=[],
                              files=files,
                              verbose=False,
                              quiet=False,
                              export_less=True)
    # generate_stubs(options)

    mypy_opts = stubgen.mypy_options(options)
    py_modules, c_modules = stubgen.collect_build_targets(options, mypy_opts)

    # Collect info from docs (if given):
    sigs = class_sigs = None  # type: Optional[Dict[str, str]]
    if options.doc_dir:
        sigs, class_sigs = stubgen.collect_docs_signatures(options.doc_dir)

    # Use parsed sources to generate stubs for Python modules.
    stubgen.generate_asts_for_modules(py_modules, options.parse_only,
                                      mypy_opts, options.verbose)

    generated = {}

    for mod in py_modules:
        assert mod.path is not None, "Not found module was not skipped"
        target = mod.module.replace('.', '/')
        if os.path.basename(mod.path) == '__init__.py':
            target += '/__init__.pyi'
        else:
            target += '.pyi'
        target = join(options.output_dir, target)
        files.append(target)
        with stubgen.generate_guarded(mod.module, target,
                                      options.ignore_errors, options.verbose):
            stubgen.generate_stub_from_ast(mod, target, options.parse_only,
                                           options.pyversion,
                                           options.include_private,
                                           options.export_less)

            gen = ExtendedStubGenerator(
                mod.runtime_all,
                pyversion=options.pyversion,
                include_private=options.include_private,
                analyzed=not options.parse_only,
                export_less=options.export_less)
            assert mod.ast is not None, "This function must be used only with analyzed modules"
            mod.ast.accept(gen)
            # print('gen.import_tracker.required_names = {!r}'.format(gen.import_tracker.required_names))
            # print(gen.import_tracker.import_lines())

            # print('mod.path = {!r}'.format(mod.path))

            known_one_letter_types = {
                # 'T', 'K', 'A', 'B', 'C', 'V',
                'DT',
                'KT',
                'VT',
                'T'
            }
            for type_var_name in set(gen.import_tracker.required_names) & set(
                    known_one_letter_types):
                gen.add_typing_import('TypeVar')
                # gen.add_import_line('from typing import {}\n'.format('TypeVar'))
                gen._output = [
                    '{} = TypeVar("{}")\n'.format(type_var_name, type_var_name)
                ] + gen._output

            custom_types = {'Hasher'}
            for type_var_name in set(
                    gen.import_tracker.required_names) & set(custom_types):
                gen.add_typing_import('TypeVar')
                # gen.add_import_line('from typing import {}\n'.format('TypeVar'))
                gen._output = [
                    '{} = TypeVar("{}")\n'.format(type_var_name, type_var_name)
                ] + gen._output

            # Hack for specific module
            # if mod.path.endswith('util_path.py'):
            #     gen.add_typing_import('TypeVar')
            #     # hack for variable inheritance
            #     gen._output = ['import pathlib\nimport os\n', "_PathBase = pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath\n"] + gen._output

            text = ''.join(gen.output())
            text = postprocess_hacks(text, mod)

            # Write output to file.
            # subdir = ub.Path(target).parent
            # if subdir and not os.path.isdir(subdir):
            #     os.makedirs(subdir)
            generated[target] = text
            # with open(target, 'w') as file:
            #     file.write(text)
    return generated
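
Because this variant only returns the stub text rather than writing it, a short consumer sketch (assuming the hacked mypy noted in the docstring is installed) that persists the generated .pyi files, using standard ubelt/pathlib methods:

import ubelt as ub
from xdev.cli import docstr_stubgen
from xdev.cli.docstr_stubgen import generate_typed_stubs

generated = generate_typed_stubs(ub.Path(docstr_stubgen.__file__))
for fpath, text in generated.items():
    fpath = ub.Path(fpath)
    fpath.parent.ensuredir()  # make sure the target directory exists
    fpath.write_text(text)
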
Code example #6
File: gen_typed_stubs.py  Project: Erotemic/ubelt
def generate_typed_stubs():
    """
    Attempt to use google-style docstrings, xdoctest, and mypy to generate
    typed stub files.

    pyfile mypy.stubgen
    # Delete compiled versions so we can hack it
    rm ~/.pyenv/versions/3.8.6/envs/pyenv3.8.6/lib/python3.8/site-packages/mypy/*.cpython-38-x86_64-linux-gnu.so

    git clone https://github.com/python/mypy.git
    cd mypy
    pip install -e .


    pip install MonkeyType

    monkeytype run run_tests.py
    monkeytype stub ubelt.util_dict

    from typing import TypeVar
    from mypy.applytype import get_target_type
    z = TypeVar('Iterable')
    get_target_type(z)

    from mypy.expandtype import expand_type
    expand_type(z, env={})

    from mypy.types import get_proper_type
    get_proper_type(z)
    get_proper_type(dict)
    import typing
    get_proper_type(typing.Iterable)

    from mypy.types import deserialize_type, UnboundType
    import mypy.types as mypy_types
    z = UnboundType('Iterable')
    get_proper_type(dict)

    from mypy.fastparse import parse_type_string
    parse_type_string('dict', 'dict', 0, 0)
    z = parse_type_string('typing.Iterator', 'Any', 0, 0)
    get_proper_type(z)

    """
    import pathlib
    import ubelt
    import os
    import sys
    import autoflake
    import yapf
    from mypy import stubgen
    from mypy import defaults
    from xdoctest import static_analysis
    from os.path import dirname, join
    ubelt_dpath = dirname(ubelt.__file__)

    for p in pathlib.Path(ubelt_dpath).glob('*.pyi'):
        p.unlink()
    files = list(
        static_analysis.package_modpaths(ubelt_dpath,
                                         recursive=True,
                                         with_libs=1,
                                         with_pkg=0))
    files = [f for f in files if 'deprecated' not in f]
    # files = [join(ubelt_dpath, 'util_dict.py')]

    options = stubgen.Options(pyversion=defaults.PYTHON3_VERSION,
                              no_import=True,
                              doc_dir='',
                              search_path=[],
                              interpreter=sys.executable,
                              ignore_errors=False,
                              parse_only=True,
                              include_private=False,
                              output_dir=dirname(ubelt_dpath),
                              modules=[],
                              packages=[],
                              files=files,
                              verbose=False,
                              quiet=False,
                              export_less=True)
    # generate_stubs(options)

    mypy_opts = stubgen.mypy_options(options)
    py_modules, c_modules = stubgen.collect_build_targets(options, mypy_opts)

    # Collect info from docs (if given):
    sigs = class_sigs = None  # type: Optional[Dict[str, str]]
    if options.doc_dir:
        sigs, class_sigs = stubgen.collect_docs_signatures(options.doc_dir)

    # Use parsed sources to generate stubs for Python modules.
    stubgen.generate_asts_for_modules(py_modules, options.parse_only,
                                      mypy_opts, options.verbose)

    for mod in py_modules:
        assert mod.path is not None, "Not found module was not skipped"
        target = mod.module.replace('.', '/')
        if os.path.basename(mod.path) == '__init__.py':
            target += '/__init__.pyi'
        else:
            target += '.pyi'
        target = join(options.output_dir, target)
        files.append(target)
        with stubgen.generate_guarded(mod.module, target,
                                      options.ignore_errors, options.verbose):
            stubgen.generate_stub_from_ast(mod, target, options.parse_only,
                                           options.pyversion,
                                           options.include_private,
                                           options.export_less)

            gen = ExtendedStubGenerator(
                mod.runtime_all,
                pyversion=options.pyversion,
                include_private=options.include_private,
                analyzed=not options.parse_only,
                export_less=options.export_less)
            assert mod.ast is not None, "This function must be used only with analyzed modules"
            mod.ast.accept(gen)
            # print('gen.import_tracker.required_names = {!r}'.format(gen.import_tracker.required_names))
            # print(gen.import_tracker.import_lines())

            print('mod.path = {!r}'.format(mod.path))

            known_one_letter_types = {
                # 'T', 'K', 'A', 'B', 'C', 'V',
                'DT',
                'KT',
                'VT',
                'T'
            }
            for type_var_name in set(gen.import_tracker.required_names) & set(
                    known_one_letter_types):
                gen.add_typing_import('TypeVar')
                # gen.add_import_line('from typing import {}\n'.format('TypeVar'))
                gen._output = [
                    '{} = TypeVar("{}")\n'.format(type_var_name, type_var_name)
                ] + gen._output

            custom_types = {'Hasher'}
            for type_var_name in set(
                    gen.import_tracker.required_names) & set(custom_types):
                gen.add_typing_import('TypeVar')
                # gen.add_import_line('from typing import {}\n'.format('TypeVar'))
                gen._output = [
                    '{} = TypeVar("{}")\n'.format(type_var_name, type_var_name)
                ] + gen._output

            # Hack for specific module
            # if mod.path.endswith('util_path.py'):
            #     gen.add_typing_import('TypeVar')
            #     # hack for variable inheritance
            #     gen._output = ['import pathlib\nimport os\n', "_PathBase = pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath\n"] + gen._output

            text = ''.join(gen.output())
            # Hack to remove lines caused by Py2 compat
            text = text.replace('Generator = object\n', '')
            text = text.replace('select = NotImplemented\n', '')
            text = text.replace('iteritems: Any\n', '')
            text = text.replace('text_type = str\n', '')
            text = text.replace('text_type: Any\n', '')
            text = text.replace('string_types: Any\n', '')
            text = text.replace('PY2: Any\n', '')
            text = text.replace('__win32_can_symlink__: Any\n', '')
            # text = text.replace('odict = OrderedDict', '')
            # text = text.replace('ddict = defaultdict', '')

            if mod.path.endswith('util_path.py'):
                # hack for forward reference
                text = text.replace(' -> Path:', " -> 'Path':")
                text = text.replace('class Path(_PathBase)', "class Path")

            # Format the PYI file nicely
            text = autoflake.fix_code(text,
                                      remove_unused_variables=True,
                                      remove_all_unused_imports=True)

            # import autopep8
            # text = autopep8.fix_code(text, options={
            #     'aggressive': 0,
            #     'experimental': 0,
            # })

            style = yapf.yapf_api.style.CreatePEP8Style()
            text, _ = yapf.yapf_api.FormatCode(text,
                                               filename='<stdin>',
                                               style_config=style,
                                               lines=None,
                                               verify=False)

            # print(text)

            # Write output to file.
            subdir = dirname(target)
            if subdir and not os.path.isdir(subdir):
                os.makedirs(subdir)
            with open(target, 'w') as file:
                file.write(text)
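
The final cleanup above chains Py2-compat string removal, autoflake, and yapf; a self-contained sketch of that post-processing applied to a standalone stub string (using yapf's named 'pep8' style rather than the CreatePEP8Style() object used above):

import autoflake
import yapf

stub_text = (
    'from typing import Any, Dict\n'
    'text_type = str\n'
    'def dict_take(d, keys, default=...): ...\n'
)

# Drop Py2-compat residue, then strip unused imports and re-format.
stub_text = stub_text.replace('text_type = str\n', '')
stub_text = autoflake.fix_code(stub_text,
                               remove_unused_variables=True,
                               remove_all_unused_imports=True)
stub_text, _ = yapf.yapf_api.FormatCode(stub_text, style_config='pep8')
print(stub_text)
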
Code example #7
File: core.py  Project: gotcha/xdoctest
def package_calldefs(modpath_or_name, exclude=[], ignore_syntax_errors=True,
                     analysis='static'):
    """
    Statically generates all callable definitions in a module or package

    Args:
        modpath_or_name (str): path to or name of the module to be tested

        exclude (List[str]): glob-patterns of file names to exclude

        ignore_syntax_errors (bool, default=True):
            if False raise an error when syntax errors occur in a doctest

        analysis (str, default='static'):
            if 'static', only static analysis is used to parse call
            definitions. If 'auto', uses dynamic analysis for compiled python
            extensions, but static analysis elsewhere. If 'dynamic', then
            dynamic analysis is used to parse all calldefs.

    Example:
        >>> modpath_or_name = 'xdoctest.core'
        >>> testables = list(package_calldefs(modpath_or_name))
        >>> assert len(testables) == 1
        >>> calldefs, modpath = testables[0]
        >>> assert static_analysis.modpath_to_modname(modpath) == modpath_or_name
        >>> assert 'package_calldefs' in calldefs
    """
    pkgpath = _rectify_to_modpath(modpath_or_name)

    modpaths = static_analysis.package_modpaths(pkgpath, with_pkg=True,
                                                with_libs=True)
    modpaths = list(modpaths)
    for modpath in modpaths:
        modname = static_analysis.modpath_to_modname(modpath)
        if any(fnmatch(modname, pat) for pat in exclude):
            continue
        if not exists(modpath):
            warnings.warn(
                'Module {} does not exist. '
                'Is it an old pyc file?'.format(modname))
            continue

        # backwards compatibility hacks
        if '--allow-xdoc-dynamic' in sys.argv:
            analysis = 'auto'
        if '--xdoc-force-dynamic' in sys.argv:
            analysis = 'dynamic'

        needs_dynamic = modpath.endswith(
            static_analysis._platform_pylib_exts())

        if analysis == 'static':
            do_dynamic = False
        elif analysis == 'dynamic':
            do_dynamic = True
        elif analysis == 'auto':
            do_dynamic = needs_dynamic
        else:
            raise KeyError(analysis)

        if do_dynamic:
            try:
                calldefs = dynamic_analysis.parse_dynamic_calldefs(modpath)
            except (ImportError, RuntimeError) as ex:
                # Some modules are just c modules
                msg = 'Cannot dynamically parse module={} at path={}.\nCaused by: {!r} {}'
                msg = msg.format(modname, modpath, type(ex), ex)
                warnings.warn(msg)
            except Exception as ex:
                msg = 'Cannot dynamically parse module={} at path={}.\nCaused by: {!r} {}'
                msg = msg.format(modname, modpath, type(ex), ex)
                warnings.warn(msg)
                raise
            else:
                yield calldefs, modpath
        else:
            if needs_dynamic:
                # Some modules can only be parsed dynamically
                continue
            try:
                calldefs = static_analysis.parse_calldefs(fpath=modpath)
            except SyntaxError as ex:
                # Handle error due to the actual code containing errors
                msg = 'Cannot parse module={} at path={}.\nCaused by: {}'
                msg = msg.format(modname, modpath, ex)
                if ignore_syntax_errors:
                    warnings.warn(msg)  # real code or docstr contained errors
                    continue
                else:
                    raise SyntaxError(msg)
            else:
                yield calldefs, modpath
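
A small sketch of the needs_dynamic check that gates the branches above; _platform_pylib_exts() is assumed (per its use here) to return the compiled-extension suffixes for the running interpreter, so a simple path test decides whether static parsing is even possible:

from xdoctest import static_analysis

exts = static_analysis._platform_pylib_exts()
print(exts)  # e.g. a tuple ending in '.so' suffixes on CPython/Linux

def needs_dynamic(modpath):
    # Compiled extension modules have no parseable source, so only
    # dynamic (import-based) analysis can discover their calldefs.
    return modpath.endswith(exts)

print(needs_dynamic('mymod.cpython-38-x86_64-linux-gnu.so'))  # likely True on Linux
print(needs_dynamic('mymod.py'))                              # False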