Example #1
 def run(self):
     print('Processing "%s" ..' % self.filename, file=sys.stderr)
     try:
         index = cindex.Index(cindex.conf.lib.clang_createIndex(
             False, True))
         tu = index.parse(self.filename, self.parameters)
         extract(self.filename, tu.cursor, '', self.output)
     finally:
         job_semaphore.release()
Example #2
 def run(self):
     global errors_detected
     print('Processing "%s" ..' % self.filename, file=sys.stderr)
     try:
         index = cindex.Index(
             cindex.conf.lib.clang_createIndex(False, True))
         tu = index.parse(self.filename, self.parameters)
         extract(self.filename, tu.cursor, '', self.output)
     except BaseException:
         errors_detected = True
         raise
     finally:
         job_semaphore.release()
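Both run() methods above release a shared job_semaphore once a parse job finishes (Example #2 additionally records failures and re-raises). A minimal driver sketch, assuming the workers are threading.Thread subclasses and that job_semaphore simply bounds the number of concurrent parse jobs (start_job and the limit of four are illustrative, not from the source):

import threading

job_semaphore = threading.Semaphore(4)  # at most four parse jobs at once

def start_job(worker):
    # Block until a slot is free; the worker's run() releases the semaphore
    # in its finally block, so even a failed job frees its slot.
    job_semaphore.acquire()
    worker.start()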
Example #3
 def run(self):
     print('Processing "%s" ..' % self.filename, file=sys.stderr)
     context = {
         "filename": self.filename,
         "parameters": self.parameters,
         "classes": {},
         "class_templates": {},
         "class_template_instantiations": {},
     }
     index = cindex.Index(cindex.conf.lib.clang_createIndex(False, True))
     tu = index.parse(self.filename, self.parameters)
     extract_nodes(self.filename, tu.cursor, self.nodes)
     for n in self.nodes:
         # dump_node(n)
         p = getparent(n, self.nodes)
         if p is None or p.kind == CursorKind.NAMESPACE or p.kind == CursorKind.TRANSLATION_UNIT:
             if n.kind in NODE_HANDLERS.keys():
                 func = NODE_HANDLERS[n.kind]
                 func(n, self.nodes, self.output, '  ', 'm', context)
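Example #3 dispatches each top-level cursor through a NODE_HANDLERS table keyed by libclang's CursorKind. A sketch of what such a table might look like, matching the six-argument call used above (the handler body and the set of kinds are assumptions, not the original implementation):

from clang.cindex import CursorKind

def handle_class(node, nodes, output, prefix, access, context):
    # Remember the class so later handlers can attach members to it.
    context["classes"][node.spelling] = node
    output.append("%sclass %s" % (prefix, node.spelling))

NODE_HANDLERS = {
    CursorKind.CLASS_DECL: handle_class,
    # CursorKind.STRUCT_DECL, CursorKind.CLASS_TEMPLATE, ... would map to
    # similar handlers in the real script.
}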
Example #4
def main():
    parameters = ['-x', 'c++', '-D__MKDOC_PY__']
    filenames = []

    library_file = None
    if platform.system() == 'Darwin':
        completed_process = subprocess.run(['xcrun', '--find', 'clang'],
                                           stdout=subprocess.PIPE,
                                           encoding='utf-8')
        if completed_process.returncode == 0:
            toolchain_dir = os.path.dirname(
                os.path.dirname(completed_process.stdout.strip()))
            library_file = os.path.join(toolchain_dir, 'lib', 'libclang.dylib')
        completed_process = subprocess.run(['xcrun', '--show-sdk-path'],
                                           stdout=subprocess.PIPE,
                                           encoding='utf-8')
        if completed_process.returncode == 0:
            sdkroot = completed_process.stdout.strip()
            if os.path.exists(sdkroot):
                parameters.append('-isysroot')
                parameters.append(sdkroot)
    elif platform.system() == 'Linux':
        library_file = '/usr/lib/llvm-6.0/lib/libclang.so'
    if library_file and os.path.exists(library_file):
        cindex.Config.set_library_path(os.path.dirname(library_file))

    quiet = False
    std = '-std=c++11'
    root_name = 'mkdoc_doc'
    ignore_patterns = []
    output_filename = None

    for item in sys.argv[1:]:
        if item == '-quiet':
            quiet = True
        elif item.startswith('-output='):
            output_filename = item[len('-output='):]
        elif item.startswith('-std='):
            std = item
        elif item.startswith('-root-name='):
            root_name = item[len('-root-name='):]
        elif item.startswith('-exclude-hdr-patterns='):
            ignore_patterns.append(item[len('-exclude-hdr-patterns='):])
        elif item.startswith('-'):
            parameters.append(item)
        else:
            filenames.append(item)

    parameters.append(std)

    if output_filename is None or len(filenames) == 0:
        eprint('Syntax: %s -output=<file> [.. a list of header files ..]' %
               sys.argv[0])
        sys.exit(1)

    f = open(output_filename, 'w', encoding='utf-8')
    # N.B. We substitute the `GENERATED FILE...` bits in this fashion because
    # otherwise Reviewable gets confused.
    f.write('''#pragma once

// {0} {1}
// This file contains docstrings for the Python bindings that were
// automatically extracted by mkdoc.py.

#if defined(__GNUG__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-variable"
#endif

'''.format('GENERATED FILE', 'DO NOT EDIT'))

    # Determine project include directories.
    # N.B. For simplicity when using with Bazel, we do not try to get canonical
    # file paths for determining include files.
    include_paths = []
    for param in parameters:
        # Only check for normal include directories.
        if param.startswith("-I"):
            include_paths.append(param[2:])
    # Use longest include directories first to get shortest include file
    # overall.
    include_paths = list(sorted(include_paths, key=len))[::-1]
    include_files = []
    # Create mapping from filename to include file.
    include_file_map = FileDict()
    for filename in filenames:
        for include_path in include_paths:
            prefix = include_path + "/"
            if filename.startswith(prefix):
                include_file = filename[len(prefix):]
                break
        else:
            raise RuntimeError(
                "Filename not incorporated into -I includes: {}".format(
                    filename))
        for p in ignore_patterns:
            if fnmatch(include_file, p):
                break
        else:
            include_files.append(include_file)
            include_file_map[filename] = include_file
    assert len(include_files) > 0
    # Generate the glue include file, which will include all relevant include
    # files, and parse. Use a tempdir that is relative to the output file for
    # usage with Bazel.
    tmpdir = output_filename + ".tmp_artifacts"
    os.mkdir(tmpdir)
    glue_filename = os.path.join(tmpdir, "mkdoc_glue.h")
    with open(glue_filename, 'w') as glue_f:
        for include_file in sorted(include_files):
            line = "#include \"{}\"".format(include_file)
            glue_f.write(line + "\n")
            f.write("// " + line + "\n")
        f.write("\n")
        glue_f.flush()
        if not quiet:
            eprint("Parse headers...")
        index = cindex.Index(cindex.conf.lib.clang_createIndex(False, True))
        translation_unit = index.parse(
            glue_filename,
            parameters,
            options=cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
    shutil.rmtree(tmpdir)
    # Extract symbols.
    if not quiet:
        eprint("Extract relevant symbols...")
    symbol_tree = SymbolTree()
    extract(include_file_map, translation_unit.cursor, symbol_tree)
    # Write header file.
    if not quiet:
        eprint("Writing header file...")
    try:
        print_symbols(f, root_name, symbol_tree.root)
    except UnicodeEncodeError as e:
        # User-friendly error for #9903.
        print("""
Encountered unicode error: {}
If you are on Ubuntu, please ensure you have en_US.UTF-8 locales generated:
    sudo apt-get install --no-install-recommends locales
    sudo locale-gen en_US.UTF-8
""".format(e),
              file=sys.stderr)
        sys.exit(1)

    f.write('''
#if defined(__GNUG__)
#pragma GCC diagnostic pop
#endif
''')
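The glue-header trick in main() writes a single header that #includes every input file and parses only that header, so libclang sees all declarations in one translation unit. A stripped-down sketch of that parse step using the public clang.cindex API (the path and flags are illustrative):

from clang import cindex

index = cindex.Index.create()
tu = index.parse(
    "mkdoc_glue.h",  # the generated glue header
    ["-x", "c++", "-std=c++11", "-Iinclude"],
    options=cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
# Surface parse problems instead of silently emitting empty docstrings.
for diag in tu.diagnostics:
    print(diag.severity, diag.spelling)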
Example #5
def main():
    parameters = ['-x', 'c++', '-D__MKDOC_PY__']
    filenames = []

    if platform.system() == 'Darwin':
        dev_path = '/Applications/Xcode.app/Contents/Developer/'
        lib_dir = dev_path + 'Toolchains/XcodeDefault.xctoolchain/usr/lib/'
        sdk_dir = dev_path + 'Platforms/MacOSX.platform/Developer/SDKs'
        libclang = lib_dir + 'libclang.dylib'

        if os.path.exists(libclang):
            cindex.Config.set_library_path(os.path.dirname(libclang))

        if os.path.exists(sdk_dir):
            sysroot_dir = os.path.join(sdk_dir, next(os.walk(sdk_dir))[1][0])
            parameters.append('-isysroot')
            parameters.append(sysroot_dir)

    quiet = False
    std = '-std=c++11'
    root_name = 'mkdoc_doc'
    ignore_patterns = []
    output_filename = None

    for item in sys.argv[1:]:
        if item == '-quiet':
            quiet = True
        elif item.startswith('-output='):
            output_filename = item[len('-output='):]
        elif item.startswith('-std='):
            std = item
        elif item.startswith('-root-name='):
            root_name = item[len('-root-name='):]
        elif item.startswith('-exclude-hdr-patterns='):
            ignore_patterns.append(item[len('-exclude-hdr-patterns='):])
        elif item.startswith('-'):
            parameters.append(item)
        else:
            filenames.append(item)

    parameters.append(std)

    if output_filename is None or len(filenames) == 0:
        eprint('Syntax: %s -output=<file> [.. a list of header files ..]' %
               sys.argv[0])
        sys.exit(1)

    f = open(output_filename, 'w')
    # N.B. We substitute the `GENERATED FILE...` bits in this fashion because
    # otherwise Reviewable gets confused.
    f.write('''#pragma once

// {0} {1}
// This file contains docstrings for the Python bindings that were
// automatically extracted by mkdoc.py.

#if defined(__GNUG__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-variable"
#endif

'''.format('GENERATED FILE', 'DO NOT EDIT'))

    # Determine project include directories.
    # N.B. For simplicity when using with Bazel, we do not try to get canonical
    # file paths for determining include files.
    include_paths = []
    for param in parameters:
        # Only check for normal include directories.
        if param.startswith("-I"):
            include_paths.append(param[2:])
    # Use longest include directories first to get shortest include file
    # overall.
    include_paths = list(sorted(include_paths, key=len))[::-1]
    include_files = []
    # Create mapping from filename to include file.
    include_file_map = FileDict()
    for filename in filenames:
        for include_path in include_paths:
            prefix = include_path + "/"
            if filename.startswith(prefix):
                include_file = filename[len(prefix):]
                break
        else:
            raise RuntimeError(
                "Filename not incorporated into -I includes: {}".format(
                    filename))
        for p in ignore_patterns:
            if fnmatch(include_file, p):
                break
        else:
            include_files.append(include_file)
            include_file_map[filename] = include_file
    assert len(include_files) > 0
    # Generate the glue include file, which will include all relevant include
    # files, and parse. Add a unique prefix so we do not accidentally leak
    # paths in `/tmp`.
    dir_prefix = mkdtemp(prefix="drake_mkdoc_")
    glue_include_file = NamedTemporaryFile('w',
                                           prefix="glue_include_file_",
                                           dir=dir_prefix)
    with glue_include_file:
        for include_file in sorted(include_files):
            line = "#include \"{}\"".format(include_file)
            glue_include_file.write(line + "\n")
            f.write("// " + line + "\n")
        f.write("\n")
        glue_include_file.flush()
        if not quiet:
            eprint("Parse headers...")
        index = cindex.Index(cindex.conf.lib.clang_createIndex(False, True))
        translation_unit = index.parse(glue_include_file.name, parameters)
    os.rmdir(dir_prefix)
    # Extract symbols.
    if not quiet:
        eprint("Extract relevant symbols...")
    symbol_tree = SymbolTree()
    extract(include_file_map, translation_unit.cursor, symbol_tree)
    # Write header file.
    if not quiet:
        eprint("Writing header file...")
    try:
        print_symbols(f, root_name, symbol_tree.root)
    except UnicodeEncodeError as e:
        # User-friendly error for #9903.
        print("""
Encountered unicode error: {}
If you are on Ubuntu, please ensure you have en_US.UTF-8 locales generated:
    sudo apt-get install --no-install-recommends locales
    sudo locale-gen en_US.UTF-8
""".format(e),
              file=sys.stderr)
        sys.exit(1)

    f.write('''
#if defined(__GNUG__)
#pragma GCC diagnostic pop
#endif
''')
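Both main() variants map each input filename back to an #include path by stripping the longest matching -I directory, which keeps the generated #include lines as short as possible. The idea in isolation (the directories and file name here are made up):

include_paths = sorted(["/src", "/src/include"], key=len, reverse=True)
filename = "/src/include/drake/foo.h"
for include_path in include_paths:
    prefix = include_path + "/"
    if filename.startswith(prefix):
        include_file = filename[len(prefix):]  # "drake/foo.h"
        break
else:
    raise RuntimeError("Filename not incorporated into -I includes")
print(include_file)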
Example #6
def main():
    parameters = ['-x', 'c++', '-D__MKDOC_PY__']
    add_library_paths(parameters)
    filenames = []

    quiet = False
    std = '-std=c++11'
    root_name = 'mkdoc_doc'
    ignore_patterns = []
    output_filename = None
    output_filename_xml = None
    # Must be initialized here: it is read later even when the
    # -ignore-dirs-for-coverage= flag is never passed.
    ignore_dirs_for_coverage = None

    # TODO(m-chaturvedi): Consider using argparse.
    for item in sys.argv[1:]:
        if item == '-quiet':
            quiet = True
        elif item.startswith('-output='):
            output_filename = item[len('-output='):]
        elif item.startswith('-output_xml='):
            output_filename_xml = item[len('-output_xml='):]
        elif item.startswith('-std='):
            std = item
        elif item.startswith('-ignore-dirs-for-coverage='):
            ignore_dir_str = item[len('-ignore-dirs-for-coverage='):]
            ignore_dirs_for_coverage = None
            if ignore_dir_str:
                ignore_dirs_for_coverage = tuple(ignore_dir_str.split(','))
        elif item.startswith('-root-name='):
            root_name = item[len('-root-name='):]
        elif item.startswith('-exclude-hdr-patterns='):
            ignore_patterns.append(item[len('-exclude-hdr-patterns='):])
        elif item.startswith('-'):
            parameters.append(item)
        else:
            filenames.append(item)

    parameters.append(std)

    if output_filename is None or len(filenames) == 0:
        eprint('Syntax: %s -output=<file> [.. a list of header files ..]' %
               sys.argv[0])
        sys.exit(1)

    f = open(output_filename, 'w', encoding='utf-8')
    f_xml = None
    if output_filename_xml is not None:
        f_xml = open(output_filename_xml, 'w')

    # N.B. We substitute the `GENERATED FILE...` bits in this fashion because
    # otherwise Reviewable gets confused.
    f.write('''#pragma once

// {0} {1}
// This file contains docstrings for the Python bindings that were
// automatically extracted by mkdoc.py.

#if defined(__GNUG__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-variable"
#endif

'''.format('GENERATED FILE', 'DO NOT EDIT'))

    # Determine project include directories.
    # N.B. For simplicity when using with Bazel, we do not try to get canonical
    # file paths for determining include files.
    include_paths = []
    for param in parameters:
        # Only check for normal include directories.
        if param.startswith("-I"):
            include_paths.append(param[2:])
    # Use longest include directories first to get shortest include file
    # overall.
    include_paths = list(sorted(include_paths, key=len))[::-1]
    include_files = []
    # Create mapping from filename to include file.
    include_file_map = FileDict()
    used_ignore_patterns = set()
    for filename in filenames:
        for include_path in include_paths:
            prefix = include_path + "/"
            if filename.startswith(prefix):
                include_file = filename[len(prefix):]
                break
        else:
            raise RuntimeError(
                "Filename not incorporated into -I includes: {}".format(
                    filename))
        for p in ignore_patterns:
            if fnmatch(include_file, p):
                used_ignore_patterns.add(p)
                break
        else:
            include_files.append(include_file)
            include_file_map[filename] = include_file
    assert len(include_files) > 0
    unused_ignore_patterns = set(ignore_patterns) - used_ignore_patterns
    if unused_ignore_patterns:
        print(f"Unused ignore patterns: {unused_ignore_patterns}")
    # Generate the glue include file, which will include all relevant include
    # files, and parse. Use a tempdir that is relative to the output file for
    # usage with Bazel.
    tmpdir = output_filename + ".tmp_artifacts"
    os.mkdir(tmpdir)
    glue_filename = os.path.join(tmpdir, "mkdoc_glue.h")
    with open(glue_filename, 'w') as glue_f:
        for include_file in sorted(include_files):
            line = "#include \"{}\"".format(include_file)
            glue_f.write(line + "\n")
            f.write("// " + line + "\n")
        f.write("\n")
        glue_f.flush()
        if not quiet:
            eprint("Parse headers...")
        index = cindex.Index(cindex.conf.lib.clang_createIndex(False, True))
        translation_unit = index.parse(
            glue_filename,
            parameters,
            options=cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
        if not translation_unit:
            raise RuntimeError(
                "Parsing headers using the clang library failed")
        severities = [
            diagnostic.severity for diagnostic in translation_unit.diagnostics
            if diagnostic.severity >= cindex.Diagnostic.Error
        ]
        if severities:
            raise RuntimeError(
                ("Parsing headers using the clang library failed with {} "
                 "error(s) and {} fatal error(s)").format(
                     severities.count(cindex.Diagnostic.Error),
                     severities.count(cindex.Diagnostic.Fatal)))
    shutil.rmtree(tmpdir)
    # Extract symbols.
    if not quiet:
        eprint("Extract relevant symbols...")
    symbol_tree = SymbolTree()
    extract(include_file_map, translation_unit.cursor, symbol_tree)
    # Write header file.
    if not quiet:
        eprint("Writing header file...")
    try:
        tree_parser = {
            "tree_parser_doc": [],
            "tree_parser_xpath": [ET.Element("Root")],
            "ignore_dirs_for_coverage": ignore_dirs_for_coverage
        }

        print_symbols(f, root_name, symbol_tree.root, **tree_parser)
    except UnicodeEncodeError as e:
        # User-friendly error for #9903.
        print("""
Encountered unicode error: {}
If you are on Ubuntu, please ensure you have en_US.UTF-8 locales generated:
    sudo apt-get install --no-install-recommends locales
    sudo locale-gen en_US.UTF-8
""".format(e),
              file=sys.stderr)
        sys.exit(1)

    f.write('''
#if defined(__GNUG__)
#pragma GCC diagnostic pop
#endif
''')
    if f_xml is not None:
        f_xml.write(prettify(tree_parser["tree_parser_xpath"][0]))
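Example #6 carries a TODO about switching the flag handling to argparse. A rough equivalent of the hand-rolled loop, keeping the same single-dash flag spellings (a sketch only; the -I/-D compiler flags and the header filenames would come back in extras):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-quiet', action='store_true')
parser.add_argument('-output', required=True)
parser.add_argument('-output_xml')
parser.add_argument('-std', default='c++11')
parser.add_argument('-root-name', dest='root_name', default='mkdoc_doc')
parser.add_argument('-exclude-hdr-patterns', dest='ignore_patterns',
                    action='append', default=[])
args, extras = parser.parse_known_args()  # -I/-D flags and header paths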
Example #7
        for i in node.get_children():
            extract_nodes(filename, i, output, sub_prefix)
    if node.kind in PRINT_LIST:
        output.append(node)
    else:
        # print("Ignored: {}".format(node.kind))
        pass
    return output


def dump_node(n, prefix=''):
    print(prefix + "name: {0:40} {2:20} displayname: {1}".format(
        n.spelling, n.displayname,
        str(n.kind).replace('CursorKind.', '')))


def dump(nodes):
    for i, node in enumerate(nodes):
        dump_node(node, "{:3} ".format(i))


index = cindex.Index(cindex.conf.lib.clang_createIndex(False, True))
tu = index.parse(filename, parameters)
nodes = extract_nodes(filename, tu.cursor)
# s = [x for x in nodes if x.kind == CursorKind.STRUCT_DECL]
# s0 = s[0]
# s1 = s[1]
dump(nodes)
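extract_nodes in Example #7 keeps only cursors whose kind appears in PRINT_LIST, which is not shown in the excerpt. An illustrative definition of that filter (the actual list in the source may differ):

from clang.cindex import CursorKind

PRINT_LIST = [
    CursorKind.CLASS_DECL,
    CursorKind.STRUCT_DECL,
    CursorKind.ENUM_DECL,
    CursorKind.FUNCTION_DECL,
    CursorKind.CXX_METHOD,
    CursorKind.FIELD_DECL,
]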