def _build_tree(file_path, compiler_parameters):
    """Parse *file_path* with libclang and build a tree of Node objects
    mirroring the cursors declared in that file.

    :param file_path: path of the file to parse (made absolute here).
    :param compiler_parameters: compiler arguments forwarded to clang.
    :return: root ``Node`` wrapping the translation-unit cursor.
    """
    file_path = os.path.abspath(file_path)

    def build_tree_recursive(cursor, parent, access):
        # Wrap this cursor, then recurse into its children that belong to
        # the parsed file itself.
        node = Node(cursor, parent, access)
        for child in cursor.get_children():
            if child.kind == CursorKind.COMPOUND_STMT:
                # Ignore nodes defining code logic
                continue
            if not child.location.file or os.path.abspath(
                    child.location.file.name) != file_path:
                # Ignore nodes from other files
                continue
            child_access = child.access_specifier
            if child_access == AccessSpecifier.INVALID:
                # Inherit the enclosing access level when clang reports none.
                child_access = access
            child_node = build_tree_recursive(child, node, child_access)
            node.children.append(child_node)
            assert child_node.parent == node
        return node

    # clang_createIndex(excludeDeclarationsFromPCH, displayDiagnostics):
    # diagnostics are printed, PCH declarations are kept.
    index = Index(conf.lib.clang_createIndex(False, True))
    translation_unit = index.parse(
        file_path, compiler_parameters,
        options=TranslationUnit.PARSE_INCOMPLETE |
        TranslationUnit.PARSE_SKIP_FUNCTION_BODIES |
        TranslationUnit.PARSE_INCLUDE_BRIEF_COMMENTS_IN_CODE_COMPLETION)
    return build_tree_recursive(translation_unit.cursor, None,
                                AccessSpecifier.NONE)
def ParseCurrentFile():
    """Parse the file most recently queued in the module-level parse state.

    Reads ``parseFile``/``parseContent`` under ``parseLock``, builds two
    translation units (one cached for other consumers, one walked here to
    collect scope names), and publishes progress via ``parsingCurrentState``.
    """
    global parseLock
    global parseFile
    global parseContent
    global parseNeeded
    global parseLastFile
    global parseTus
    global parseScopeNames
    global parsingCurrentState
    with parseLock:
        if not parseNeeded:
            return
        fileToParse = parseFile
        contentToParse = parseContent
        parseNeeded = False
        parsingCurrentState = "Parsing %s" % fileToParse
    command = None
    try:
        if g_api is not None:
            command = g_api.get_file_args(fileToParse)
    except Exception as e:
        with parseLock:
            parsingCurrentState = str(e)
        return
    parsingStatePrefix = ""
    unsaved_files = [(fileToParse, contentToParse)]
    # Fix: identity comparison with None (was `command == None`) and
    # endswith() with a tuple instead of chained `or` clauses.
    if command is None:
        if fileToParse.endswith((".h", ".hpp")):
            # Headers can't be compiled directly: wrap them in a synthetic
            # translation unit that just includes the header.
            unsaved_files.append(("temporary_source.cpp",
                                  "#include \"%s\"\n#include <stdint.h>\nint main() { return 0; }\n" % fileToParse))
            command = ["g++", "temporary_source.cpp"]
            parsingStatePrefix = "[HEADER] "
        elif fileToParse.endswith((".cpp", ".cc", ".c", ".cxx")):
            command = ["g++", fileToParse]
            parsingStatePrefix = "[SOURCE] "
        else:
            with parseLock:
                parsingCurrentState = "Can't find command line arguments"
            return
    index = Index.create()
    tu = index.parse(None, command + ["-I%s" % g_builtin_header_path],
                     unsaved_files=unsaved_files,
                     options=TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
    with parseLock:
        parseTus[fileToParse] = tu
        parsingCurrentState = "%sPartially parsed %s" % (parsingStatePrefix, fileToParse)
    scopeNames = []
    scopeDepths = []
    # create a new tu so that we don't walk it from two different threads
    index = Index.create()
    tu = index.parse(None, command + ["-I%s" % g_builtin_header_path],
                     unsaved_files=unsaved_files,
                     options=TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
    PopulateScopeNames(GetCursorForFile(tu, os.path.abspath(fileToParse)),
                       scopeNames, scopeDepths)
    with parseLock:
        parseScopeNames[fileToParse] = scopeNames
        parsingCurrentState = "%sFully parsed %s" % (parsingStatePrefix, fileToParse)
def skip_if_no_clang():
    """Decorate your test with this to skip it if clang isn't present."""
    try:
        Index.create()
    except LibclangError as error:
        # libclang could not be loaded: skip unconditionally with the reason.
        return skip(str(error))
    # clang is usable: return a decorator that never skips.
    return skipIf(False, '')
def clang_available(cls):
    """
    Check whether Clang can be loaded and used.

    :return: True if Clang is available, a description of the error else.
    """
    try:
        Index.create()
    except LibclangError as error:  # pragma: no cover
        return str(error)
    return True
def test_class_definitions(self):
    # Each case: (header to parse, class names class_definitions must find).
    cases = [
        ("tools/envoy_headersplit/code_corpus/class_defn.h",
         ["Foo", "Bar", "FooBar", "DeadBeaf"]),
        ("tools/envoy_headersplit/code_corpus/class_defn_without_namespace.h",
         []),
    ]
    for header_path, expected_names in cases:
        idx = Index.create()
        translation_unit_class_defn = idx.parse(header_path, ["-x", "c++"])
        found = [
            cursor.spelling
            for cursor in headersplit.class_definitions(translation_unit_class_defn.cursor)
        ]
        self.assertEqual(found, expected_names)
def clang_available(cls):
    """
    Check that the pinned libclang binding is installed and loadable.

    :return: True if Clang is available, a description of the error else.
    """
    # The bear only supports exactly this binding version.
    if get_distribution('libclang-py3').version != '3.4.0':
        return ClangBear.name + ' requires clang 3.4.0'
    try:
        Index.create()
    except LibclangError as error:  # pragma: no cover
        return str(error)
    return True
def clang_available(cls):
    """
    Checks if Clang is available and ready to use.

    :return: True if Clang is available, a description of the error else.
    """
    # Only the exact pinned binding version is supported.
    if get_distribution('libclang-py3').version != '3.4.0':
        return ClangBear.name + ' requires clang 3.4.0'
    try:  # pragma nt: no cover
        Index.create()
        return True
    except LibclangError as error:  # pragma: no cover
        return str(error)
def __init__(self):
    """Create a fresh libclang index and empty parser state."""
    self.index = Index.create()
    self.ast = Object()
    self.mapping = {}
    super(Parser, self).__init__()
def test_location():
    index = Index.create()

    def check(tu, one_expected, two_expected):
        # Verify the (line, column, offset) of the 'one' and 'two' decls.
        for node in tu.cursor.get_children():
            if node.spelling == 'one':
                ln, col, off = one_expected
                assert_location(node.location, line=ln, column=col, offset=off)
            if node.spelling == 'two':
                ln, col, off = two_expected
                assert_location(node.location, line=ln, column=col, offset=off)

    tu = index.parse('t.c', unsaved_files=[('t.c', baseInput)])
    check(tu, (1, 5, 4), (2, 5, 13))

    # adding a linebreak at top should keep columns same
    tu = index.parse('t.c', unsaved_files=[('t.c', "\n" + baseInput)])
    check(tu, (2, 5, 5), (3, 5, 14))

    # adding a space should affect column on first line only
    tu = index.parse('t.c', unsaved_files=[('t.c', " " + baseInput)])
    check(tu, (1, 6, 5), (2, 5, 14))
def __init__(self, lang, header, api_headers, check_all, args, verbose):
    """Parse *header* with libclang and prepare empty declaration buckets.

    :param lang: 'c' or 'c++'; selects the -x language argument.
    :param header: header file handed to the parser.
    :param api_headers: headers considered part of the public API.
    :param check_all: whether all declarations should be checked.
    :param args: extra compiler arguments.
    :param verbose: when true, print parser diagnostics.
    """
    index = Index.create()
    self.is_cpp = True if lang == 'c++' else False
    self.clang_args = ['-x', lang]
    self.translation_unit = index.parse(header, args + self.clang_args,
                                        options=1)
    if verbose:
        for diag in self.translation_unit.diagnostics:
            # Bug fix: for severity 0/1 (ignored/note) `msg` was never
            # assigned, so `msg +=` below raised NameError. Skip them.
            if diag.severity == 2:
                msg = '\033[93mWARNING : '
            elif diag.severity == 3:
                msg = '\033[91mERROR : '
            elif diag.severity == 4:
                msg = '\033[91mFATAL : '
            else:
                continue
            msg += '{} at {} line {}, column {}\033[00m'
            print(msg.format(diag.spelling, diag.location.file,
                             diag.location.line, diag.location.column))
    self.api_headers = api_headers
    self.check_all = check_all
    self.enum_constant_decls = []
    self.function_decls = []
    self.var_decls = []
    self.macro_defs = []
    self.record_decls = []
    self.namespaces = []
def get_from_header(header_name):
    """Walk the preprocessed AST of *header_name* and record its public names.

    Appends to the module-level lists ``function_names_from_source``,
    ``typedef_names_from_source``, ``enum_names_from_source`` and
    ``macro_names_from_source``; only cursors whose location basename matches
    the header itself are considered (skips included files).
    """
    expected_basename = header_name.split('/')[-1]
    index = Index.create()
    # Detailed-processing-record mode so macro definitions show up as cursors.
    tu = index.parse(header_name,
                     options=TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
    # | TranslationUnit.PARSE_SKIP_FUNCTION_BODIES
    for cursor in tu.cursor.get_children():
        #print(cursor.kind)
        basename = str(cursor.location.file).split('/')[-1]
        if basename != expected_basename:
            continue
        #print(cursor.kind, cursor.displayname)
        if cursor.kind == CursorKind.FUNCTION_DECL:
            function_names_from_source.append(cursor.spelling)
            # `sig` is only assembled for the (commented-out) debug print;
            # it is not returned or stored.
            sig = cursor.result_type.spelling + ' ' + cursor.spelling + '('
            args = []
            for arg in cursor.get_children():
                if arg.kind != CursorKind.PARM_DECL:
                    continue
                args.append(arg.type.spelling + ' ' + arg.spelling)
            #if cursor.type.is_function_variadic():
            #    args.append('...')
            sig += ', '.join(args) + ')'
            #print(sig)
        if cursor.kind == CursorKind.TYPEDEF_DECL:
            typedef_names_from_source.append(cursor.spelling)
        if cursor.kind == CursorKind.ENUM_DECL:
            enum_names_from_source.append(cursor.spelling)
        if cursor.kind == CursorKind.MACRO_DEFINITION and not cursor.spelling.endswith(
                '_INCLUDED'):
            # Skip include guards (FOO_INCLUDED) but keep real macros.
            macro_names_from_source.append(cursor.spelling)
def main():
    """CLI entry point: insert doc comment blocks into a C++ file.

    Usage: ``prog [-o] <file>``; with ``-o`` the file is rewritten in place,
    otherwise the documented source is written to stdout.
    """
    import sys
    from clang.cindex import Index, Config
    from io import open

    # TODO: Don't hard code the clang library path
    Config.set_library_path("/usr/lib/llvm-3.3/lib")

    if len(sys.argv) == 1 or len(sys.argv) > 3:
        usage()
        sys.exit(1)

    # The source file is always the last argument; -o requests in-place write.
    overwrite = "-o" in sys.argv
    cppFile = sys.argv[-1]

    index = Index.create()
    transUnit = index.parse(cppFile)
    docBlocks = get_doc_comments(transUnit.cursor, cppFile)
    source = source_with_doc_blocks(cppFile, docBlocks)
    # Bug fix: `unicode(...)` is a NameError on Python 3; the source text is
    # already a text string, so write it directly.
    if overwrite:
        with open(cppFile, "w") as file:
            file.write(source)
    else:
        sys.stdout.write(source)
def main():
    """CLI entry point: emit an include graph for the given source file.

    Exits with the highest diagnostic severity encountered (0 on success,
    1 if the translation unit could not be built).
    """
    if len(sys.argv) < 2:
        sys.stderr.write('missing arguments (see -h for help)\n')
        sys.exit(1)
    if '-h' in sys.argv:
        sys.stderr.write('%s\n\n' % __doc__.strip())
        sys.stderr.write('usage example: %s /usr/include/stdio.h | dot -Txlib\n'
                         % sys.argv[0])
        sys.exit(1)

    logging.basicConfig(level=logging.INFO)
    index = Index.create()
    tu = index.parse(None, sys.argv[1:])
    # Fix: `rc = 0` was assigned twice (dead store); keep a single init.
    rc = 0
    if tu:
        emit_graph(sys.stdout, tu)
        # Map clang severities onto logger methods.
        log_map = {
            0: logger.debug,
            1: logger.info,
            2: logger.warning,
            3: logger.error,
            4: logger.fatal
        }
        for d in tu.diagnostics:
            rc = max(d.severity, rc)
            log = log_map[d.severity]
            log('in file %s, line %d:%s'
                % (d.location.file.name, d.location.line, d.spelling))
    else:
        rc = 1
    sys.exit(rc)
def test_code_complete():
    """Code completion with brief doc-comments enabled returns the comments."""
    index = Index.create()
    # NOTE(review): the fixture's interior line breaks were lost in transit;
    # layout restored so that (line 9, col 1) falls inside f()'s body — confirm.
    files = [('fake.c', """
/// Aaa.
int test1;

/// Bbb.
void test2(void);

void f() {

}
""")]
    tu = TranslationUnit.from_source(
        'fake.c', ['-std=c99'], unsaved_files=files,
        options=TranslationUnit.
        PARSE_INCLUDE_BRIEF_COMMENTS_IN_CODE_COMPLETION,
        index=index)
    # Complete at line 9, column 1: inside the body of f().
    cr = tu.codeComplete('fake.c', 9, 1, unsaved_files=files,
                         include_brief_comments=True)
    expected = [
        "{'int', ResultType} | {'test1', TypedText} || Priority: 50 || Availability: Available || Brief comment: Aaa.",
        "{'void', ResultType} | {'test2', TypedText} | {'(', LeftParen} | {')', RightParen} || Priority: 50 || Availability: Available || Brief comment: Bbb.",
        "{'return', TypedText} || Priority: 40 || Availability: Available || Brief comment: None"
    ]
    check_completion_results(cr, expected)
def get_fake_function_define(self, srcfile, filterList=None):
    '''
    Generate FFF fake-function definitions from a C source file.

    Parameters
    ==========
    srcfile : string
        Source code file name.
    filterList : string
        List of function names to generate fakes for (None = all).

    Returns
    ========
    fakes : list of fake function definitions
    '''
    code = self.extract_source_file(srcfile)
    index = Index.create()
    tu = index.parse(srcfile, unsaved_files=[(srcfile, code)])
    func_node = get_function_decl_node(tu.cursor)
    # Fix: identity comparison with None (was `filterList != None`).
    if filterList is not None:
        func_node = filter_function_decl_node(func_node, filterList)
    fakes = []
    for func in func_node:
        # void-returning functions use the VOID_FUNC macro family.
        if func.result_type.spelling == 'void':
            fake = cat_fff_void_func(func)
        else:
            fake = cat_fff_value_func(func)
        fakes += [fake]
    return fakes
def main():
    """Parse CLI arguments and lint each given C source file."""
    cli = argparse.ArgumentParser(
        description='Checks the norme on C source files')
    cli.add_argument('files', metavar='FILES', type=str, nargs='+',
                     help='source files to be processed')
    cli.add_argument('-d', '--dump', action='store_true',
                     help='Dump file AST content')
    parsed = cli.parse_args()
    if len(parsed.files) == 0:
        cli.error('invalid number arguments')
    for path in parsed.files:
        tu = Index.create().parse(path, None)
        if not tu:
            cli.error("unable to load input")
        infos = get_info(tu.cursor, path)
        if parsed.dump:
            rprint(infos)
        Linter(infos, path).lint()
def parse_files(list_of_files: List[str]) -> Tuple[Any, List[Any]]:
    """
    Use Clang to parse the provided list of files, and return
    a tuple of the Clang index, and the list of compiled ASTs
    (one for each compilation unit)
    """
    idx = Index.create()
    t_units = []  # type: List[Any]

    # To avoid parsing the files all the time, store the parsed ASTs
    # of each compilation unit in a cache.
    if not os.path.exists(".cache"):
        os.mkdir(".cache")

    def _parse_and_cache(i: int, filename: str, cache_filename: str) -> None:
        # Parse the file now and save its AST into the cache.
        print("[-] %3d%% Parsing " % (
            100*(i+1)/len(list_of_files)) + filename)
        t_units.append(idx.parse(filename))
        t_units[-1].save(cache_filename)

    for i, filename in enumerate(list_of_files):
        cache_filename = '.cache/' + os.path.basename(filename) + "_cache"
        # Have I parsed this file before?
        if os.path.exists(cache_filename):
            # Yes, load from cache
            try:
                t_units.append(
                    TranslationUnit.from_ast_file(
                        cache_filename, idx))
                print("[-] Loading cached AST for", filename)
            except TranslationUnitLoadError:
                # Cache entry is unreadable; fall back to a fresh parse.
                _parse_and_cache(i, filename, cache_filename)
        else:
            # No, parse it now.
            _parse_and_cache(i, filename, cache_filename)
    return idx, t_units
def __init__(self, srcdir, files=None, filespattern="*.h",
             clangargs=('-x', 'c++'), pythonify_types=True):
    """
    Default class constructor.

    :param `srcdir`: C/C++/ObjC source files directory path.
    :type `srcdir`: str

    :param `files`: C/C++/ObjC source files paths; when None, `srcdir` is
        searched recursively with `filespattern`.
    :type `files`: str

    :param `clangargs`: Arguments passed to clang.
    :type `clangargs`: Sequence[str]

    :param `filespattern`: Unix shell like pattern for source files
    :type `filespattern`: str

    :param `pythonify_types`: If set to True replace clang types with
        their python alternatives.
    :type `pythonify_types`: bool
    """
    self.srcdir = srcdir
    self.clangargs = clangargs
    self.pythonify_types = pythonify_types
    if files is not None:
        self.fileslist = list(files)
    else:
        self.fileslist = glob_recursive(srcdir, filespattern)
    self._index = Index.create()
    # Caches: parsed entities and the search index built over them.
    self._pool = {}
    self._searchindex = {}
def find_function(self, path): """ Obtain all fully qualified names from current header file. Args: path: The path of current header file. Returns: All fully qualified names in the header file. """ # remove it when include dependencies resolved git_root = "hana" header = os.path.join(git_root, "rte", "rtebase", "include") arguments = ["-x", "c++", "-I" + git_root, "-I" + header] index = Index.create() tu = index.parse(path, arguments) func_dict = dict() decl_kinds = { "FUNCTION_DECL", "CXX_METHOD", "CONSTRUCTOR", "DESTRUCTOR", "CONVERSION_FUNCTION" } cpnt = Component().best_matched(path) for node in tu.cursor.walk_preorder(): if node.location.file and node.location.file.name == path and node.spelling: if str(node.kind).split(".")[1] in decl_kinds: func = self.fully_qualified(node, path) func_dict[func] = cpnt return func_dict
def get_tu(cpp_basename):
    """Parse *cpp_basename* (relative to kInputsDir) and return the TU.

    Echoes the equivalent clang++ command line for debugging.
    """
    cpp_file_path = os.path.join(kInputsDir, cpp_basename)
    # Fix: Python 2 print statement -> print() function.
    print('clang++ -c ' + ' '.join(extra_args) +
          ' ' + ' '.join(include_args) + ' ' + cpp_file_path)
    index = Index.create()
    tu = index.parse(cpp_file_path, args=extra_args + include_args)
    return tu
def generate_graph(path):
    """Build a call graph for all function definitions found under *path*.

    Vertices are function display names; an edge A -> B means A's body
    contains a call whose referenced declaration is B. Isolated vertices
    are removed before returning the graph.
    """
    definitions = {}
    graph = Graph()
    index = Index.create()
    # Use the function declarations to create the graph edges.
    for (sl, tu) in get_tus(index, path):
        print(tu.spelling)
        for (k, v) in get_decl(tu, sl):
            # Only keep declarations that carry a body (last child is a
            # compound statement), i.e. actual definitions.
            sub_children = list(map(lambda x: x.kind, v.get_children()))
            if not (len(sub_children) > 0
                    and sub_children[-1] == CursorKind.COMPOUND_STMT):
                continue
            if k in definitions:
                # A display name must map to exactly one definition.
                raise Exception("error: ", k)
            definitions[k] = v
            graph.add_vertex(k, label=k, short=v.spelling)
    print("Defined functions:", len(definitions))
    # Use the calls done inside each declaration to create the vertices.
    for k, v in definitions.items():
        # De-duplicate call targets per caller before adding edges.
        edges = {}
        for n in get_calls(v):
            ref = n.referenced
            if ref.displayname in definitions:
                edges[ref.displayname] = ref
        for _, ref in edges.items():
            graph.add_edge(k, ref.displayname)
    # Drop functions that neither call nor are called.
    to_delete_ids = [v.index for v in graph.vs if v.degree() == 0]
    graph.delete_vertices(to_delete_ids)
    return graph
def test_code_complete():
    """Code completion with brief doc-comments enabled returns the comments."""
    index = Index.create()
    # NOTE(review): the fixture's interior line breaks were lost in transit;
    # layout restored so that (line 9, col 1) falls inside f()'s body — confirm.
    files = [
        (
            "fake.c",
            """
/// Aaa.
int test1;

/// Bbb.
void test2(void);

void f() {

}
""",
        )
    ]
    tu = TranslationUnit.from_source(
        "fake.c",
        ["-std=c99"],
        unsaved_files=files,
        options=TranslationUnit.PARSE_INCLUDE_BRIEF_COMMENTS_IN_CODE_COMPLETION,
        index=index,
    )
    # Complete at line 9, column 1: inside the body of f().
    cr = tu.codeComplete("fake.c", 9, 1, unsaved_files=files,
                         include_brief_comments=True)
    expected = [
        "{'int', ResultType} | {'test1', TypedText} || Priority: 50 || Availability: Available || Brief comment: Aaa.",
        "{'void', ResultType} | {'test2', TypedText} | {'(', LeftParen} | {')', RightParen} || Priority: 50 || Availability: Available || Brief comment: Bbb.",
        "{'return', TypedText} || Priority: 40 || Availability: Available || Brief comment: None",
    ]
    check_completion_results(cr, expected)
def parse(self, filename):
    """
    . reads 1 file
    . if there is a compilation error, print a warning
    . get root cursor and recurse
    . for each STRUCT_DECL, register a new struct type
    . for each UNION_DECL, register a new union type
    . for each TYPEDEF_DECL, register a new alias/typdef to the underlying type
        - underlying type is cursor.type.get_declaration() for Record
    . for each VAR_DECL, register a Variable
    . for each TYPEREF ??
    """
    index = Index.create()
    self.tu = index.parse(filename, self.flags, options=self.tu_options)
    if not self.tu:
        log.warning("unable to load input")
        return
    if len(self.tu.diagnostics) > 0:
        for x in self.tu.diagnostics:
            log.warning(x.spelling)
            if x.severity > 2:
                # Severe diagnostic: warn (spelling is logged twice here,
                # once above and once with the error banner) and stop
                # scanning further diagnostics.
                log.warning("Source code has some error. Please fix.")
                log.warning(x.spelling)
                # code.interact(local=locals())
                break
    root = self.tu.cursor
    # Dispatch each top-level cursor to the element handler.
    for node in root.get_children():
        self.startElement(node)
    return
def getBuiltinHeaderPath(library_path):
    """Locate clang's builtin-header include directory.

    Tries a list of known install layouts (relative to *library_path* and
    system-wide) and returns the first include path whose headers libclang
    can actually resolve, or None when none works.
    """
    index = Index.create()
    candidates = [
        library_path + "/../lib/clang",  # default value
        library_path + "/../clang",      # gentoo
        library_path + "/clang",         # opensuse
        library_path + "/",              # Google
        "/usr/lib64/clang",              # x86_64 (openSUSE, Fedora)
        "/usr/lib/clang"
    ]
    for base in candidates:
        try:
            entries = os.listdir(base)
            # Pick the newest version subdirectory, if the dir is non-empty.
            if len(entries) >= 1:
                version_dir = sorted(entries)[-1]
            else:
                version_dir = '.'
            include_path = base + "/" + version_dir + "/include/"
            if canFindBuiltinHeaders(index, ["-I" + include_path]):
                return include_path
        except Exception:
            # Best effort: silently skip paths that don't exist/aren't readable.
            pass
    return None
def get_tu(source, lang='c', all_warnings=False):
    """Obtain a translation unit from source and language.

    By default, the translation unit is created from source file "t.<ext>"
    where <ext> is the default file extension for the specified language. By
    default it is C, so "t.c" is the default file name.

    Supported languages are {c, cpp, objc}.

    all_warnings is a convenience argument to enable all compiler warnings.
    """
    filenames = {'c': 't.c', 'cpp': 't.cpp', 'objc': 't.m'}
    if lang not in filenames:
        raise Exception('Unknown language: %s' % lang)
    name = filenames[lang]
    args = ['-Wall', '-Wextra'] if all_warnings else []
    index = Index.create()
    tu = index.parse(name, args=args, unsaved_files=[(name, source)])
    assert tu is not None
    return tu
def main():
    """Parse the hard-coded file "person.cpp" and print its variable set.

    CLI flags are parsed only so clang arguments can be forwarded; note the
    positional-argument check is commented out and the input file is fixed.
    """
    from clang.cindex import Index
    from pprint import pprint
    from optparse import OptionParser, OptionGroup

    global opts
    parser = OptionParser()
    parser.add_option("", "--show-ids", dest="showIDs",
                      help="Compute cursor IDs (very slow)",
                      action="store_true", default=False)
    parser.add_option("", "--max-depth", dest="maxDepth",
                      help="Limit cursor expansion to depth N",
                      metavar="N", type=int, default=None)
    parser.disable_interspersed_args()
    (opts, args) = parser.parse_args()

    # if len(args) == 0:
    #     parser.error('invalid number arguments')

    index = Index.create()
    # Remaining CLI args are forwarded to clang as compiler arguments.
    tu = index.parse("person.cpp", args)
    if not tu:
        parser.error("unable to load input")
    var_set = get_info(tu.cursor)
    print(var_set)
def __init__(self, lang, header, api_headers, check_all, args):
    """Parse *header* with libclang and prepare empty declaration buckets.

    :param lang: 'c' or 'c++'; selects the -x language argument.
    :param header: header file handed to the parser.
    :param api_headers: headers considered part of the public API.
    :param check_all: whether all declarations should be checked.
    :param args: extra compiler arguments.
    """
    # TODO: Avoid hard-coding paths and args in general.
    Config.set_library_file('libclang-6.0.so.1')
    index = Index.create()
    self.is_cpp = True if lang == 'c++' else False
    self.clang_args = ['-x', lang]
    self.translation_unit = index.parse(header, args + self.clang_args,
                                        options=1)
    # Every diagnostic is reported with the ERROR banner regardless of its
    # actual severity.
    for diag in self.translation_unit.diagnostics:
        msg = '\033[91mERROR : {} at {} line {}, column {}\033[00m'
        print(
            msg.format(diag.spelling, diag.location.file, diag.location.line,
                       diag.location.column))
    self.api_headers = api_headers
    self.check_all = check_all
    self.enum_constant_decls = []
    self.function_decls = []
    self.var_decls = []
    self.macro_defs = []
    self.record_decls = []
    self.namespaces = []
def test_file(self):
    # File objects resolve by name within a translation unit.
    tu = Index.create().parse('t.c', unsaved_files=[('t.c', "")])
    fobj = File.from_name(tu, "t.c")
    self.assertEqual(str(fobj), "t.c")
    self.assertEqual(fobj.name, "t.c")
    self.assertEqual(repr(fobj), "<File: t.c>")
def test_file():
    # File objects resolve by name within a translation unit.
    tu = Index.create().parse('t.c', unsaved_files=[('t.c', "")])
    f = File.from_name(tu, "t.c")
    assert str(f) == "t.c"
    assert f.name == "t.c"
    assert repr(f) == "<File: t.c>"
def parse(self, filename):
    """
    . reads 1 file
    . if there is a compilation error, print a warning
    . get root cursor and recurse
    . for each STRUCT_DECL, register a new struct type
    . for each UNION_DECL, register a new union type
    . for each TYPEDEF_DECL, register a new alias/typdef to the underlying type
        - underlying type is cursor.type.get_declaration() for Record
    . for each VAR_DECL, register a Variable
    . for each TYPEREF ??
    """
    # Skip files that were already processed in this session.
    if os.path.abspath(filename) in self.__processed_location:
        return
    index = Index.create()
    tu = index.parse(filename, self.flags, options=self.tu_options)
    if not tu:
        log.warning("unable to load input")
        return
    self._parse_tu_diagnostics(tu, filename)
    self.tu = tu
    root = self.tu.cursor
    # Dispatch each top-level cursor to the element handler.
    for node in root.get_children():
        self.startElement(node)
    return
def main(args): """ divides the monolithic mock file into different mock class files. """ decl_filename = args["decl"] impl_filename = args["impl"] idx = Index.create() impl_translation_unit = TranslationUnit.from_source( impl_filename, options=TranslationUnit.PARSE_SKIP_FUNCTION_BODIES) impl_includes = get_directives(impl_translation_unit) decl_translation_unit = idx.parse(decl_filename, ["-x", "c++"]) defns = class_definitions(decl_translation_unit.cursor) decl_includes = get_directives(decl_translation_unit) impl_cursors = class_implementations(impl_translation_unit.cursor) contents = read_file_contents(impl_filename) classname_to_impl = extract_implementations(impl_cursors, contents) classnames = [cursor.spelling for cursor in defns] for defn in defns: # writing {class}.h and {classname}.cc class_name, class_defn, deps = extract_definition(defn, classnames) includes = "" for name in deps: includes += '#include "{}.h"\n'.format(to_filename(name)) class_defn = decl_includes + includes + class_defn class_impl = "" if class_name not in classname_to_impl: print("Warning: empty class {}".format(class_name)) else: impl_include = impl_includes.replace( decl_filename, "{}.h".format(to_filename(class_name))) # we need to enclose methods with namespaces namespace_prefix, namespace_suffix = get_enclosing_namespace(defn) class_impl = impl_include + namespace_prefix + \ classname_to_impl[class_name] + namespace_suffix write_file_contents(class_name, class_defn, class_impl)
def _run_checker(checker, argv):
    """Run *checker* over the files found at argv.location[0].

    In 'full' mode every file is scanned; otherwise the checker's pattern
    hint pre-filters candidates. Lazily locates libclang on first use and
    caches it in the module-global ``clang_library_file``.
    """
    global clang_library_file
    if argv.execution_mode == 'full':
        files = core.get_files(argv.location[0])
    else:
        files = core.get_files(argv.location[0], checker.get_pattern_hint())
    if not files:
        cnf = 'Could not find any problem related to '
        cnf += checker.get_problem_type().lower()
        sys.stderr.write(cnf + '\n')
    else:
        print(__current_wip(checker, files))
        visitor = Visitor(checker)
        if clang_library_file == '':
            # Prefer the highest-versioned libclang.so found on disk.
            clang_libraries = glob.glob('/usr/lib*/libclang.so*')
            reverse_list = list(reversed(clang_libraries))
            if reverse_list:
                clang_library_file = reverse_list[0]
            Config.set_library_file(clang_library_file)
        index = Index.create()
        for c_file in files:
            args = ["-ferror-limit=9999"] + _include_paths()
            root = index.parse(
                c_file, args=args,
                options=TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
            # Reset per-file suppression state before visiting.
            ReportBlocker.blocked_lines = []
            visitor.visit(root.cursor, c_file)
def _parse(self):
    """Parse self.filename and split its FUNCTION_DECL cursors into
    definitions (``self.functions``, keyed by spelling) and prototypes
    (``self.prototypes``)."""
    self.prototypes = []
    self.functions = {}

    def visit_compound(node):
        # Depth-first search for the first compound statement (the body).
        if node.kind == CursorKind.COMPOUND_STMT:
            return node
        for n in node.get_children():
            result = visit_compound(n)
            if result is not None:
                return result
        return None

    def visit(node):
        if node.kind == CursorKind.FUNCTION_DECL:
            if str(node.extent.start.file) != self.filename:
                # Function is not declared in the source file
                return
            # elif node.is_definition():
            # NOTE(review): get_definition() may return None for functions
            # with no definition in this TU, which would raise
            # AttributeError here — confirm inputs always have definitions.
            elif node.location == node.get_definition().location:
                body_node = visit_compound(node)
                if body_node is None:
                    raise AnalyzerError
                definition = FunctionDefinition(node, body_node)
                self.functions[node.spelling] = definition
            else:
                self.prototypes.append(Prototype(node))
        for n in node.get_children():
            visit(n)

    visit(Index.create().parse(self.filename).cursor)
def test_location():
    index = Index.create()
    # Each case: (source text, expected loc of 'one', expected loc of 'two'),
    # where a location is a (line, column, offset) triple.
    cases = [
        (baseInput, (1, 5, 4), (2, 5, 13)),
        # adding a linebreak at top should keep columns same
        ("\n" + baseInput, (2, 5, 5), (3, 5, 14)),
        # adding a space should affect column on first line only
        (" " + baseInput, (1, 6, 5), (2, 5, 14)),
    ]
    for source, one_expected, two_expected in cases:
        tu = index.parse('t.c', unsaved_files=[('t.c', source)])
        for node in tu.cursor.get_children():
            if node.spelling == 'one':
                ln, col, off = one_expected
                assert_location(node.location, line=ln, column=col, offset=off)
            if node.spelling == 'two':
                ln, col, off = two_expected
                assert_location(node.location, line=ln, column=col, offset=off)
def test_get_children():
    """Cursor.get_children walks declarations in source order."""
    index = Index.create()
    tu = index.parse('t.c', unsaved_files=[('t.c', kInput)])

    # Skip until past start_decl.
    # Fix: `it.next()` is Python-2-only; `next(it)` works on both 2.6+ and 3.
    it = tu.cursor.get_children()
    while next(it).spelling != 'start_decl':
        pass

    tu_nodes = list(it)
    assert len(tu_nodes) == 3

    assert tu_nodes[0].kind == CursorKind.STRUCT_DECL
    assert tu_nodes[0].spelling == 's0'
    assert tu_nodes[0].is_definition()
    assert tu_nodes[0].location.file.name == 't.c'
    assert tu_nodes[0].location.line == 4
    assert tu_nodes[0].location.column == 8

    s0_nodes = list(tu_nodes[0].get_children())
    assert len(s0_nodes) == 2
    assert s0_nodes[0].kind == CursorKind.FIELD_DECL
    assert s0_nodes[0].spelling == 'a'
    assert s0_nodes[1].kind == CursorKind.FIELD_DECL
    assert s0_nodes[1].spelling == 'b'

    assert tu_nodes[1].kind == CursorKind.STRUCT_DECL
    assert tu_nodes[1].spelling == 's1'
    assert not tu_nodes[1].is_definition()

    assert tu_nodes[2].kind == CursorKind.FUNCTION_DECL
    assert tu_nodes[2].spelling == 'f0'
    assert tu_nodes[2].is_definition()
def parseHeaderFileAtPath(headerFilePath, includePaths):
    """Parse a kernel header as C++ and describe the classes it defines.

    Returns a dict with a "classes" mapping of class name -> parseClass()
    result, or None when severe diagnostics were produced for a header that
    defines classes.
    """
    index = Index.create()
    parseHeaderArgs = ['-I{}'.format(path) for path in includePaths]
    #parseHeaderArgs.extend(["-x", "c++", "-DKERNEL", "-DXNU_KERNEL_PRIVATE", "-DMACH_BSD", "-DCONFIG_MACF", "-DMACH_KERNEL_PRIVATE", "-D__LP64__", "-arch", "x86_64"])
    parseHeaderArgs.extend(["-x", "c++", "-DKERNEL", "-DMACH_BSD",
                            "-DCONFIG_MACF", "-D__LP64__", "-arch", "x86_64"])
    #print parseHeaderArgs
    tu = index.parse(headerFilePath, args=parseHeaderArgs)
    # Fix: filter() returns a lazy iterator on Python 3 (len() would fail);
    # materialize the severe diagnostics as a list.
    diagnostics = [d for d in tu.diagnostics if d.severity > 2]
    parseResultOfHeaderFile = {}
    classes = []
    getDefinedClasses(tu.cursor, classes)
    if len(classes) > 0 and len(diagnostics) > 0:
        # Fix: Python 2 print statement -> print() function.
        print("[!] parseHeaderFileAtPath {}:\n {}".format(
            headerFilePath,
            "\n".join(["{}:{}".format(x.severity, str(x)) for x in diagnostics])))
        return None
    parseResultOfAllClasses = {}
    for classNode in classes:
        className = classNode.spelling
        parseResultOfClass = parseClass(className, classNode)
        parseResultOfAllClasses[className] = parseResultOfClass
    parseResultOfHeaderFile["classes"] = parseResultOfAllClasses
    # Should also handle structs, etc...
    return parseResultOfHeaderFile
def getClangObject(self):
    """
    need import clang.cindex and Index
    just return the cursor object, which points to the target function

    Finds the self.count-th top-level cursor spelled self.function in
    self.filename and stores it on self.obj; returns True on success,
    False when no such occurrence exists.
    """
    index = Index.create()
    if self.options:
        tu = index.parse(self.filename, args=self.options)
    else:
        tu = index.parse(self.filename)
    tmpCnt = 0
    for obj in tu.cursor.get_children():
        if obj.spelling == self.function:
            if tmpCnt != int(self.count):
                # Not the requested occurrence yet; keep counting.
                tmpCnt = tmpCnt + 1
                continue
            # here we succeeded in finding
            self.obj = obj
            del index
            del tu
            return True
    # in this case: we find it is error (just return when reaching here)
    del index
    del tu
    return False
def test_parse_arguments():
    # -D macros on the command line must expand inside the parsed file.
    path = os.path.join(kInputsDir, 'parse_arguments.c')
    tu = TranslationUnit.from_source(
        path, ['-DDECL_ONE=hello', '-DDECL_TWO=hi'], index=Index.create())
    spellings = [c.spelling for c in tu.cursor.get_children()]
    assert spellings[-2] == 'hello'
    assert spellings[-1] == 'hi'
def test_unsaved_files_2():
    import io
    # Unsaved-file contents may be provided via a file-like object.
    tu = TranslationUnit.from_source(
        'fake.c',
        unsaved_files=[('fake.c', io.StringIO('int x;'))],
        index=Index.create())
    spellings = [c.spelling for c in tu.cursor.get_children()]
    assert spellings[-1] == 'x'
def main():
    """CLI entry point: parse the given file and pretty-print its
    diagnostics and AST info (remaining args are forwarded to clang)."""
    from clang.cindex import Index
    from pprint import pprint
    from optparse import OptionParser, OptionGroup

    global opts
    parser = OptionParser("usage: %prog [options] (unknown) [clang-args*]")
    parser.add_option("", "--show-ids", dest="showIDs",
                      help="Compute cursor IDs (very slow)",
                      action="store_true", default=False)
    parser.add_option("", "--max-depth", dest="maxDepth",
                      help="Limit cursor expansion to depth N",
                      metavar="N", type=int, default=None)
    parser.disable_interspersed_args()
    (opts, args) = parser.parse_args()

    if len(args) == 0:
        parser.error('invalid number arguments')

    index = Index.create()
    # First arg None: the source file is taken from the clang arguments.
    tu = index.parse(None, args)
    if not tu:
        parser.error("unable to load input")

    pprint(('diags', map(get_diag_info, tu.diagnostics)))
    pprint(('nodes', get_info(tu.cursor)))
def main():
    """CLI entry point: parse the given Objective-C input and emit Go source."""
    from clang.cindex import Index
    from pprint import pprint
    from optparse import OptionParser, OptionGroup

    # TODO: global opts
    parser = OptionParser("usage: %prog [options] (unknown) [clang-args*]")
    parser.disable_interspersed_args()
    (opts, args) = parser.parse_args()
    if len(args) > 0:
        # Compile as 64-bit ARC Objective-C.
        args += ['-c', '-ObjC', '-m64', '-fobjc-arc']
        tu = Index.create().parse(None, args)
        if tu:
            create_go_source(tu.cursor)
        else:
            parser.error("unable to load input")
    else:
        parser.error('invalid number arguments')
def run(c_src_filename):
    """Instrument the branching statements of *c_src_filename* and write the
    result to ``<file>.instr`` (with a prepended ``#include <stdio.h>``).

    Populates the module-level ``conditionals``/``orig_lines``/``mod_lines``
    state used by the instrumentation helpers.
    """
    global conditionals, orig_file, mod_file
    global orig_lines, mod_lines
    index = Index.create()
    if not os.path.exists(c_src_filename):
        # Fix: Python 2 print statements -> print() function (throughout).
        print('Problem opening file: ' + c_src_filename)
        return None
    orig_file = open(c_src_filename, 'r')
    mod_file = open(c_src_filename + '.instr', 'w+')
    orig_lines = orig_file.readlines()
    # Make the modified file the same as original file.
    # Fix: copy the list instead of aliasing it, so instrumentation that
    # mutates mod_lines in place cannot corrupt orig_lines.
    mod_lines = orig_lines[:]
    tu = index.parse(c_src_filename)
    if not tu:
        print('Unable to load input')
        return None
    # Get list of all if, while and for statements
    get_ifs_and_whiles(tu.cursor)
    # Add instrumentation for all branching statements
    add_instrumentation()
    # Include stdio, just in case
    mod_lines = ['#include <stdio.h>\n'] + mod_lines
    # Finally write it to the instrumented replacement file
    mod_file.writelines(mod_lines)
    print([c.location.line for c in conditionals])
def test_file(self):
    # A File created by name must round-trip through str/name/repr.
    tu = Index.create().parse('t.c', unsaved_files=[('t.c', "")])
    file_obj = File.from_name(tu, "t.c")
    for actual, wanted in [(str(file_obj), "t.c"),
                           (file_obj.name, "t.c"),
                           (repr(file_obj), "<File: t.c>")]:
        self.assertEqual(actual, wanted)
def main():
    """CLI entry point: parse the given file and pretty-print its
    diagnostics and AST info (remaining args are forwarded to clang)."""
    from clang.cindex import Index
    from pprint import pprint
    from optparse import OptionParser, OptionGroup

    global opts
    parser = OptionParser("usage: %prog [options] (unknown) [clang-args*]")
    # Fix: --show-ids was missing action="store_true", so it consumed the
    # next CLI token as its value; its help text also contradicted the
    # default (said "Don't compute" with default=False).
    parser.add_option("", "--show-ids", dest="showIDs",
                      help="Compute cursor IDs (very slow)",
                      action="store_true", default=False)
    parser.add_option("", "--max-depth", dest="maxDepth",
                      help="Limit cursor expansion to depth N",
                      metavar="N", type=int, default=None)
    parser.disable_interspersed_args()
    (opts, args) = parser.parse_args()

    if len(args) == 0:
        parser.error('invalid number arguments')

    index = Index.create()
    tu = index.parse(None, args)
    if not tu:
        parser.error("unable to load input")

    pprint(('diags', map(get_diag_info, tu.diagnostics)))
    pprint(('nodes', get_info(tu.cursor)))
def init_clang():
    """Configure libclang (conda-provided) and monkeypatch extra cursor
    methods; idempotent via the module-global ``initialized`` flag.

    :raises RuntimeError: when CONDA_PREFIX is not set in the environment.
    """
    global initialized, ix
    if not initialized:
        # Fix: Path(None) raised an opaque TypeError when run outside a
        # conda environment; fail with an actionable message instead.
        conda_prefix_env = getenv('CONDA_PREFIX')
        if conda_prefix_env is None:
            raise RuntimeError(
                'CONDA_PREFIX is not set; activate the conda environment '
                'that provides libclang before calling init_clang()')
        conda_prefix = Path(conda_prefix_env)
        # str() for compatibility with bindings whose set_library_file
        # expects a plain string path.
        Config.set_library_file(str(conda_prefix / 'lib' / 'libclang.so'))

        # Monkeypatch clang
        monkeypatch_cursor('is_virtual',
                           'clang_isVirtualBase',
                           [Cursor], c_uint)
        monkeypatch_cursor('is_inline',
                           'clang_Cursor_isFunctionInlined',
                           [Cursor], c_uint)
        monkeypatch_cursor('get_specialization',
                           'clang_getSpecializedCursorTemplate',
                           [Cursor], Cursor)
        monkeypatch_cursor('get_template_kind',
                           'clang_getTemplateCursorKind',
                           [Cursor], c_uint)
        monkeypatch_cursor('get_num_overloaded_decl',
                           'clang_getNumOverloadedDecls',
                           [Cursor], c_uint)

        initialized = True
        ix = Index.create()
def parse(filename, **options):
    """Parse *filename* with libclang and return doc comments sorted by line.

    Keyword options:
        clang:  comma-separated extra clang arguments.
        compat: compatibility mode forwarded to doccompat.convert().
    """
    raw_args = options.get('clang')
    clang_args = None
    if raw_args is not None:
        pieces = [piece.strip() for piece in raw_args.split(',')]
        cleaned = [piece for piece in pieces if piece]
        if cleaned:
            clang_args = cleaned

    tu = Index.create().parse(
        filename, args=clang_args,
        options=TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD |
        TranslationUnit.PARSE_SKIP_FUNCTION_BODIES)
    top_level_comments, comments = comment_extract(tu)

    def compat(text):
        return doccompat.convert(text, options.get('compat'))

    entries = []
    for comment in top_level_comments:
        entries.extend(_result(comment, compat=compat))
    for cursor in tu.cursor.get_children():
        if cursor.hash in comments:
            entries.extend(_recursive_parse(comments, cursor, 0, compat))

    # Sort all elements by order of appearance.
    entries.sort(key=lambda entry: entry[1]['line'])
    return entries
def test_get_children():
    """Walk the top-level children of a parsed TU and verify their kinds,
    spellings and source locations."""
    index = Index.create()
    tu = index.parse("t.c", unsaved_files=[("t.c", kInput)])

    # Skip until past start_decl.
    it = tu.cursor.get_children()
    # next(it) instead of it.next(): the .next() method is Python-2-only
    # and was removed in Python 3; the next() builtin works on both.
    while next(it).spelling != "start_decl":
        pass

    tu_nodes = list(it)

    assert len(tu_nodes) == 3

    assert tu_nodes[0].kind == CursorKind.STRUCT_DECL
    assert tu_nodes[0].spelling == "s0"
    assert tu_nodes[0].is_definition() == True
    assert tu_nodes[0].location.file.name == "t.c"
    assert tu_nodes[0].location.line == 4
    assert tu_nodes[0].location.column == 8

    s0_nodes = list(tu_nodes[0].get_children())
    assert len(s0_nodes) == 2
    assert s0_nodes[0].kind == CursorKind.FIELD_DECL
    assert s0_nodes[0].spelling == "a"
    assert s0_nodes[1].kind == CursorKind.FIELD_DECL
    assert s0_nodes[1].spelling == "b"

    assert tu_nodes[1].kind == CursorKind.STRUCT_DECL
    assert tu_nodes[1].spelling == "s1"
    assert tu_nodes[1].is_definition() == False

    assert tu_nodes[2].kind == CursorKind.FUNCTION_DECL
    assert tu_nodes[2].spelling == "f0"
    assert tu_nodes[2].is_definition() == True
def test_fail_from_ast_file():
    """Loading a non-existent .ast file must raise TranslationUnitLoadError."""
    path = os.path.join(kInputsDir, 'non-existent.ast')
    # Index.create() moved out of the try: only from_ast_file() should be
    # guarded, otherwise an unrelated Index failure would pass the test.
    index = Index.create()
    try:
        tu = TranslationUnit.from_ast_file(path, index=index)
    except TranslationUnitLoadError:
        tu = None
    # Identity check: `is None` is the idiom, not `== None`.
    assert tu is None
def __init__(self):
    """Cache the TextMate project directory, a shared libclang Index, and
    the set of cursor kinds that introduce a named type."""
    # dict.has_key() is Python-2-only (removed in Python 3); .get() with a
    # default also avoids the double lookup.
    self.project_dir = env.get('TM_PROJECT_DIRECTORY', ".")
    self.index = Index.create()
    # Cursor kinds treated as type declarations when scanning.
    self.type_kinds = set([CursorKind.STRUCT_DECL,
                           CursorKind.CLASS_DECL,
                           CursorKind.CLASS_TEMPLATE,
                           CursorKind.TYPEDEF_DECL])
def __parse(self, args):
    """Parse the accumulated includes as an in-memory header file and
    return the resulting translation unit."""
    # The file never touches disk: it only names the unsaved buffer.
    scratch_name = b"~.hpp"
    source = bytes(self.includes, self.encode)
    parse_options = (TranslationUnit.PARSE_SKIP_FUNCTION_BODIES
                     | TranslationUnit.PARSE_INCOMPLETE)
    index = Index.create(excludeDecls=True)
    return index.parse(path=scratch_name,
                       args=args,
                       unsaved_files=[(scratch_name, source)],
                       options=parse_options)
def build():
    """Parse Inputs/main.cpp and return a directed pygraphviz graph with
    one edge per (derived, base) inheritance relation."""
    tu = Index.create().parse(os.path.join(kInputsDir, 'main.cpp'))
    graph = pygraphviz.AGraph(directed=True)
    for derived, base in ir.each_inheritance_relation(tu.cursor):
        graph.add_edge((derived, base))
    return graph
def initClangComplete(clang_complete_flags, clang_compilation_database, library_path):
    """Initialise libclang for the clang_complete vim plugin.

    Returns 1 on success, 0 when libclang could not be loaded.
    Populates the module-level globals used by the completion machinery:
    index, builtinHeaderPath, translationUnits, complete_flags,
    compilation_database and libclangLock.
    """
    global index
    # Plugin settings are read back from vim variables.
    debug = int(vim.eval("g:clang_debug")) == 1
    quiet = int(vim.eval("g:clang_quiet")) == 1
    if library_path:
        # A directory is a search path; anything else is the library file itself.
        if os.path.isdir(library_path):
            Config.set_library_path(library_path)
        else:
            Config.set_library_file(library_path)
    Config.set_compatibility_check(False)
    try:
        index = Index.create()
    except Exception as e:
        if quiet:
            return 0
        if library_path:
            suggestion = "Are you sure '%s' contains libclang?" % library_path
        else:
            suggestion = "Consider setting g:clang_library_path."
        if debug:
            exception_msg = str(e)
        else:
            exception_msg = ''
        print('''Loading libclang failed, completion won't be available. %s %s ''' % (suggestion, exception_msg))
        return 0
    global builtinHeaderPath
    builtinHeaderPath = None
    # Fall back to a bundled header path when libclang cannot locate its own
    # builtin includes; missing them makes completion slow, not broken.
    if not canFindBuiltinHeaders(index):
        builtinHeaderPath = getBuiltinHeaderPath(library_path)
        if not builtinHeaderPath:
            print("WARNING: libclang can not find the builtin includes.")
            print(" This will cause slow code completion.")
            print(" Please report the problem.")
    # Per-file cache of translation units.
    global translationUnits
    translationUnits = dict()
    global complete_flags
    complete_flags = int(clang_complete_flags)
    global compilation_database
    if clang_compilation_database != '':
        compilation_database = CompilationDatabase.fromDirectory(clang_compilation_database)
    else:
        compilation_database = None
    # Serialises access to libclang, which is not thread-safe.
    global libclangLock
    libclangLock = threading.Lock()
    return 1
def test_a_struct():
    """Check spelling, constness, kind and canonical kind of every field of
    'teststruct' in kInput, plus typedef/pointer details."""
    index = Index.create()
    tu = index.parse('t.c', unsaved_files = [('t.c',kInput)])

    for n in tu.cursor.get_children():
        if n.spelling != 'teststruct':
            continue

        fields = list(n.get_children())
        assert all(x.kind == CursorKind.FIELD_DECL for x in fields)

        # (spelling, is_const, type kind, canonical kind or None to skip)
        expectations = [
            ('a', False, TypeKind.INT,     TypeKind.INT),
            ('b', False, TypeKind.TYPEDEF, TypeKind.INT),
            ('c', False, TypeKind.LONG,    TypeKind.LONG),
            ('d', False, TypeKind.ULONG,   TypeKind.ULONG),
            ('e', False, TypeKind.LONG,    TypeKind.LONG),
            ('f', True,  TypeKind.INT,     TypeKind.INT),
            ('g', False, TypeKind.POINTER, None),
            ('h', False, TypeKind.POINTER, None),
        ]
        for field, (name, is_const, kind, canonical) in zip(fields, expectations):
            assert field.spelling == name
            assert field.type.is_const_qualified() == is_const
            assert field.type.kind == kind
            if canonical is not None:
                assert field.type.get_canonical().kind == canonical

        # 'b' is a typedef resolving through declaration 'I'.
        assert fields[1].type.get_declaration().spelling == 'I'
        # 'g' is int*.
        assert fields[6].type.get_pointee().kind == TypeKind.INT
        # 'h' is int*** -- unwrap three pointer levels.
        h_type = fields[7].type
        assert h_type.get_pointee().kind == TypeKind.POINTER
        assert h_type.get_pointee().get_pointee().kind == TypeKind.POINTER
        assert h_type.get_pointee().get_pointee().get_pointee().kind == TypeKind.INT
        break
    else:
        assert False, "Didn't find teststruct??"