def get_definition(tree, blob: str) -> List[Dict[str, Any]]:
    """Extract method definitions from every class in a Java parse tree.

    Args:
        tree: tree-sitter parse tree for a Java source file.
        blob: raw source text the tree was parsed from.

    Returns:
        One dict per non-empty, non-blacklisted method, carrying
        type/identifier/parameters/function/function_tokens/docstring/
        docstring_summary/start_point/end_point.  Identifiers are
        qualified as ``ClassName.methodName``.
    """
    definitions = []
    for _class in tree.root_node.children:
        if _class.type != 'class_declaration':
            continue
        # The class name is the first 'identifier' child of the declaration.
        name_node = [c for c in _class.children if c.type == 'identifier'][0]
        class_identifier = match_from_span(name_node, blob).strip()
        for body in _class.children:
            if body.type != 'class_body':
                continue
            members = body.children
            for idx, node in enumerate(members):
                if node.type != 'method_declaration':
                    continue
                # Skip abstract / bodiless methods.
                if JavaParser.is_method_body_empty(node):
                    continue
                # A comment node immediately preceding the method is treated
                # as its docstring (Javadoc-style).
                docstring = ''
                if idx > 0 and members[idx - 1].type == 'comment':
                    docstring = strip_c_style_comment_delimiters(
                        match_from_span(members[idx - 1], blob))
                docstring_summary = get_docstring_summary(docstring)
                metadata = JavaParser.get_function_metadata(node, blob)
                if metadata['identifier'] in JavaParser.BLACKLISTED_FUNCTION_NAMES:
                    continue
                definitions.append({
                    'type': node.type,
                    'identifier': '{}.{}'.format(class_identifier, metadata['identifier']),
                    'parameters': metadata['parameters'],
                    'function': match_from_span(node, blob),
                    'function_tokens': tokenize_code(node, blob),
                    'docstring': docstring,
                    'docstring_summary': docstring_summary,
                    'start_point': node.start_point,
                    'end_point': node.end_point
                })
    return definitions
def get_definition(tree, blob: str) -> List[Dict[str, Any]]:
    """Extract top-level function/method definitions from a Go parse tree.

    Comment nodes immediately preceding a declaration (with nothing in
    between) are accumulated as its docstring; any other node flushes
    the comment buffer.

    Args:
        tree: tree-sitter parse tree for a Go source file.
        blob: raw source text the tree was parsed from.

    Returns:
        A list of metadata dicts, one per function/method declaration.
    """
    definitions = []
    comment_buffer = []
    for child in tree.root_node.children:
        if child.type == 'comment':
            comment_buffer.append(child)
        elif child.type in ('method_declaration', 'function_declaration'):
            docstring = '\n'.join([match_from_span(comment, blob) for comment in comment_buffer])
            # Bug fix: strip the '//' and '/* */' delimiters from the whole
            # docstring *before* summarising.  The old code summarised the
            # raw comment text and stripped delimiters only from the summary,
            # so the stored 'docstring' still contained delimiters — unlike
            # the Java parser in this file, which strips first.
            docstring = strip_c_style_comment_delimiters(docstring)
            docstring_summary = get_docstring_summary(docstring)
            metadata = GoParser.get_function_metadata(child, blob)
            definitions.append({
                'type': child.type,
                'identifier': metadata['identifier'],
                'parameters': metadata['parameters'],
                'function': match_from_span(child, blob),
                'function_tokens': tokenize_code(child, blob),
                'docstring': docstring,
                'docstring_summary': docstring_summary,
                'start_point': child.start_point,
                'end_point': child.end_point
            })
            comment_buffer = []
        else:
            # A non-comment, non-function node breaks comment adjacency.
            comment_buffer = []
    return definitions
def get_definition(tree, blob: str) -> List[Dict[str, Any]]:
    """Collect function and class definitions from a Python parse tree.

    Module-level functions come first; each class then contributes one
    class entry followed by its methods, whose identifiers are
    qualified with the class name.
    """
    top_level = PythonParser.get_function_definitions(tree.root_node)
    definitions = list(PythonParser.__process_functions(top_level, blob))
    for _class in tree.root_node.children:
        if _class.type != 'class_definition':
            continue
        class_metadata = PythonParser.get_class_metadata(_class, blob)
        docstring_node = PythonParser.__get_docstring_node(_class)
        class_metadata['docstring'] = PythonParser.get_docstring(docstring_node, blob)
        class_metadata['docstring_summary'] = get_docstring_summary(class_metadata['docstring'])
        # A class entry carries no function body of its own.
        class_metadata['function'] = ''
        class_metadata['function_tokens'] = []
        class_metadata['start_point'] = _class.start_point
        class_metadata['end_point'] = _class.end_point
        definitions.append(class_metadata)
        methods = PythonParser.get_function_definitions(_class)
        definitions.extend(
            PythonParser.__process_functions(methods, blob, class_metadata['identifier']))
    return definitions
def __process_functions(
        functions: Iterable,
        blob: str,
        func_identifier_scope: Optional[str] = None) -> Iterator[Dict[str, Any]]:
    """Yield a metadata dict for each non-empty, non-dunder function node.

    Args:
        functions: iterable of tree-sitter function-definition nodes.
        blob: raw source text the nodes were parsed from.
        func_identifier_scope: optional qualifier (e.g. the enclosing
            class name) prepended to each identifier as ``scope.name``.

    Yields:
        Dicts with identifier, parameters, docstring, docstring_summary,
        function text/tokens, and start/end points.
    """
    for function_node in functions:
        if PythonParser.is_function_empty(function_node):
            continue
        function_metadata = PythonParser.get_function_metadata(
            function_node, blob)
        # Blacklist dunder methods (__init__, __repr__, ...).  Bug fix:
        # test the *unqualified* name — the old check ran after the scope
        # prefix was applied, so 'Foo.__init__' no longer started with
        # '__' and class-level dunders slipped through the filter.
        if function_metadata['identifier'].startswith(
                '__') and function_metadata['identifier'].endswith('__'):
            continue
        if func_identifier_scope is not None:
            function_metadata['identifier'] = '{}.{}'.format(
                func_identifier_scope, function_metadata['identifier'])
        docstring_node = PythonParser.__get_docstring_node(function_node)
        function_metadata['docstring'] = PythonParser.get_docstring(
            docstring_node, blob)
        function_metadata['docstring_summary'] = get_docstring_summary(
            function_metadata['docstring'])
        function_metadata['function'] = match_from_span(function_node, blob)
        # Exclude the docstring node from the token stream.
        function_metadata['function_tokens'] = tokenize_code(
            function_node, blob, {docstring_node})
        function_metadata['start_point'] = function_node.start_point
        function_metadata['end_point'] = function_node.end_point
        yield function_metadata
def get_methods(module_or_class_node, blob: str, module_name: str, node_type: str) -> List[Dict[str, Any]]:
    """Extract method definitions from a Ruby module or class node.

    Comment nodes immediately preceding a method are joined (with their
    leading '#' stripped) into its docstring.

    Args:
        module_or_class_node: tree-sitter node for a Ruby module/class.
        blob: raw source text the node was parsed from.
        module_name: name of the enclosing module, used as the outermost
            identifier qualifier.
        node_type: type tag to record for each definition
            (e.g. 'module' or 'class').

    Returns:
        A list of metadata dicts, one per non-blacklisted method.
    """
    definitions = []
    comment_buffer = []
    # children[1] is the module/class name node in tree-sitter's Ruby grammar.
    module_or_class_name = match_from_span(module_or_class_node.children[1], blob)
    for child in module_or_class_node.children:
        if child.type == 'comment':
            comment_buffer.append(child)
        elif child.type == 'method':
            docstring = '\n'.join([match_from_span(comment, blob).strip().strip('#') for comment in comment_buffer])
            # Bug fix: flush the buffer before the blacklist check.  The old
            # code reset it only after appending, so a blacklisted method's
            # comments leaked into the next method's docstring.
            comment_buffer = []
            docstring_summary = get_docstring_summary(docstring)
            metadata = RubyParser.get_function_metadata(child, blob)
            if metadata['identifier'] in RubyParser.BLACKLISTED_FUNCTION_NAMES:
                continue
            definitions.append({
                # Bug fix: honour the caller-supplied node_type; the old code
                # hard-coded 'class' and never used the node_type parameter.
                'type': node_type,
                'identifier': '{}.{}.{}'.format(module_name, module_or_class_name, metadata['identifier']),
                'parameters': metadata['parameters'],
                'function': match_from_span(child, blob),
                'function_tokens': tokenize_code(child, blob),
                'docstring': docstring,
                'docstring_summary': docstring_summary,
                'start_point': child.start_point,
                'end_point': child.end_point
            })
        else:
            # Any other node breaks comment adjacency.
            comment_buffer = []
    return definitions
def get_declarations(declaration_node, blob: str, node_type: str) -> List[Dict[str, Any]]:
    """Extract method definitions from a PHP class/interface/trait declaration node.

    Args:
        declaration_node: tree-sitter node for the declaration.
        blob: raw source text the node was parsed from.
        node_type: type tag recorded for each extracted definition.

    Returns:
        A list of metadata dicts, one per non-blacklisted method that
        contains a function_definition node.
    """
    declarations = []
    # Bug fix: initialise so a method_declaration appearing before (or
    # without) a 'name' child cannot raise UnboundLocalError.
    declaration_name = ''
    for idx, child in enumerate(declaration_node.children):
        if child.type == 'name':
            declaration_name = match_from_span(child, blob)
        elif child.type == 'method_declaration':
            docstring = PhpParser.get_docstring(declaration_node, blob, idx)
            docstring_summary = get_docstring_summary(docstring)
            function_nodes = []
            traverse_type(child, function_nodes, 'function_definition')
            if not function_nodes:
                continue  # method has no function body (e.g. abstract)
            function_node = function_nodes[0]
            metadata = PhpParser.get_function_metadata(function_node, blob)
            if metadata['identifier'] in PhpParser.BLACKLISTED_FUNCTION_NAMES:
                continue
            declarations.append({
                'type': node_type,
                'identifier': '{}.{}'.format(declaration_name, metadata['identifier']),
                'parameters': metadata['parameters'],
                'function': match_from_span(child, blob),
                'function_tokens': tokenize_code(child, blob),
                'docstring': docstring,
                'docstring_summary': docstring_summary,
                'start_point': function_node.start_point,
                'end_point': function_node.end_point
            })
    return declarations
def get_definition(tree, blob: str) -> List[Dict[str, Any]]:
    """Extract function definitions from a JavaScript parse tree.

    Every 'function' node anywhere in the tree is considered; nodes with
    no children are skipped, and each definition is tagged with the type
    of its parent node.
    """
    function_nodes = []
    traverse_type(tree.root_node, function_nodes, 'function')

    # Pair each function node with its parent's type and its docstring.
    annotated = []
    for fn_node in function_nodes:
        if not fn_node.children:
            continue  # degenerate node with no body
        parent = node_parent(tree, fn_node)
        annotated.append(
            (parent.type, fn_node, JavascriptParser.get_docstring(tree, fn_node, blob)))

    definitions = []
    for parent_type, fn_node, docstring in annotated:
        metadata = JavascriptParser.get_function_metadata(fn_node, blob)
        docstring_summary = get_docstring_summary(docstring)
        if metadata['identifier'] in JavascriptParser.BLACKLISTED_FUNCTION_NAMES:
            continue
        definitions.append({
            'type': parent_type,
            'identifier': metadata['identifier'],
            'parameters': metadata['parameters'],
            'function': match_from_span(fn_node, blob),
            'function_tokens': tokenize_code(fn_node, blob),
            'docstring': docstring,
            'docstring_summary': docstring_summary,
            'start_point': fn_node.start_point,
            'end_point': fn_node.end_point
        })
    return definitions