def build_needle(self, filter_name, line, start, end, name, qualname=None):
    """Create a needle mapping for the given filter, line, start and end
    columns, and name.

    """
    # If qualname is not provided, then use name.
    mapping = {'name': name, 'qualname': qualname or name}
    return (PLUGIN_NAME + '_' + filter_name,
            mapping,
            Extent(Position(row=line, col=start),
                   Position(row=line, col=end)))
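
# A minimal usage sketch (not from the source): assuming PLUGIN_NAME is
# 'python' and `self` is the indexer that defines build_needle, a call like
#   self.build_needle('function', 3, 4, 7, 'foo', 'mod.foo')
# would return
#   ('python_function',
#    {'name': 'foo', 'qualname': 'mod.foo'},
#    Extent(Position(row=3, col=4), Position(row=3, col=7)))
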
def line_needle(needle_type, name, start, end, qualname=None):
    data = {'name': name,
            'start': start[1],
            'end': end[1]}
    if qualname:
        data['qualname'] = qualname
    return (needle_type,
            data,
            Extent(Position(row=start[0], col=start[1]),
                   Position(row=end[0], col=end[1])))
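
# Illustrative only; 'rust_function' and the coordinates are made-up values.
# With start and end given as (row, col) pairs,
#   line_needle('rust_function', 'main', (2, 0), (2, 4))
# would return
#   ('rust_function',
#    {'name': 'main', 'start': 0, 'end': 4},
#    Extent(Position(row=2, col=0), Position(row=2, col=4)))
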
def test_sig_needles():
    dummy_extent = Extent(start=Position(0, 0), end=Position(0, 0))
    fixture = {
        'function': [{'type': FuncSig(('int**', 'int', 'int'), 'int**'),
                      'span': dummy_extent}],
        'variable': [{'type': 'a', 'span': dummy_extent}],
    }
    eq_(list(sig_needles(fixture)),
        [(('c-sig', '(int**, int, int) -> int**'), dummy_extent)])
def make_extent(self, name, location):
    """Return an Extent for the given name in this Location, or None if we
    cannot construct one."""
    # Sometimes the location's line number is off by one (in either
    # direction), so we check the surrounding lines to make sure the name is
    # actually there.
    location.resolve()
    start_col = location._line.rfind(name)
    lineno = self.check_lineno(name, location._lineno)
    if lineno:
        return Extent(Position(lineno + 1, start_col),
                      Position(lineno + 1, start_col + len(name)))
def process_span(props):
    """Turn the "loc" and "locend" fields into a "span" that's an Extent."""
    if not props.get('locend', ''):
        # locend can be "" if isInvalid(). This happens with some macros
        # which call other macros, like this:
        #   #define ID2(x) (x)
        #   #define ID(x) ID2(x)
        # In the second line, ID2 will get a macro ref line, but it will lack
        # an extent because the SourceLocation of ID2 will not be .isValid().
        # We never got that right, even in the SQLite implementation.
        raise UselessLine('Found a line with "loc" but without "locend".')
    _, row, col = _split_loc(props['loc'])
    _, row_end, col_end = _split_loc(props['locend'])
    props['span'] = Extent(Position(row, col), Position(row_end, col_end))
    return props
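
# A hedged example of the transformation, assuming _split_loc parses
# "path:row:col" strings into (path, row, col) as _process_loc's docstring
# below suggests; the file name and coordinates are invented:
#   process_span({'loc': 'a.cpp:3:4', 'locend': 'a.cpp:3:9'})
#   # -> {'loc': 'a.cpp:3:4', 'locend': 'a.cpp:3:9',
#   #     'span': Extent(Position(3, 4), Position(3, 9))}
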
def process_call(props):
    _, call_start = _process_loc(props['callloc'])
    _, (call_end_row, call_end_col) = _process_loc(props['calllocend'])
    # The span coming out of the compiler excludes the right paren.
    call_end_col += 1
    props['span'] = Extent(call_start,
                           Position(row=call_end_row, col=call_end_col))
    props['calleeloc'] = _process_loc(props['calleeloc'])  # for Jump To
    return props
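
# Illustrative values only: a call on line 7 whose compiler-reported span runs
# from column 10 to column 14 might arrive as
#   {'callloc': 'a.cpp:7:10', 'calllocend': 'a.cpp:7:14', 'calleeloc': ...}
# and leave with props['span'] ending at column 15, one past the compiler's
# end column, so the right paren is included.
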
def make_span(self, row):
    return Extent(
        Position(int(row['file_line']), int(row['file_col'])),
        Position(int(row['file_line_end']), int(row['file_col_end'])))
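
# For instance (made-up row), a record like
#   {'file_line': '3', 'file_col': '0', 'file_line_end': '3', 'file_col_end': '5'}
# yields Extent(Position(3, 0), Position(3, 5)), with the string fields
# coerced to ints.
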
Most of these have been deleted in favor of integration tests elsewhere, and
we can probably go further in that direction.

"""
import csv
from itertools import ifilter
from StringIO import StringIO

from nose.tools import eq_

from dxr.indexers import Extent, Position, FuncSig
from dxr.plugins.clang.condense import condense, DISPATCH_TABLE
from dxr.plugins.clang.needles import sig_needles

DEFAULT_EXTENT = Extent(start=Position(0, 0), end=Position(0, 0))


def condense_csv(csv_str):
    return condense(
        csv.reader(StringIO('\n'.join(ifilter(
            None, (x.strip() for x in csv_str.splitlines()))))),
        DISPATCH_TABLE)


def test_smoke_test_csv():
    condense_csv('')


def test_sig_needles():
    fixture = {
        'function': [{'type': FuncSig(('int**', 'int', 'int'), 'int**'),
                      'span': DEFAULT_EXTENT}],
        'variable': [{'type': 'a', 'span': DEFAULT_EXTENT}],
    }
    eq_(list(sig_needles(fixture)),
        [(('c-sig', '(int**, int, int) -> int**'), DEFAULT_EXTENT)])
def _process_loc(locstring):
    """Turn a path:row:col string into (path, Position)."""
    src, row, col = _split_loc(locstring)
    return src, Position(row, col)
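
# _split_loc itself is not shown in this excerpt. A minimal sketch of what it
# presumably does, given the "path:row:col" format named above (an assumption,
# not the actual implementation; the name _split_loc_sketch is invented):
def _split_loc_sketch(locstring):
    path, row, col = locstring.rsplit(':', 2)
    return path, int(row), int(col)
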
def test_split_into_lines():
    """Make sure a needle spanning several lines is split into one needle per
    line, with an end column of None on the lines whose full lengths aren't
    known here."""
    list_eq(split_into_lines([('k',
                               {'m': 'ap'},
                               Extent(Position(1, 5), Position(3, 7)))]),
            [('k', {'m': 'ap'}, Extent(Position(1, 5), Position(1, None))),
             ('k', {'m': 'ap'}, Extent(Position(2, 0), Position(2, None))),
             ('k', {'m': 'ap'}, Extent(Position(3, 0), Position(3, 7)))])
from nose import SkipTest
from nose.tools import eq_, assert_raises

from dxr.indexers import (unsparsify, by_line, group_needles, span_to_lines,
                          key_object_pair, Extent, Position, split_into_lines,
                          FileToSkim)

KV1 = ('x', 'v1')
KV2 = ('y', 'v2')
KV3 = ('z', 'v3')
NEEDLE1 = (KV1, Extent(Position(1, 3), Position(1, 7)))
NEEDLE2 = (KV2, Extent(Position(1, 5), Position(3, 7)))
NEEDLE3 = (KV3, Extent(Position(1, 0), Position(0, 0)))


def list_eq(result, expected):
    eq_(list(result), list(expected))


def test_needle_smoke_test():
    list_eq(unsparsify(lambda: [])(), [])


def test_unsparsify_invalid():
    """Make sure unsparsify raises ValueError on extents whose ends come
    before their starts."""
    raise SkipTest("At the moment, we tolerate these and simply warn. Once "
                   "the clang compiler plugin doesn't spit these out anymore, "
                   "return to raising an exception.")
    assert_raises(ValueError, unsparsify(lambda: [NEEDLE3]))
from parsimonious import Grammar, NodeVisitor
import requests

from dxr.indexers import FuncSig, Extent, Position, symbols, functions


class ExtentVisitor(NodeVisitor):
    def visit_num(self, node, _):
        return int(node.text)

    def visit_pos(self, _, (_0, row, _1, col, _2)):
        return (row, col)

    def visit_extent(self, _, (off1, (row1, col1), __, off2, (row2, col2))):
        return Extent(Position(off1, row1, col1),
                      Position(off2, row2, col2))

    def generic_visit(self, _, __):
        return None


extent_grammar = Grammar(r"""
    extent = num pos "-" num pos
    pos = "[" num ":" num "]"
    num = ~r"\d+"
    """)


class ValueVisitor(NodeVisitor):
    def visit_val(self, node, (val,)):
        return val if val else node.text
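
# Illustrative only: the extent strings this grammar accepts look like
# "offset[row:col]-offset[row:col]", so (assuming the three-argument
# offset/row/col Position this visitor expects) something like
#   ExtentVisitor().visit(extent_grammar.parse('12[3:4]-20[5:6]'))
# should produce
#   Extent(Position(12, 3, 4), Position(20, 5, 6))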