def _source_lines(self):
    """Return the source of every line this expression spans (not just the
    source of the expression itself). The tokenizer needs whole lines, so
    we cannot start mid-line where the expression begins.
    """
    source = self.get_file_source_code()
    first_line = self._astroid_object.fromlineno - 1
    last_line = self._astroid_object.tolineno
    region_start = line_column_to_absolute_index(source, first_line, 0)
    if last_line == source.count('\n') + 1:
        # the expression ends on the final line of the source
        region_end = len(source)
    else:
        region_end = line_column_to_absolute_index(source, last_line, 0)
    return self._get_source_region(region_start, region_end)
def test_line_column_to_absolute_index():
    """Check that a line/column position in a block of text converts to the
    correct character index."""
    settings_path = path.join(TEST_PROJECT_PATH, 'settings.py')
    with open(settings_path) as source_file:
        source = source_file.read()
    assert_equal(line_column_to_absolute_index(source, 0, 0), 0)
    assert_equal(line_column_to_absolute_index(source, 1, 0), 21)
def test_line_column_to_absolute_index():
    """Verify the line/column -> absolute character index conversion
    against a known test-project file."""
    with open(path.join(TEST_PROJECT_PATH, 'settings.py')) as source_file:
        source = source_file.read()
    # line 0, column 0 is the very first character
    assert_equal(
        line_column_to_absolute_index(source, 0, 0), 0)
    # line 1 of settings.py starts 21 characters in
    assert_equal(
        line_column_to_absolute_index(source, 1, 0), 21)
def body_start_index(self):
    """The character index of the beginning of the node body, relative to
    the entire source file."""
    source = self.get_file_source_code()
    return line_column_to_absolute_index(
        source, self.body_start_line, self.body_start_column)
def inject_at_line(self, line_index, inject_source):
    """Like inject_at_index, but addressed by line index instead of
    character index."""
    line_start = line_column_to_absolute_index(
        self.node.get_source(), line_index, 0)
    return self.inject_at_index(line_start, inject_source)
def inject_at_line(self, line_index, inject_source):
    """As inject_at_index, except the position is given as a line index
    rather than a character index."""
    node_source = self.node.get_source()
    start_of_line = line_column_to_absolute_index(node_source, line_index, 0)
    return self.inject_at_index(start_of_line, inject_source)
def _source_lines(self):
    """Return the source for the full lines this expression occupies (not
    only the expression's own text). We need whole lines to feed the
    tokenizer -- it cannot start partway through a line.
    """
    start_line = self._astroid_object.fromlineno - 1
    end_line = self._astroid_object.tolineno
    source = self.get_file_source_code()
    start_index = line_column_to_absolute_index(source, start_line, 0)
    at_end_of_source = end_line == source.count('\n') + 1
    end_index = (
        len(source) if at_end_of_source
        else line_column_to_absolute_index(source, end_line, 0))
    return self._get_source_region(start_index, end_index)
def body_end_index(self):
    """The character index of the character after the end of the node body,
    relative to the entire source file.

    If the body ends on the file's last line, the "next" character does not
    exist, so this returns one past the end of the file.
    """
    # fetch the source once instead of three separate times
    source = self.get_file_source_code()
    if self.body_end_line == count_lines(source) + 1:
        # we're on the last line: report the end-of-file index
        return len(source)
    return line_column_to_absolute_index(
        source, self.body_end_line, self.body_end_column)
def inject_after(self, inject_source):
    """Generate a Change that inserts inject_source starting on the line
    after this node.

    If the node ends on the last line of its file, a newline is prepended
    to inject_source to "create" the new line.
    """
    # read the file source once instead of once per branch
    source = self.node.get_file_source_code()
    try:
        insertion_index = line_column_to_absolute_index(
            source, self.node.end_line + 1, 0)
    except ValueError:
        # our node is at the end of its file
        # we'll need to select the last character of the file...
        insertion_index = len(source)
        # ...and "create" a line by inserting a newline into our source
        inject_source = '\n' + inject_source
    return Change(self.node.fs_path, insertion_index, insertion_index,
                  inject_source)
def discard_before(self, start_from):
    """Discard any tokens whose start index precedes start_from. Return the
    number of tokens discarded."""
    discarded = 0
    token = self.tokens[0]
    while line_column_to_absolute_index(
            self.source, token['start'][0], token['start'][1]) < start_from:
        self.consume_anything(discard=True)
        token = self.tokens[0]
        discarded += 1
    return discarded
def discard_before(self, start_from):
    """Throw away every token that starts at an absolute index before
    start_from, returning how many were thrown away."""
    count = 0
    token = self.tokens[0]
    while True:
        token_start = line_column_to_absolute_index(
            self.source, token['start'][0], token['start'][1])
        if token_start >= start_from:
            break
        self.consume_anything(discard=True)
        token = self.tokens[0]
        count += 1
    return count
def inject_after(self, inject_source):
    """Generate a change inserting inject_source on the line following this
    node.

    When the node sits at the end of its file there is no following line;
    in that case the insertion point is the end of the file and a newline
    is prepended to inject_source.
    """
    file_source = self.node.get_file_source_code()  # fetch once, not twice
    try:
        character_index_of_line = line_column_to_absolute_index(
            file_source, self.node.end_line + 1, 0)
    except ValueError:
        # our node is at the end of its file
        # we'll need to select the last character of the file...
        character_index_of_line = len(file_source)
        # ...and "create" a line by inserting a newline into our source
        inject_source = '\n' + inject_source
    return Change(
        self.node.fs_path,
        character_index_of_line,
        character_index_of_line,
        inject_source)
def get_end_index(self, token):
    """Return the absolute index within self.source of the end of token."""
    end_line, end_column = token['end']
    return line_column_to_absolute_index(self.source, end_line, end_column)
def get_end_index(self, token):
    """Map the end position of token to an absolute index in self.source."""
    token_end = token['end']
    return line_column_to_absolute_index(
        self.source, token_end[0], token_end[1])