Example #1
0
    def _parse_body(self, body):
        """Tokenizes and parses the arguments given to the definition function."""
        # CreateMutableNetworkTrafficAnnotationTag() takes no arguments we
        # care about, so skip parsing entirely for it.
        if self.type_name == 'Mutable':
            return

        tokenizer = Tokenizer(body, self.file_path, self.line_number)

        # First argument: the annotation's unique_id.
        self.unique_id = tokenizer.advance('string_literal')
        tokenizer.advance('comma')

        # Partial and BranchedCompleting annotations carry an extra_id next.
        if self.type_name in ('Partial', 'BranchedCompleting'):
            self.extra_id = tokenizer.advance('string_literal')
            tokenizer.advance('comma')

        # Completing and BranchedCompleting annotations reference a partial
        # annotation. That argument can be a bare variable name or a
        # FunctionName(), so consume the optional parentheses before the comma.
        if self.type_name in ('Completing', 'BranchedCompleting'):
            tokenizer.advance('symbol')
            if tokenizer.maybe_advance('left_paren'):
                tokenizer.advance('right_paren')
            tokenizer.advance('comma')

        # Final argument: the annotation's proto text.
        self.text = tokenizer.advance('string_literal')

        # The function call must close here, with no further arguments.
        assert tokenizer.advance('right_paren')
 def testMaybeAdvance(self):
     """maybe_advance returns the token on a type match, else None."""
     tokenizer = Tokenizer('"hello", world', 'foo.txt', 33)
     # Each (requested type, expected result) pair is tried in order; a
     # mismatch must leave the tokenizer position unchanged.
     expectations = [
         ('symbol', None),
         ('string_literal', 'hello'),
         ('comma', ','),
         ('left_paren', None),
         ('symbol', 'world'),
         ('right_paren', None),
     ]
     for token_type, expected in expectations:
         self.assertEqual(expected, tokenizer.maybe_advance(token_type))
Example #3
0
    def _parse_string(self, tokenizer: Tokenizer) -> str:
        """Parse a string value.

    It could be a string literal by itself, or multiple string literals
    concatenated together. Add a newline to the string for each
    concatenation."""
        # Collect each literal, then join with newlines — one newline per
        # '+' concatenation, matching the documented contract.
        pieces = [tokenizer.advance('string_literal')]
        while tokenizer.maybe_advance('plus') is not None:
            pieces.append(tokenizer.advance('string_literal'))
        return '\n'.join(pieces)