    def test_token_parser(self):
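        # Calling parse() on the base TokenParser should raise
        # NotYetImplemented, since subclasses are expected to override it.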
        with self.assertRaises(NotYetImplemented):
            source_file = 'tests/samples/codefiles/c_only/non_empty.h'
            parser = TokenParser(source_file)
            parser.parse()

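        # Accessing the tokens property should lazily call _extract_tokens
        # exactly once.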
        with mock.patch('wakatime.dependencies.TokenParser._extract_tokens') as mock_extract_tokens:
            source_file = 'tests/samples/codefiles/see.h'
            parser = TokenParser(source_file)
            parser.tokens
            mock_extract_tokens.assert_called_once_with()

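        # With truncate=True, truncate_to=N keeps only the first N
        # dot-separated parts of the dependency, capped at the number of
        # parts available.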
        parser = TokenParser(None)
        parser.append('one.two.three', truncate=True, truncate_to=1)
        parser.append('one.two.three', truncate=True, truncate_to=2)
        parser.append('one.two.three', truncate=True, truncate_to=3)
        parser.append('one.two.three', truncate=True, truncate_to=4)

        expected = [
            'one',
            'one.two',
            'one.two.three',
            'one.two.three',
        ]
        self.assertEqual(parser.dependencies, expected)