    def testGetIdentifierStart(self):
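        """Tests finding the token that starts a multi-line identifier."""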

        tokens = testutil.TokenizeSource("""
start1 . // comment
    prototype. /* another comment */
    end1

['edge'][case].prototype.
    end2 = function() {}
""")

        def _GetTokenStartingWith(token_starts_with):
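            """Returns the first token starting with the given string."""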
            for t in tokens:
                if t.string.startswith(token_starts_with):
                    return t

        self.assertEquals(
            'start1',
            tokenutil.GetIdentifierStart(_GetTokenStartingWith('end1')).string)

        self.assertEquals(
            'start1',
            tokenutil.GetIdentifierStart(
                _GetTokenStartingWith('start1')).string)

        self.assertIsNone(
            tokenutil.GetIdentifierStart(_GetTokenStartingWith('end2')))

    def testGetWholeIdentifierString(self):
        """Tests that created identifiers satisfy usage of the identifier."""
        input_lines = ['package.Foo.', '    veryLong.', '    identifier;']

        token = testutil.TokenizeSource(input_lines)

        self.assertEquals('package.Foo.veryLong.identifier',
                          tokenutil.GetIdentifierForToken(token))

        self.assertIsNone(tokenutil.GetIdentifierForToken(token.next))

    def testGetNextCodeToken(self):
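        """Tests that GetNextCodeToken skips comments and whitespace."""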

        tokens = testutil.TokenizeSource("""
start1. // comment
    /* another comment */
    end1
""")

        def _GetTokenStartingWith(token_starts_with):
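            """Returns the first token starting with the given string."""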
            for t in tokens:
                if t.string.startswith(token_starts_with):
                    return t

        self.assertEquals(
            'end1',
            tokenutil.GetNextCodeToken(_GetTokenStartingWith('start1')).string)

        self.assertIsNone(
            tokenutil.GetNextCodeToken(_GetTokenStartingWith('end1')))

    def testGetIdentifierForToken(self):
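        """Tests building identifier strings across a variety of layouts."""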

        tokens = testutil.TokenizeSource("""
start1.abc.def.prototype.
  onContinuedLine

(start2.abc.def
  .hij.klm
  .nop)

start3.abc.def
   .hij = function() {};

// An absurd multi-liner.
start4.abc.def.
   hij.
   klm = function() {};

start5 . aaa . bbb . ccc
  shouldntBePartOfThePreviousSymbol

start6.abc.def ghi.shouldntBePartOfThePreviousSymbol

var start7 = 42;

function start8() {

}

start9.abc. // why is there a comment here?
  def /* another comment */
  shouldntBePart

start10.abc // why is there a comment here?
  .def /* another comment */
  shouldntBePart

start11.abc. middle1.shouldNotBeIdentifier
""")

        def _GetTokenStartingWith(token_starts_with):
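            """Returns the first token starting with the given string."""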
            for t in tokens:
                if t.string.startswith(token_starts_with):
                    return t

        self.assertEquals(
            'start1.abc.def.prototype.onContinuedLine',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start1')))

        self.assertEquals(
            'start2.abc.def.hij.klm.nop',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start2')))

        self.assertEquals(
            'start3.abc.def.hij',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start3')))

        self.assertEquals(
            'start4.abc.def.hij.klm',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start4')))

        self.assertEquals(
            'start5.aaa.bbb.ccc',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start5')))

        self.assertEquals(
            'start6.abc.def',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start6')))

        self.assertEquals(
            'start7',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start7')))

        self.assertEquals(
            'start8',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start8')))

        self.assertEquals(
            'start9.abc.def',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start9')))

        self.assertEquals(
            'start10.abc.def',
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start10')))

        self.assertIsNone(
            tokenutil.GetIdentifierForToken(_GetTokenStartingWith('middle1')))

    def _GetRequireTokens(self, namespace):
        """Returns a list of tokens for a goog.require of the given namespace."""
        line_text = 'goog.require(\'' + namespace + '\');\n'
        return testutil.TokenizeSource([line_text])

    def _GetStartTokenAndNamespacesInfoForScript(self, script,
                                                 closurized_namespaces):
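        """Returns the start token and namespaces info for a script."""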

        token = testutil.TokenizeSource(script)
        return token, self._GetInitializedNamespacesInfo(
            token, closurized_namespaces, [])