Example #1
0
    def _Add(self, token_info):
        """Pushes the given token info onto the indentation stack.

        Adjacent duplicates are ignored.  For blocks and opening parens,
        walks the existing stack from the top and records which entries
        this token overrides before pushing.

        Args:
          token_info: The token information to add.
        """
        # Never push the exact same token twice in a row.
        if self._stack and self._stack[-1].token == token_info.token:
            return

        if token_info.is_block or token_info.token.type == Type.START_PAREN:
            token_info.overridden_by = (
                tokenutil.GoogScopeOrNoneFromStartBlock(token_info.token))

            for prior in reversed(self._stack):
                prior_token = prior.token

                if prior.line_number == token_info.line_number:
                    # In general, tokens only override each other when they
                    # are on the same line.
                    prior.overridden_by = token_info
                    if (token_info.token.type == Type.START_BLOCK
                            and (prior_token.IsAssignment()
                                 or prior_token.type in (Type.IDENTIFIER,
                                                         Type.START_PAREN))):
                        # Multi-line blocks have lasting overrides, as in:
                        # callFn({
                        #   a: 10
                        # },
                        # 30);
                        # b/11450054. If a string is not closed properly then
                        # close_block could be null.
                        close_block = (
                            token_info.token.metadata.context.end_token)
                        prior.is_permanent_override = close_block and (
                            close_block.line_number !=
                            token_info.token.line_number)
                elif (token_info.token.type == Type.START_BLOCK
                      and token_info.token.metadata.context.type ==
                      Context.BLOCK
                      and (prior_token.IsAssignment()
                           or prior_token.type == Type.IDENTIFIER)):
                    # When starting a function block, the override can
                    # transcend lines.  For example
                    # long.long.name = function(
                    #     a) {
                    # In this case the { and the = are on different lines.
                    # But the override should still apply.
                    prior.overridden_by = token_info
                    prior.is_permanent_override = True
                else:
                    break

        self._stack.append(token_info)
Example #2
0
    def HandleToken(self, token, last_non_space_token):
        """Processes one token, tracking block nesting and goog.scope depth.

        Args:
          token: The token to handle.
          last_non_space_token: The last non space token encountered
        """
        token_type = token.type
        # A token has exactly one type, so these checks are mutually
        # exclusive.
        if token_type == Type.START_BLOCK:
            self._block_stack.append(token)
        elif token_type == Type.IDENTIFIER and token.string == 'goog.scope':
            self._scope_depth += 1
        elif token_type == Type.END_BLOCK:
            opener = self._block_stack.pop()
            # Leaving the block that a goog.scope(...) opened.
            if tokenutil.GoogScopeOrNoneFromStartBlock(opener):
                self._scope_depth -= 1
        super(JavaScriptStateTracker,
              self).HandleToken(token, last_non_space_token)
Example #3
0
    def CheckToken(self, token, state):
        """Checks a token for indentation errors.

        Args:
          token: The current token under consideration
          state: Additional information about the current tree state

        Returns:
          A list of error arrays [error code, error string, error token,
          position] for each indentation problem found; the list is empty
          when indentation is correct.
        """

        token_type = token.type
        indentation_errors = []
        stack = self._stack
        is_first = self._IsFirstNonWhitespaceTokenInLine(token)

        # Add tokens that could decrease indentation before checking.
        if token_type == Type.END_PAREN:
            self._PopTo(Type.START_PAREN)

        elif token_type == Type.END_PARAMETERS:
            self._PopTo(Type.START_PARAMETERS)

        elif token_type == Type.END_BRACKET:
            self._PopTo(Type.START_BRACKET)

        elif token_type == Type.END_BLOCK:
            start_token = self._PopTo(Type.START_BLOCK)
            # Check for required goog.scope comment.
            if start_token:
                goog_scope = tokenutil.GoogScopeOrNoneFromStartBlock(
                    start_token.token)
                if goog_scope is not None:
                    if not token.line.endswith(';  // goog.scope\n'):
                        # A closing comment exists but doesn't match the
                        # required form exactly.
                        if (token.line.find('//') > -1
                                and token.line.find('goog.scope') >
                                token.line.find('//')):
                            indentation_errors.append([
                                errors.MALFORMED_END_OF_SCOPE_COMMENT,
                                ('Malformed end of goog.scope comment. Please use the '
                                 'exact following syntax to close the scope:\n'
                                 '});  // goog.scope'), token,
                                Position(token.start_index, token.length)
                            ])
                        else:
                            indentation_errors.append([
                                errors.MISSING_END_OF_SCOPE_COMMENT,
                                ('Missing comment for end of goog.scope which opened at line '
                                 '%d. End the scope with:\n'
                                 '});  // goog.scope' %
                                 (start_token.line_number)), token,
                                Position(token.start_index, token.length)
                            ])

        elif token_type == Type.KEYWORD and token.string in ('case',
                                                             'default'):
            # case/default outdent to the switch's block level.
            self._Add(self._PopTo(Type.START_BLOCK))

        elif is_first and token.string == '.':
            # This token should have been on the previous line, so treat it as if it
            # was there.
            info = TokenInfo(token)
            info.line_number = token.line_number - 1
            self._Add(info)

        elif token_type == Type.SEMICOLON:
            self._PopTransient()

        not_binary_operator = (token_type != Type.OPERATOR
                               or token.metadata.IsUnaryOperator())
        not_dot = token.string != '.'
        if is_first and not_binary_operator and not_dot and token.type not in (
                Type.COMMENT, Type.DOC_PREFIX, Type.STRING_TEXT):
            if flags.FLAGS.debug_indentation:
                # Parenthesized print call works identically under Python 2
                # (single-expression print statement) and Python 3.
                print('Line #%d: stack %r' % (token.line_number, stack))

            # Ignore lines that start in JsDoc since we don't check them properly yet.
            # TODO(robbyw): Support checking JsDoc indentation.
            # Ignore lines that start as multi-line strings since indentation is N/A.
            # Ignore lines that start with operators since we report that already.
            # Ignore lines with tabs since we report that already.
            expected = self._GetAllowableIndentations()
            actual = self._GetActualIndentation(token)

            # Special case comments describing else, case, and default.  Allow them
            # to outdent to the parent block.
            if token_type in Type.COMMENT_TYPES:
                next_code = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES)
                if next_code and next_code.type == Type.END_BLOCK:
                    next_code = tokenutil.SearchExcept(next_code,
                                                       Type.NON_CODE_TYPES)
                if next_code and next_code.string in ('else', 'case',
                                                      'default'):
                    # TODO(robbyw): This almost certainly introduces false negatives.
                    expected |= self._AddToEach(expected, -2)

            if actual >= 0 and actual not in expected:
                expected = sorted(expected)
                indentation_errors.append([
                    errors.WRONG_INDENTATION,
                    'Wrong indentation: expected any of {%s} but got %d' %
                    (', '.join(['%d' % x for x in expected]), actual), token,
                    Position(actual, expected[0])
                ])
                # Remember the correction so later tokens on this line can be
                # adjusted consistently.
                self._start_index_offset[
                    token.line_number] = expected[0] - actual

        # Add tokens that could increase indentation.
        if token_type == Type.START_BRACKET:
            self._Add(
                TokenInfo(token=token,
                          is_block=token.metadata.context.type ==
                          Context.ARRAY_LITERAL))

        elif token_type == Type.START_BLOCK or token.metadata.is_implied_block:
            self._Add(TokenInfo(token=token, is_block=True))

        elif token_type in (Type.START_PAREN, Type.START_PARAMETERS):
            self._Add(TokenInfo(token=token, is_block=False))

        elif token_type == Type.KEYWORD and token.string == 'return':
            self._Add(TokenInfo(token))

        elif not token.IsLastInLine() and (token.IsAssignment()
                                           or token.IsOperator('?')):
            self._Add(TokenInfo(token=token))

        # Handle implied block closes.
        if token.metadata.is_implied_block_close:
            self._PopToImpliedBlock()

        # Add some tokens only if they appear at the end of the line.
        is_last = self._IsLastCodeInLine(token)
        if is_last:
            if token_type == Type.OPERATOR:
                if token.string == ':':
                    if stack and stack[-1].token.string == '?':
                        # When a ternary : is on a different line than its '?', it doesn't
                        # add indentation.
                        if token.line_number == stack[-1].token.line_number:
                            self._Add(TokenInfo(token))
                    elif token.metadata.context.type == Context.CASE_BLOCK:
                        # Pop transient tokens from say, line continuations, e.g.,
                        # case x.
                        #     y:
                        # Want to pop the transient 4 space continuation indent.
                        self._PopTransient()
                        # Starting the body of the case statement, which is a type of
                        # block.
                        self._Add(TokenInfo(token=token, is_block=True))
                    elif token.metadata.context.type == Context.LITERAL_ELEMENT:
                        # When in an object literal, acts as operator indicating line
                        # continuations.
                        self._Add(TokenInfo(token))
                    else:
                        # ':' might also be a statement label, no effect on indentation in
                        # this case.
                        pass

                elif token.string != ',':
                    self._Add(TokenInfo(token))
                else:
                    # The token is a comma.
                    if token.metadata.context.type == Context.VAR:
                        self._Add(TokenInfo(token))
                    elif token.metadata.context.type != Context.PARAMETERS:
                        self._PopTransient()

            elif (token.string.endswith('.')
                  and token_type in (Type.IDENTIFIER, Type.NORMAL)):
                # Trailing dot: the expression continues on the next line.
                self._Add(TokenInfo(token))
            elif token_type == Type.PARAMETERS and token.string.endswith(','):
                # Parameter lists.
                self._Add(TokenInfo(token))
            elif token.IsKeyword('var'):
                self._Add(TokenInfo(token))
            elif token.metadata.is_implied_semicolon:
                self._PopTransient()
        elif token.IsAssignment():
            self._Add(TokenInfo(token))

        return indentation_errors
  def _Add(self, token_info):
    """Adds the given token info to the stack.

    Duplicate pushes of the same token are ignored.  For blocks and opening
    parens, marks which existing stack entries this token overrides (and
    whether the override is permanent) before appending.

    Args:
      token_info: The token information to add.
    """
    if self._stack and self._stack[-1].token == token_info.token:
      # Don't add the same token twice.
      return

    if token_info.is_block or token_info.token.type == Type.START_PAREN:
      # A goog.scope(...) opener overrides this token's own indentation.
      scope_token = tokenutil.GoogScopeOrNoneFromStartBlock(token_info.token)
      token_info.overridden_by = TokenInfo(scope_token) if scope_token else None

      if (token_info.token.type == Type.START_BLOCK and
          token_info.token.metadata.context.type == Context.BLOCK):
        # Handle function() {} assignments: their block contents get special
        # treatment and are allowed to just indent by two whitespace.
        # For example
        # long.long.name = function(
        #     a) {
        # In this case the { and the = are on different lines.  But the
        # override should still apply for all previous stack tokens that are
        # part of an assignment of a block.

        has_assignment = any(x for x in self._stack if x.token.IsAssignment())
        if has_assignment:
          # Walk down the stack, permanently overriding entries as long as the
          # tokens between them and the block form a contiguous assignment
          # chain.  NOTE(review): relies on _AllFunctionPropertyAssignTokens
          # (defined elsewhere) to decide chain membership — verify there.
          last_token = token_info.token.previous
          for stack_info in reversed(self._stack):
            if (last_token and
                not self._AllFunctionPropertyAssignTokens(stack_info.token,
                                                          last_token)):
              break
            stack_info.overridden_by = token_info
            stack_info.is_permanent_override = True
            last_token = stack_info.token

      # Walk down the stack overriding entries that share this token's line.
      index = len(self._stack) - 1
      while index >= 0:
        stack_info = self._stack[index]
        stack_token = stack_info.token

        if stack_info.line_number == token_info.line_number:
          # In general, tokens only override each other when they are on
          # the same line.
          stack_info.overridden_by = token_info
          if (token_info.token.type == Type.START_BLOCK and
              (stack_token.IsAssignment() or
               stack_token.type in (Type.IDENTIFIER, Type.START_PAREN))):
            # Multi-line blocks have lasting overrides, as in:
            # callFn({
            #   a: 10
            # },
            # 30);
            # b/11450054. If a string is not closed properly then close_block
            # could be null.
            close_block = token_info.token.metadata.context.end_token
            stack_info.is_permanent_override = close_block and (
                close_block.line_number != token_info.token.line_number)
        else:
          # First entry from an earlier line ends the same-line override run.
          break
        index -= 1

    self._stack.append(token_info)