def ParseChangelogFromJSON(self, range_start, range_end, changelog_url,
                             revision_url, revision_map, file_to_revision_map):
    """Parses changelog by going over the JSON file.

    Args:
      range_start: Starting range of the regression.
      range_end: Ending range of the regression.
      changelog_url: The url to retrieve changelog from.
      revision_url: The url to retrieve individual revision from.
      revision_map: A map from a git hash number to its revision information.
      file_to_revision_map: A map from file to a git hash in which it occurs.
    """
    # Compute URLs from the given range, and retrieve the changelog. Stop if
    # it fails.
    changelog_url %= (range_start, range_end)
    json_url = changelog_url + '?format=json'
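    # For illustration (hypothetical template): with changelog_url set to
    #   'https://chromium.googlesource.com/chromium/src/+log/%s..%s'
    # and the range ('hash_a', 'hash_b'), json_url becomes
    #   '.../+log/hash_a..hash_b?format=json'.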
    response = crash_utils.GetDataFromURL(json_url)
    if not response:
      return

    # Parse changelog from the returned object. The returned string should
    # start with ")]}'\n", so start from the 6th character.
    revisions = crash_utils.LoadJSON(response[5:])
    if not revisions:
      return
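    # For reference, a gitiles JSON response typically begins with a security
    # prefix, e.g.:
    #   )]}'
    #   {"log": [{"commit": "...", "author": {...}, "message": "..."}, ...]}
    # The [5:] slice above drops the four characters of ")]}'" plus the
    # newline that follows.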

    # Parse individual revision in the log.
    for revision in revisions['log']:
      githash = revision['commit']
      self.ParseRevision(revision_url, githash, revision_map,
                         file_to_revision_map)

    # Also parse the revision at range_start, because googlesource excludes it
    # from the returned changelog.
    self.ParseRevision(revision_url, range_start, revision_map,
                       file_to_revision_map)

def ParseMessage(self, message, codereview_api_url):
    """Parses the commit message.

    It checks the message to extract the code review website and the list of
    reviewers, and it also checks if the CL is a revert of another CL.

    Args:
      message: The message to parse.
      codereview_api_url: URL to retrieve codereview data from.
    """
    for line in message.splitlines():
      line = line.strip()
      review_url_line_match = REVIEW_URL_PATTERN.match(line)

      # Check if the line has the code review information.
      if review_url_line_match:

        # Get the review number for the code review site from the line.
        issue_number = review_url_line_match.group(2)

        # Get JSON from the code review site; ignore the line if it fails.
        url = codereview_api_url % issue_number
        json_string = crash_utils.GetDataFromURL(url)
        if not json_string:
          logging.warning(
              'Failed to retrieve code review information from %s', url)
          continue

        # Load the JSON from the string, and get the list of reviewers.
        code_review = crash_utils.LoadJSON(json_string)
        if code_review:
          self.reviewers = code_review['reviewers']

      # Check if this CL is a revert of another CL.
      if line.lower().startswith('revert'):
        self.is_reverted = True

        # Check if the line says what CL this CL is a revert of.
        revert = self.REVERT_PATTERN.match(line)
        if revert:
          self.revert_of = revert.group(2)
        return
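
    # Example of a commit message this method understands (all values are
    # made up for illustration):
    #   Revert of Fix crash in Foo.
    #   ...
    #   Review URL: https://codereview.chromium.org/123456
    # The 'Review URL:' line yields the issue number that is substituted into
    # codereview_api_url, and a leading 'revert' marks the CL as a revert.
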
def ParseRevision(self, revision_url, githash, revision_map,
                  file_to_revision_map):
    """Parses a single revision and adds its information to the given maps.

    Args:
      revision_url: The url to retrieve individual revision from.
      githash: The git hash of the revision to parse.
      revision_map: A map from a git hash number to its revision information.
      file_to_revision_map: A map from file to a git hash in which it occurs.
    """
    # Retrieve data from the URL, return if it fails.
    url = revision_url % githash
    response = crash_utils.GetDataFromURL(url + '?format=json')
    if not response:
      return

    # Load the JSON object from the string, skipping the ")]}'\n" prefix as
    # above. If it fails, terminate the function.
    json_revision = crash_utils.LoadJSON(response[5:])
    if not json_revision:
      return

    # Create a map representing this revision, and get its githash from the
    # JSON object.
    revision = {}
    githash = json_revision['commit']

    # Set author, message and URL of this CL.
    revision['author'] = json_revision['author']['name']
    revision['time'] = json_revision['author']['time']
    revision['message'] = json_revision['message']
    revision['url'] = url

    # Iterate through the changed files.
    for diff in json_revision['tree_diff']:
      file_path = diff['new_path']
      file_change_type = diff['type']

      # Normalize file action so that it fits with svn_repository_parser.
      file_change_type = _ConvertToFileChangeType(file_change_type)

      # Add the file to the map.
      if file_path not in file_to_revision_map:
        file_to_revision_map[file_path] = []
      file_to_revision_map[file_path].append((githash, file_change_type))
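      # For illustration (shape assumed from the code above): each entry of
      # file_to_revision_map maps a file path to a list of
      # (githash, file_change_type) tuples, one per CL that touched the file.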

    # Add this CL to the map.
    revision_map[githash] = revision

def ParseBlameInfo(self, component, file_path, line, revision):
    """Parses the blame information of the given file line at a revision.

    Args:
      component: The component the file belongs to.
      file_path: The path of the file to get the blame info of.
      line: The line number to get the blame info of.
      revision: The revision to get the blame info at.

    Returns:
      A tuple (content, revision, author, revision_url, message) for the
      region containing the line, or None if it cannot be found.
    """
    base_url = self.component_to_url_map[component]['repository']

    # Retrieve blame JSON file from googlesource. If it fails, return None.
    url_part = self.url_parts_map['blame_url'] % (revision, file_path)
    blame_url = base_url + url_part
    json_string = crash_utils.GetDataFromURL(blame_url)
    if not json_string:
      return

    # Parse the JSON object from the string. The returned string should
    # start with ")]}'\n", so start from the 6th character.
    annotation = crash_utils.LoadJSON(json_string[5:])
    if not annotation:
      return

    # Go through the regions, which is a list of consecutive lines with the
    # same author/revision.
    for blame_line in annotation['regions']:
      start = blame_line['start']
      count = blame_line['count']

      # For each region, check if the line we want the blame info of falls
      # within it.
      if start <= line <= start + count - 1:
        # If we are in the right region, get the information from the line.
        revision = blame_line['commit']
        author = blame_line['author']['name']
        revision_url_parts = self.url_parts_map['revision_url'] % revision
        revision_url = base_url + revision_url_parts
        # TODO(jeun): Add a way to get content from the JSON object.
        content = None

        (revision_info, _) = self.ParseChangelog(component, revision, revision)
        message = revision_info[revision]['message']
        return (content, revision, author, revision_url, message)

    # Return None if the region does not exist.
    return None
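
    # For reference, the blame JSON is assumed (from the code above) to have
    # the shape:
    #   {"regions": [{"start": 120, "count": 3, "commit": "...",
    #                 "author": {"name": "..."}}, ...]}
    # A region covers lines [start, start + count - 1], which is exactly what
    # the containment check above tests.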