Example #1
  def fetch(self, builder_name, dest_dir):
    """ Downloads actual GM results for a particular builder.

    Args:
      builder_name: which builder to download results of
      dest_dir: path to directory where the image files will be written;
                if the directory does not exist yet, it will be created

    TODO(epoger): Display progress info.  Right now, it can take a long time
    to download all of the results, and there is no indication of progress.

    TODO(epoger): Download multiple images in parallel to speed things up.
    """
    json_url = posixpath.join(self._actuals_base_url, builder_name,
                              self._json_filename)
    json_contents = urllib2.urlopen(json_url).read()
    results_dict = gm_json.LoadFromString(json_contents)

    actual_results_dict = results_dict[gm_json.JSONKEY_ACTUALRESULTS]
    for result_type in sorted(actual_results_dict.keys()):
      results_of_this_type = actual_results_dict[result_type]
      if not results_of_this_type:
        continue
      for image_name in sorted(results_of_this_type.keys()):
        (test, config) = self._image_filename_re.match(image_name).groups()
        (hash_type, hash_digest) = results_of_this_type[image_name]
        source_url = gm_json.CreateGmActualUrl(
            test_name=test, hash_type=hash_type, hash_digest=hash_digest,
            gm_actuals_root_url=self._gm_actuals_root_url)
        dest_path = os.path.join(dest_dir, config, test + '.png')
        url_utils.copy_contents(source_url=source_url, dest_path=dest_path,
                                create_subdirs_if_needed=True)
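For context, below is a hedged sketch of the data that fetch() walks, reconstructed from the keys and unpacking used above; the result-type name, test, config, and digest are illustrative, not taken from a real summary.

# A hedged sketch of the summary consumed by fetch(), written as the Python
# dict that gm_json.LoadFromString() would return.
example_results_dict = {
    gm_json.JSONKEY_ACTUALRESULTS: {
        'failed': {                        # one sub-dict per result type
            # image_name is assumed to look like "<test>_<config>.png";
            # _image_filename_re splits it into (test, config).
            'gradients_8888.png': [gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5,
                                   14760033689012826769],
        },
    },
}
# For the entry above, fetch() builds a source URL with
# gm_json.CreateGmActualUrl() and writes the downloaded image to
# <dest_dir>/8888/gradients.png.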
Example #2
    def _GetActualResults(self, contents):
        """Returns the dictionary of actual results from a JSON string,
        in this form:

        {
          'test1' : 14760033689012826769,
          'test2' : 9151974350149210736,
          ...
        }

        We make this simplifying assumption:
        1. All results are of type JSONKEY_HASHTYPE_BITMAP_64BITMD5.

        Any test which violates that assumption will cause an exception to
        be raised.

        Any tests for which we have no actual results will be left out of the
        returned dictionary.
        """
        result_dict = {}
        json_dict = gm_json.LoadFromString(contents)
        all_result_types = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
        for result_type in all_result_types.keys():
            results_of_this_type = all_result_types[result_type]
            if results_of_this_type:
                for test_name in results_of_this_type.keys():
                    digest_pair = results_of_this_type[test_name]
                    if (digest_pair[0] !=
                            gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5):
                        raise ValueError(
                            'test %s has unsupported hashtype %s' %
                            (test_name, digest_pair[0]))
                    result_dict[test_name] = digest_pair[1]
        return result_dict
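To make the docstring's example output concrete, the sketch below constructs an input string that should map to exactly that dictionary, assuming gm_json.LoadFromString() accepts standard JSON; the result-type name 'failed' is an illustrative assumption.

# Hedged sketch: an input for _GetActualResults() matching the docstring.
import json

contents = json.dumps({
    gm_json.JSONKEY_ACTUALRESULTS: {
        'failed': {   # illustrative result-type name
            'test1': [gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5,
                      14760033689012826769],
            'test2': [gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5,
                      9151974350149210736],
        },
    },
})
# self._GetActualResults(contents) should then return
# {'test1': 14760033689012826769, 'test2': 9151974350149210736}.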
Example #3
 def _GetActualResults(self, json_url, sections=None):
     """Returns a dictionary of actual results parsed from the JSON summary
     at json_url, optionally restricted to the given result sections."""
     json_contents = self._GetFileContents(json_url)
     json_dict = gm_json.LoadFromString(json_contents)
     results_to_return = {}
     actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
     if not sections:
         sections = actual_results.keys()
     for section in sections:
         section_results = actual_results[section]
         if section_results:
             results_to_return.update(section_results)
     return results_to_return
Example #4
 def _GetActualResults(self, json_url, sections=None):
     """Returns a dictionary of actual results parsed from the JSON summary
     at json_url, or None if the summary cannot be read."""
     try:
         json_contents = self._GetFileContents(json_url)
     except _InternalException:
         print >> sys.stderr, (
             'could not read json_url %s ; skipping this platform.' %
             json_url)
         return None
     json_dict = gm_json.LoadFromString(json_contents)
     results_to_return = {}
     actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
     if not sections:
         sections = actual_results.keys()
     for section in sections:
         section_results = actual_results[section]
         if section_results:
             results_to_return.update(section_results)
     return results_to_return
Example #5
    def _GetExpectedResults(self, contents):
        """Returns the dictionary of expected results from a JSON string,
        in this form:

        {
          'test1' : 14760033689012826769,
          'test2' : 9151974350149210736,
          ...
        }

        We make these simplifying assumptions:
        1. Each test has either 0 or 1 allowed results.
        2. All expectations are of type JSONKEY_HASHTYPE_BITMAP_64BITMD5.

        Any tests which violate those assumptions will cause an exception to
        be raised.

        Any tests for which we have no expectations will be left out of the
        returned dictionary.
        """
        result_dict = {}
        json_dict = gm_json.LoadFromString(contents)
        all_expectations = json_dict[gm_json.JSONKEY_EXPECTEDRESULTS]

        # Prevent https://code.google.com/p/skia/issues/detail?id=1588
        # ('svndiff.py: 'NoneType' object has no attribute 'keys'')
        if not all_expectations:
            return result_dict

        for test_name in all_expectations.keys():
            test_expectations = all_expectations[test_name]
            allowed_digests = test_expectations[
                gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS]
            if allowed_digests:
                num_allowed_digests = len(allowed_digests)
                if num_allowed_digests > 1:
                    raise ValueError('test %s has %d allowed digests' %
                                     (test_name, num_allowed_digests))
                digest_pair = allowed_digests[0]
                if digest_pair[0] != gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5:
                    raise ValueError('test %s has unsupported hashtype %s' %
                                     (test_name, digest_pair[0]))
                result_dict[test_name] = digest_pair[1]
        return result_dict
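For reference, here is a hedged sketch of the expectations structure this method traverses; the key constants come from gm_json, while the test names and digest are illustrative.

# Hedged sketch of the expectations dictionary walked above.  'test3' shows
# the "no expectations" case, which is silently left out of the result.
example_expectations_dict = {
    gm_json.JSONKEY_EXPECTEDRESULTS: {
        'test1': {
            gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS: [
                [gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5,
                 14760033689012826769],
            ],
        },
        'test3': {
            gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS: None,
        },
    },
}
# _GetExpectedResults() would return {'test1': 14760033689012826769}; a test
# with more than one allowed digest, or a hash type other than
# JSONKEY_HASHTYPE_BITMAP_64BITMD5, raises ValueError instead.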
Example #6
    def _GetExpectedResults(self, filepath):
        """Returns the dictionary of expected results from a JSON file,
        in this form:

        {
          'test1' : 14760033689012826769,
          'test2' : 9151974350149210736,
          ...
        }

        We make these simplifying assumptions:
        1. Each test has either 0 or 1 allowed results.
        2. All expectations are of type JSONKEY_HASHTYPE_BITMAP_64BITMD5.

        Any tests which violate those assumptions will cause an exception to
        be raised.

        Any tests for which we have no expectations will be left out of the
        returned dictionary.
        """
        result_dict = {}
        contents = self._GetFileContentsAsString(filepath)
        json_dict = gm_json.LoadFromString(contents)
        all_expectations = json_dict[gm_json.JSONKEY_EXPECTEDRESULTS]
        for test_name in all_expectations.keys():
            test_expectations = all_expectations[test_name]
            allowed_digests = test_expectations[
                gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS]
            if allowed_digests:
                num_allowed_digests = len(allowed_digests)
                if num_allowed_digests > 1:
                    raise ValueError(
                        'test %s in file %s has %d allowed digests' %
                        (test_name, filepath, num_allowed_digests))
                digest_pair = allowed_digests[0]
                if digest_pair[0] != gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5:
                    raise ValueError(
                        'test %s in file %s has unsupported hashtype %s' %
                        (test_name, filepath, digest_pair[0]))
                result_dict[test_name] = digest_pair[1]
        return result_dict
Example #7
    def _GetActualResults(self, json_url, sections=None):
        """Returns a dictionary of actual results parsed from the JSON summary
        at json_url, optionally restricted to the given result sections.
        Raises ValueError if the summary cannot be loaded and
        self._missing_json_is_fatal is set; otherwise returns {} in that case.
        """
        try:
            json_contents = self._GetContentsOfUrl(json_url)
        except (urllib2.HTTPError, IOError):
            message = 'unable to load JSON summary URL %s' % json_url
            if self._missing_json_is_fatal:
                raise ValueError(message)
            else:
                print '# %s' % message
                return {}

        json_dict = gm_json.LoadFromString(json_contents)
        results_to_return = {}
        actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
        if not sections:
            sections = actual_results.keys()
        for section in sections:
            section_results = actual_results[section]
            if section_results:
                results_to_return.update(section_results)
        return results_to_return
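A standalone sketch of the section-merging behavior shared by the last few examples follows: sections are merged in order with dict.update(), so a test appearing in more than one section keeps the value from the section processed last. The section names and digests are illustrative.

# Hedged, self-contained sketch of the merge loop above.
actual_results = {
    'failed':    {'test1': 111, 'test2': 222},
    'succeeded': {'test2': 333},
}
results_to_return = {}
for section in ['failed', 'succeeded']:
    section_results = actual_results[section]
    if section_results:
        results_to_return.update(section_results)
# results_to_return == {'test1': 111, 'test2': 333}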
Example #8
  def get_builders(self):
    """ Returns the actuals for the given rietveld issue's tryjobs.
    {builder:string -> ActualLocation}

    e.g.
    {'Test-Android-Xoom-Tegra2-Arm7-Release': (
        'chromium-skia-gm-summaries',
        'Test-Android-Xoom-Tegra2-Arm7-Release-Trybot/actual-results.json',
        '1415041165535000')}
    """
    result = dict()
    json_filename_re = re.compile(
        r'^Created: gs://([^/]+)/((?:[^/]+/)+%s)#(\d+)$'
        % re.escape(self._json_filename), re.MULTILINE)
    codereview_api_url = 'https://codereview.chromium.org/api'
    upload_gm_step_url = '/steps/Upload GM Results/logs/stdio'

    logging.info('Fetching issue %s ...' % (self._issue,))
    json_issue_url = '%s/%s' % (codereview_api_url, self._issue)
    json_issue_data = urllib2.urlopen(json_issue_url).read()
    issue_dict = gm_json.LoadFromString(json_issue_data)

    patchsets = issue_dict.get("patchsets", [])
    if not patchsets:
      logging.warning('No patchsets for rietveld issue %s.' % (self._issue,))
      return result
    patchset = patchsets[-1]

    logging.info('Fetching issue %s patch %s...' % (self._issue, patchset))
    json_patchset_url = '%s/%s/%s' % (codereview_api_url, self._issue, patchset)
    json_patchset_data = urllib2.urlopen(json_patchset_url).read()
    patchset_dict = gm_json.LoadFromString(json_patchset_data)

    # try_job_results is ordered reverse chronologically
    try_job_results = patchset_dict.get('try_job_results', [])
    for try_job_result in try_job_results:
      try_builder = try_job_result.get('builder', '<bad builder>')
      if not try_builder.endswith('-Trybot'):
        logging.warning('Builder %s is not a trybot?' % (try_builder,))
        continue
      builder = try_builder[:-len('-Trybot')]
      if builder in result:
        continue

      logging.info('Fetching issue %s patch %s try %s...' %
                  (self._issue, patchset, try_builder))
      build_url = try_job_result.get('url', '<bad url>')
      gm_upload_output_url = build_url + urllib2.quote(upload_gm_step_url)
      logging.info('Fetching %s ...' % (gm_upload_output_url,))

      # Tryjobs might not produce the step, but don't let that fail everything.
      gm_upload_output = None
      try:
        gm_upload_output = urllib2.urlopen(gm_upload_output_url).read()
      except (urllib2.HTTPError, urllib2.URLError, httplib.HTTPException) as e:
        logging.warning(e)
      except Exception:
        logging.exception('Error opening %s .' % (gm_upload_output_url,))
      if not gm_upload_output:
        logging.warning('Could not fetch %s .' % (gm_upload_output_url,))
        continue

      json_filename_match = json_filename_re.search(gm_upload_output)
      if json_filename_match:
        logging.info('Found issue %s patch %s try %s result gs://%s/%s#%s .' %
                    (self._issue, patchset, builder,
                    json_filename_match.group(1),
                    json_filename_match.group(2),
                    json_filename_match.group(3)))
        result[builder] = ActualLocation(json_filename_match.group(1),
                                         json_filename_match.group(2),
                                         json_filename_match.group(3))
      else:
        logging.warning('Did not find %s for issue %s patch %s try %s.' %
                      (self._json_filename, self._issue, patchset, try_builder))

    return result
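The ActualLocation container returned per builder is not defined in this snippet; a plausible minimal definition, with field names assumed, is sketched below. get_builders() fills it from the three regex groups: the Google Storage bucket, the path to the JSON summary inside that bucket, and the object generation.

# Hedged sketch only: the real definition lives elsewhere and its field names
# may differ.
import collections

ActualLocation = collections.namedtuple(
    'ActualLocation', ['bucket', 'path', 'generation'])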