  def testNoRetryForSendResultFatalException(self):

    def raise_fatal_exception(url, histogramset_json, service_account_file,
                              token_generator_callback):
      del url, histogramset_json, service_account_file  # unused
      del token_generator_callback  # unused
      raise results_dashboard.SendResultsFatalException('Do not retry')

    with mock.patch('core.results_dashboard._SendHistogramJson',
                    side_effect=raise_fatal_exception) as m:
      upload_result = results_dashboard.SendResults(
          self.perf_data,
          self.dashboard_url,
          send_as_histograms=True,
          service_account_file=self.fake_service,
          token_generator_callback=self.dummy_token_generator,
          num_retries=5)
      # A fatal exception must not be retried: the upload reports failure and
      # the patched _SendHistogramJson is called exactly once.
      self.assertFalse(upload_result)
      self.assertEqual(m.call_count, 1)
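For context, the behaviour this test pins down can be illustrated with a minimal, hypothetical retry loop. This is only a sketch of the idea, not the actual code in core.results_dashboard (the retry-exception class name is an assumption): a fatal exception aborts immediately and the upload reports failure, so the send function is never attempted again no matter how large num_retries is.

class SendResultsRetryException(Exception):
  """Transient upload failure; the caller may retry."""

class SendResultsFatalException(Exception):
  """Permanent upload failure; retrying cannot help."""

def _send_with_retries(send_fn, num_retries):
  # Illustrative only: retry transient failures, never fatal ones.
  for _ in range(num_retries + 1):
    try:
      send_fn()
      return True  # Upload succeeded.
    except SendResultsFatalException:
      return False  # Give up immediately on a fatal error.
    except SendResultsRetryException:
      continue  # Transient error: try the next attempt.
  return False  # All retries exhausted.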
Example 2
def main(args):
  parser = _CreateParser()
  options, extra_args = parser.parse_args(args)

  # Validate options.
  if extra_args:
    parser.error('Unexpected command line arguments')
  if not options.configuration_name or not options.results_url:
    parser.error('configuration_name and results_url are required.')

  if options.oauth_token_file:
    with open(options.oauth_token_file) as f:
      oauth_token = f.readline()
  else:
    oauth_token = None

  if not options.send_as_histograms:
    dashboard_json = _GetDashboardJson(options)
  else:
    dashboard_json = _GetDashboardHistogramData(options)

  if dashboard_json:
    if options.output_json_file:
      with open(options.output_json_file, 'w') as output_file:
        json.dump(dashboard_json, output_file,
            indent=4, separators=(',', ': '))
    if not results_dashboard.SendResults(
        dashboard_json,
        options.results_url,
        options.tmp_dir,
        options.output_json_dashboard_url,
        send_as_histograms=options.send_as_histograms,
        oauth_token=oauth_token):
      return 1
  else:
    print('Error: No perf dashboard JSON was produced.')
    print('@@@STEP_FAILURE@@@')
    return 1
  return 0
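_CreateParser() is not part of this excerpt. A hypothetical optparse-based sketch, consistent with the way main() consumes the parsed options above (the flag spellings, defaults, and help strings are assumptions, not the tool's real interface), might look like this:

import optparse

def _CreateParser():
  # Hypothetical parser; only the dest names are taken from main() above.
  parser = optparse.OptionParser()
  parser.add_option('--configuration-name', dest='configuration_name',
                    help='Bot/builder configuration name (required).')
  parser.add_option('--results-url', dest='results_url',
                    help='Perf dashboard URL to upload to (required).')
  parser.add_option('--oauth-token-file', dest='oauth_token_file',
                    help='File whose first line is an OAuth token.')
  parser.add_option('--send-as-histograms', dest='send_as_histograms',
                    action='store_true', default=False,
                    help='Upload HistogramSet JSON instead of chart JSON.')
  parser.add_option('--output-json-file', dest='output_json_file',
                    help='Also write the generated dashboard JSON here.')
  parser.add_option('--output-json-dashboard-url',
                    dest='output_json_dashboard_url',
                    help='File to write the dashboard URL into.')
  parser.add_option('--tmp-dir', dest='tmp_dir',
                    help='Directory for intermediate upload state.')
  return parser

With such a parser, a call like main(['--configuration-name', 'linux-perf', '--results-url', 'https://chromeperf.example.com']) would pass validation and proceed to build the dashboard JSON.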
  def testSuccessfulSendHistogramJson(self):

    def request(*args, **kwargs):
      """Fake httplib2.Http.request: always returns HTTP 200 and the dummy token."""
      del args, kwargs  # unused
      content = json.dumps(self.dummy_token)

      class Response(object):
        status = 200

      return Response(), content

    with mock.patch('core.results_dashboard.time.sleep') as sleep_mock:
      with mock.patch.object(
          results_dashboard.httplib2.Http, 'request', new=request):
        with mock.patch.object(logging, 'info') as log_mock:

          upload_result = results_dashboard.SendResults(
              self.perf_data,
              'dummy_benchmark',
              self.dashboard_url,
              send_as_histograms=True,
              token_generator_callback=self.dummy_token_generator,
              num_retries=5)

          self.assertTrue(upload_result)
          self.assertFalse(sleep_mock.mock_calls)

          # Check that the upload completion token id is logged at info level.
          found_token = False
          for log_call in log_mock.call_args_list:
            log = log_call[0][0] % tuple(log_call[0][1:])
            if 'dummy1234' in log:
              found_token = True
              break
          self.assertTrue(
              found_token, msg='Upload completion token not found in logs.')
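Both tests lean on fixtures (self.perf_data, self.dashboard_url, self.fake_service, self.dummy_token, self.dummy_token_generator) that are created elsewhere, presumably in setUp(). A hypothetical setUp() sketch consistent with how the fixtures are used above; every name and value here is an assumption, except that the dummy token must contain the 'dummy1234' id the log check looks for:

import unittest

# The tests above presumably also rely on imports along these lines:
#   import json, logging, mock
#   from core import results_dashboard

class SendResultsTestSketch(unittest.TestCase):
  def setUp(self):
    # All fixture values are invented for illustration; only their shape is
    # inferred from how the tests use them.
    self.dashboard_url = 'https://chromeperf.example.com'
    self.fake_service = '/tmp/fake-service-account.json'
    self.perf_data = [{'name': 'dummy_metric', 'unit': 'ms'}]  # placeholder payload
    # json.dumps(self.dummy_token) is what the fake HTTP response body carries,
    # and testSuccessfulSendHistogramJson greps the info logs for 'dummy1234'.
    self.dummy_token = {'token': 'dummy1234'}

    def dummy_token_generator(*args, **kwargs):
      del args, kwargs  # The fake ignores whatever the real callback passes.
      return 'fake-oauth-token'

    self.dummy_token_generator = dummy_token_generator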