Example #1
 def CreateFlakeAnalysisRequest(flake):
   analysis_request = FlakeAnalysisRequest.Create(
       flake.name, flake.is_step, flake.bug_id)
   for step in flake.build_steps:
     analysis_request.AddBuildStep(step.master_name, step.builder_name,
                                   step.build_number, step.step_name,
                                   time_util.GetUTCNow())
   return analysis_request
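A minimal usage sketch of the helper above, assuming the module's own imports (FlakeAnalysisRequest, time_util) are available and that the flake record exposes the fields the function reads (name, is_step, bug_id, and build steps with master_name, builder_name, build_number, step_name). The namedtuples here are illustrative stand-ins, not part of the Findit module.

# Illustrative only: these namedtuples stand in for whatever flake object
# Findit actually passes to CreateFlakeAnalysisRequest; only the field names
# used by the helper matter.
import collections

Flake = collections.namedtuple(
    'Flake', ['name', 'is_step', 'bug_id', 'build_steps'])
Step = collections.namedtuple(
    'Step', ['master_name', 'builder_name', 'build_number', 'step_name'])

flake = Flake(
    name='flaky_test', is_step=False, bug_id=123,
    build_steps=[Step('m', 'b1', 1, 's')])
analysis_request = CreateFlakeAnalysisRequest(flake)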
    def testCopyFrom(self):
        request1 = FlakeAnalysisRequest.Create('flaky_test', False, 123)

        request2 = FlakeAnalysisRequest.Create('flaky_test', True, 456)
        request2.AddBuildStep('m', 'b1', 1, 's', datetime(2016, 10, 1))
        request2.user_emails = ['email']
        analysis = MasterFlakeAnalysis.Create('m', 'b', 100, 's', 't')
        analysis.Save()
        request2.analyses.append(analysis.key)

        request1.CopyFrom(request2)

        self.assertEqual(request2.is_step, request1.is_step)
        self.assertEqual(request2.bug_id, request1.bug_id)
        self.assertEqual(request2.user_emails, request1.user_emails)
        self.assertEqual(request2.build_steps, request1.build_steps)
        self.assertEqual(request2.analyses, request1.analyses)
    def testFindMatchingAnalysisForConfiguration(self, _):
        request = FlakeAnalysisRequest.Create('test', False, 123)
        analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 'test')
        analysis.Save()
        request.analyses.append(analysis.key)
        request.Save()

        self.assertEqual(
            analysis, request.FindMatchingAnalysisForConfiguration('m', 'b'))
    def testObscureFlakeAnalysisRequest(self):
        self.mock_current_user(user_email='*****@*****.**', is_admin=True)

        mocked_utcnow = datetime(2017, 5, 5, 22, 50, 10)
        self.MockUTCNow(mocked_utcnow)
        valid_request_time = obscure_emails._TimeBeforeNow(days=5)
        invalid_request_time = obscure_emails._TimeBeforeNow(
            days=obscure_emails._REQUEST_RECORD_RENTENSION_DAYS + 10)

        old_request = FlakeAnalysisRequest.Create('flake1', False, 123)
        old_request.user_emails.append('*****@*****.**')
        old_request.user_emails_obscured = False
        old_request.user_emails_last_edit = invalid_request_time
        old_request.Save()

        recent_request = FlakeAnalysisRequest.Create('flake2', False, 321)
        recent_request.user_emails.append('*****@*****.**')
        recent_request.user_emails_obscured = False
        recent_request.user_emails_last_edit = valid_request_time
        recent_request.Save()

        response = self.test_app.get('/obscure-emails',
                                     params={'format': 'json'})
        expected_response = {
            'failure_triage_count': 0,
            'flake_triage_count': 0,
            'flake_request_aggregated_count': 1,
            'flake_request_count': 0,
        }
        self.assertEqual(expected_response, response.json_body)

        old_request = FlakeAnalysisRequest.GetVersion(key='flake1', version=1)
        self.assertTrue(old_request.user_emails_obscured)
        self.assertEqual(['*****@*****.**'], old_request.user_emails)

        recent_request = FlakeAnalysisRequest.GetVersion(key='flake2',
                                                         version=1)
        self.assertFalse(recent_request.user_emails_obscured)
        self.assertEqual(['*****@*****.**'], recent_request.user_emails)
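For reference, a plausible sketch of the obscure_emails._TimeBeforeNow helper used above, assuming it simply subtracts a day offset from the current (mocked) UTC time; the real implementation in obscure_emails may differ.

# Assumed behavior only; not the actual obscure_emails implementation.
from datetime import timedelta

def _TimeBeforeNow(days=0):
  return time_util.GetUTCNow() - timedelta(days=days)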
 def testAddBuildStep(self):
     t1 = datetime(2016, 10, 1, 0, 0, 0)
     t2 = datetime(2016, 10, 2, 0, 0, 0)
     t3 = datetime(2016, 10, 2, 1, 0, 0)
     t4 = datetime(2016, 10, 2, 0, 30, 0)
     request = FlakeAnalysisRequest.Create('flaky_test', False, 123)
     self.assertTrue(request.AddBuildStep('m', 'b1', 1, 's', t1))
     self.assertTrue(request.AddBuildStep('m', 'b2', 10, 's', t2))
     self.assertFalse(request.AddBuildStep('m', 'b2', 11, 's', t3))
     self.assertTrue(request.AddBuildStep('m', 'b2', 9, 's', t4))
     self.assertEqual(2, len(request.build_steps), request.build_steps)
     self.assertEqual(BuildStep.Create('m', 'b1', 1, 's', t1),
                      request.build_steps[0])
     self.assertEqual(BuildStep.Create('m', 'b2', 9, 's', t4),
                      request.build_steps[1])
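The de-duplication exercised by testAddBuildStep can be summarized in a standalone sketch: keep at most one build step per (master, builder) pair and prefer the lower build number. This is an assumption about the behavior the assertions imply, not the actual FlakeAnalysisRequest.AddBuildStep implementation.

# Hypothetical helper reproducing the behavior the assertions above imply,
# using plain tuples instead of BuildStep entities.
def AddBuildStepSketch(build_steps, master_name, builder_name, build_number,
                       step_name, reported_time):
  for index, existing in enumerate(build_steps):
    if (existing[0], existing[1]) == (master_name, builder_name):
      if build_number >= existing[2]:
        return False  # An earlier or equal build for this builder is kept.
      # A lower build number replaces the recorded step for this builder.
      build_steps[index] = (
          master_name, builder_name, build_number, step_name, reported_time)
      return True
  build_steps.append(
      (master_name, builder_name, build_number, step_name, reported_time))
  return True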
  def run(self, master_name, builder_name, build_number):
    """Triggers flake analyses for flaky tests found by build failure analysis.

    Args:
      master_name (str): The master name.
      builder_name (str): The builder name.
      build_number (int): The build number.
    """

    analysis = WfAnalysis.Get(master_name, builder_name, build_number)

    if not analysis or not analysis.failure_result_map:  # pragma: no cover
      return

    for step in analysis.failure_result_map.iterkeys():
      task = WfSwarmingTask.Get(
          master_name, builder_name, build_number, step)

      if not task:  # pragma: no cover
        continue

      flaky_tests = task.classified_tests.get('flaky_tests', [])

      if not flaky_tests:  # pragma: no cover
        continue

      # Trigger a master flake analysis on each detected flaky test.
      # TODO(lijeffrey): Rerun all tests once typical load is determined to be
      # within reasonable limits. For now, while experimenting with automatic
      # flakiness checking, only run 1 test per analysis to avoid excessive
      # load on the swarming server when an analysis has too many flaky tests.
      test_name = flaky_tests[0]
      request = FlakeAnalysisRequest.Create(test_name, False, None)
      request.AddBuildStep(
          master_name, builder_name, build_number, step,
          time_util.GetUTCNow())
      scheduled = flake_analysis_service.ScheduleAnalysisForFlake(
          request, '*****@*****.**', False,
          triggering_sources.FINDIT_PIPELINE)

      if scheduled:  # pragma: no branch
        logging.info('%s/%s/%s has %s flaky tests.',
                     master_name, builder_name, build_number, len(flaky_tests))
        logging.info('A flake analysis has been triggered for %s', test_name)
 def testGetNormalizedConfigurationNames(self):
     master_name = 'm'
     builder_name = 'b'
     build_number = 123
     step_name = 's'
     test_name = 't'
     reported_time = datetime(2016, 11, 16)
     request = FlakeAnalysisRequest.Create(test_name, False, 123)
     build_step = BuildStep.Create(master_name, builder_name, build_number,
                                   step_name, reported_time)
     build_step.wf_master_name = master_name
     build_step.wf_builder_name = builder_name
     build_step.wf_build_number = build_number
     build_step.wf_step_name = step_name
     request.build_steps.append(build_step)
     self.assertEqual((None, None),
                      request._GetNormalizedConfigurationNames('m2', 'b2'))
     self.assertEqual(
         (master_name, builder_name),
         request._GetNormalizedConfigurationNames(master_name,
                                                  builder_name))
  def testRequestUnsupportedAnalysis(self, _):
    master_name = 'm'
    builder_name = 'b'
    build_number = 123
    step_name = 's'
    test_name = 't'

    previous_request = FlakeAnalysisRequest.Create(test_name, False, None)
    previous_request.AddBuildStep(
        master_name, builder_name, build_number, step_name, None)
    previous_request.swarmed = False
    previous_request.supported = False

    self.assertRaisesRegexp(
        webtest.app.AppError,
        re.compile('.*not supported.*', re.MULTILINE | re.DOTALL),
        self.test_app.get,
        '/waterfall/flake',
        params={
            'url': buildbot.CreateBuildUrl(
                master_name, builder_name, build_number),
            'step_name': step_name,
            'test_name': test_name,
            'format': 'json'})
 def testWaterfallFlake(self):
     request = FlakeAnalysisRequest.Create('flaky_test', False, 123)
     request.AddBuildStep('chromium.linux', 'b1', 1, 's',
                          datetime(2016, 11, 14))
     self.assertFalse(request.on_cq)
  def testRequestExistingAnalysis(self, *_):
    master_name = 'm'
    builder_name = 'b'
    build_number = 123
    step_name = 's'
    test_name = 't'
    success_rate = 0.9

    previous_analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number - 1, step_name, test_name)
    data_point = DataPoint()
    data_point.build_number = build_number - 1
    data_point.pass_rate = success_rate
    previous_analysis.data_points.append(data_point)
    previous_analysis.status = analysis_status.COMPLETED
    previous_analysis.suspected_flake_build_number = 100
    previous_analysis.request_time = datetime.datetime(2016, 10, 1, 12, 10, 0)
    previous_analysis.start_time = datetime.datetime(2016, 10, 1, 12, 10, 5)
    previous_analysis.end_time = datetime.datetime(2016, 10, 1, 13, 10, 0)
    previous_analysis.algorithm_parameters = {'iterations_to_rerun': 100}
    previous_analysis.Save()

    previous_request = FlakeAnalysisRequest.Create(test_name, False, None)
    build_step = BuildStep.Create(
        master_name, builder_name, build_number, step_name, None)
    build_step.wf_master_name = build_step.master_name
    build_step.wf_builder_name = build_step.builder_name
    build_step.wf_build_number = build_step.build_number
    build_step.wf_step_name = build_step.step_name
    previous_request.build_steps.append(build_step)
    previous_request.analyses.append(previous_analysis.key)
    previous_request.Save()

    self.mock_current_user(user_email='*****@*****.**')

    response = self.test_app.get('/waterfall/flake', params={
        'url': buildbot.CreateBuildUrl(master_name, builder_name, build_number),
        'step_name': step_name,
        'test_name': test_name,
        'format': 'json'})

    expected_check_flake_result = {
        'key': previous_analysis.key.urlsafe(),
        'pass_rates': [[12345, 0.9, '1', 100, 'git_hash_2', 12344,
                        'git_hash_1']],
        'analysis_status': STATUS_TO_DESCRIPTION.get(previous_analysis.status),
        'master_name': master_name,
        'builder_name': builder_name,
        'build_number': build_number - 1,
        'step_name': step_name,
        'test_name': test_name,
        'request_time': '2016-10-01 12:10:00 UTC',
        'build_level_number': 1,
        'revision_level_number': 0,
        'error': None,
        'iterations_to_rerun': 100,
        'pending_time': '00:00:05',
        'duration': '00:59:55',
        'suspected_flake': {
            'build_number': 100,
            'commit_position': 12345,
            'git_hash': 'a_git_hash',
            'triage_result': 0
        },
        'version_number': 1,
        'show_input_ui': False,
        'culprit': {},
        'try_job_status': None,
        'last_attempted_swarming_task': {
            'task_id': None,
            'build_number': None
        },
        'last_attempted_try_job': {},
        'user_email': '*****@*****.**'
    }

    self.assertEqual(200, response.status_int)
    self.assertEqual(expected_check_flake_result, response.json_body)
    def HandleGet(self):
        key = self.request.get('key')
        if key:
            analysis = ndb.Key(urlsafe=key).get()
            if not analysis:  # pragma: no cover
                return self.CreateError('Analysis of flake is not found', 404)
        else:
            build_url = self.request.get('url', '').strip()
            build_info = buildbot.ParseBuildUrl(build_url)
            if not build_info:  # pragma: no cover
                return self.CreateError('Unknown build info!', 400)
            master_name, builder_name, build_number = build_info

            step_name = self.request.get('step_name', '').strip()
            test_name = self.request.get('test_name', '').strip()
            bug_id = self.request.get('bug_id', '').strip()
            # TODO(lijeffrey): Add support for force flag to trigger a rerun.

            error = self._ValidateInput(step_name, test_name, bug_id)

            if error:  # pragma: no cover
                return error

            build_number = int(build_number)
            bug_id = int(bug_id) if bug_id else None
            user_email = auth_util.GetUserEmail()
            is_admin = auth_util.IsCurrentUserAdmin()

            request = FlakeAnalysisRequest.Create(test_name, False, bug_id)
            request.AddBuildStep(master_name, builder_name, build_number,
                                 step_name, time_util.GetUTCNow())
            scheduled = flake_analysis_service.ScheduleAnalysisForFlake(
                request, user_email, is_admin, triggering_sources.FINDIT_UI)

            analysis = MasterFlakeAnalysis.GetVersion(master_name,
                                                      builder_name,
                                                      build_number, step_name,
                                                      test_name)

            if not analysis:
                if scheduled is None:
                    # The user does not have permission to trigger a new
                    # analysis, and no previously triggered analysis exists
                    # to view.
                    return {
                        'template': 'error.html',
                        'data': {
                            'error_message':
                            ('You can schedule an analysis for a flaky test '
                             'only after you log in with a google.com '
                             'account.'),
                            'login_url':
                            self.GetLoginUrl(),
                        },
                        'return_code': 401,
                    }

                # Check whether a previous request already covers this
                # analysis; if so, reuse the results from that analysis.
                request = FlakeAnalysisRequest.GetVersion(key=test_name)

                if not (request and request.analyses):
                    return {
                        'template': 'error.html',
                        'data': {
                            'error_message':
                            ('Flake analysis is not supported for this request. Either'
                             ' the build step may not be supported or the test is not '
                             'swarmed.'),
                        },
                        'return_code': 400,
                    }

                analysis = request.FindMatchingAnalysisForConfiguration(
                    master_name, builder_name)

                if not analysis:  # pragma: no cover
                    logging.error('Flake analysis was deleted unexpectedly!')
                    return {
                        'template': 'error.html',
                        'data': {
                            'error_message':
                            'Flake analysis was deleted unexpectedly!',
                        },
                        'return_code': 400
                    }

        suspected_flake = _GetSuspectedFlakeInfo(analysis)
        culprit = _GetCulpritInfo(analysis)
        build_level_number, revision_level_number = _GetNumbersOfDataPointGroups(
            analysis.data_points)

        data = {
            'key': analysis.key.urlsafe(),
            'master_name': analysis.master_name,
            'builder_name': analysis.builder_name,
            'build_number': analysis.build_number,
            'step_name': analysis.step_name,
            'test_name': analysis.test_name,
            'pass_rates': [],
            'analysis_status': analysis.status_description,
            'try_job_status': analysis_status.STATUS_TO_DESCRIPTION.get(
                analysis.try_job_status),
            'last_attempted_swarming_task':
                _GetLastAttemptedSwarmingTaskDetails(analysis),
            'last_attempted_try_job': _GetLastAttemptedTryJobDetails(analysis),
            'version_number': analysis.version_number,
            'suspected_flake': suspected_flake,
            'culprit': culprit,
            'request_time': time_util.FormatDatetime(analysis.request_time),
            'build_level_number': build_level_number,
            'revision_level_number': revision_level_number,
            'error': analysis.error_message,
            'iterations_to_rerun': analysis.iterations_to_rerun,
            'show_input_ui': self._ShowInputUI(analysis),
        }

        if (users.is_current_user_admin() and analysis.completed
                and analysis.triage_history):
            data['triage_history'] = analysis.GetTriageHistory()

        data['pending_time'] = time_util.FormatDuration(
            analysis.request_time, analysis.start_time
            or time_util.GetUTCNow())
        if analysis.status != analysis_status.PENDING:
            data['duration'] = time_util.FormatDuration(
                analysis.start_time, analysis.end_time
                or time_util.GetUTCNow())

        data['pass_rates'] = _GetCoordinatesData(analysis)

        return {'template': 'flake/result.html', 'data': data}
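For completeness, an illustrative call against this handler, mirroring the query parameters HandleGet reads (url, step_name, test_name, bug_id, format). It follows the webtest pattern used in the tests above; the master/builder/build values and the bug id are placeholders.

# Fragment from inside a webtest-based test method; placeholder values only.
response = self.test_app.get('/waterfall/flake', params={
    'url': buildbot.CreateBuildUrl('m', 'b', 123),
    'step_name': 's',
    'test_name': 't',
    'bug_id': '456',
    'format': 'json'})
self.assertEqual(200, response.status_int)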