def testReportStats(self):
  retry_stats.SetupStats()

  # Insert some stats to report.
  retry_stats.RetryWithStats(
      self.CAT, self.handlerNoRetry, 3, self.callSuccess)
  retry_stats.RetryWithStats(
      self.CAT_B, self.handlerNoRetry, 3, self.callSuccess)
  self.assertRaises(TestRetryException,
                    retry_stats.RetryWithStats,
                    self.CAT, self.handlerRetry, 3, self.callFailure)

  out = StringIO()
  retry_stats.ReportStats(out)

  # Expecting reports for both CAT and CAT_B used above.
  expected = """************************************************************
** Performance Statistics for Test Service A
**
** Success: 1
** Failure: 1
** Retries: 3
** Total: 2
************************************************************
************************************************************
** Performance Statistics for Test Service B
**
** Success: 1
** Failure: 0
** Retries: 0
** Total: 1
************************************************************
"""
  self.assertEqual(out.getvalue(), expected)
def testReportStatsEmpty(self):
  retry_stats.SetupStats()

  out = StringIO()
  retry_stats.ReportStats(out)

  # No data collected means no categories are known, nothing to report.
  self.assertEqual(out.getvalue(), '')
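# Illustrative sketch (not from the original test file): how calling code might
# use the same retry_stats API the tests above exercise. The helper name,
# category string, retry policy, and flaky callable are hypothetical; only
# SetupStats(), RetryWithStats(), and ReportStats() are the real entry points
# shown in the tests. Assumes `import sys` and `from chromite.lib import
# retry_stats` at module scope.
def _ExampleRetryStatsUsage(flaky_func, *args):
  """Run |flaky_func| with stats-tracked retries, then dump a report."""
  retry_stats.SetupStats()

  def _ShouldRetry(e):
    # Hypothetical policy: only transient I/O errors are worth retrying.
    return isinstance(e, IOError)

  # Up to 3 retries, recorded under an illustrative category name that would
  # appear in the ReportStats() banner, like 'Test Service A' above.
  result = retry_stats.RetryWithStats(
      'Example Service', _ShouldRetry, 3, flaky_func, *args)

  retry_stats.ReportStats(sys.stdout)
  return result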
def PerformStage(self):
  """Perform the actual work for this stage.

  This includes final metadata archival, updating CIDB with our final
  status, and producing a logged build result summary.
  """
  build_id, db = self._run.GetCIDBHandle()
  if results_lib.Results.BuildSucceededSoFar(db, build_id, self.name):
    final_status = constants.FINAL_STATUS_PASSED
  else:
    final_status = constants.FINAL_STATUS_FAILED

  if not hasattr(self._run.attrs, 'release_tag'):
    # If, for some reason, the sync stage was not completed and
    # release_tag was not set, set it to None here because
    # ArchiveResults() depends on the existence of this attr.
    self._run.attrs.release_tag = None

  # Set up our report metadata.
  self._run.attrs.metadata.UpdateWithDict(
      self.GetReportMetadata(
          final_status=final_status,
          completion_instance=self._completion_instance))

  # Some operations can only be performed if a valid version is available.
  try:
    self._run.GetVersionInfo()
    self.ArchiveResults(final_status, build_id, db)
    metadata_url = os.path.join(self.upload_url, constants.METADATA_JSON)
  except cbuildbot_run.VersionNotSetError:
    logging.error('A valid version was never set for this run. '
                  'Can not archive results.')
    metadata_url = ''

  results_lib.Results.Report(
      sys.stdout, current_version=(self._run.attrs.release_tag or ''))

  if db:
    # TODO(akeshet): Eliminate this status string translate once
    # these differing status strings are merged, crbug.com/318930
    translateStatus = lambda s: (constants.BUILDER_STATUS_PASSED
                                 if s == constants.FINAL_STATUS_PASSED
                                 else constants.BUILDER_STATUS_FAILED)
    status_for_db = translateStatus(final_status)

    child_metadatas = self._run.attrs.metadata.GetDict().get(
        'child-configs', [])
    for child_metadata in child_metadatas:
      db.FinishChildConfig(build_id, child_metadata['name'],
                           translateStatus(child_metadata['status']))

    # TODO(pprabhu): After BuildData and CBuildbotMetadata are merged, remove
    # this extra temporary object creation.
    # XXX:HACK We're creating a BuildData with an empty URL. Don't try to
    # MarkGathered this object.
    build_data = metadata_lib.BuildData(
        "", self._run.attrs.metadata.GetDict())

    # TODO(akeshet): Find a clearer way to get the "primary upload url" for
    # the metadata.json file. One alternative is _GetUploadUrls(...)[0].
    # Today it seems that element 0 of its return list is the primary upload
    # url, but there is no guarantee or unit test coverage of that.
    db.FinishBuild(build_id, status=status_for_db,
                   summary=build_data.failure_message,
                   metadata_url=metadata_url)

    duration = self._GetBuildDuration()
    mon_fields = {
        'status': status_for_db,
        'build_config': self._run.config.name,
        'important': self._run.config.important,
    }
    metrics.Counter(
        constants.MON_BUILD_COMP_COUNT).increment(fields=mon_fields)
    metrics.SecondsDistribution(constants.MON_BUILD_DURATION).add(
        duration, fields=mon_fields)

  # From this point forward, treat all exceptions as warnings.
  self._post_completion = True

  # Dump report about things we retry.
  retry_stats.ReportStats(sys.stdout)

  # Dump performance stats for this build versus recent builds.
  if db:
    output = StringIO.StringIO()
    self.CollectComparativeBuildTimings(output, build_id, db)
    # Bunch up our output, so it doesn't interleave with CIDB logs.
    sys.stdout.write(output.getvalue())
def PerformStage(self):
  """Perform the actual work for this stage.

  This includes final metadata archival, updating CIDB with our final
  status, and producing a logged build result summary.
  """
  build_identifier, _ = self._run.GetCIDBHandle()
  build_id = build_identifier.cidb_id
  buildbucket_id = build_identifier.buildbucket_id
  if results_lib.Results.BuildSucceededSoFar(self.buildstore, buildbucket_id,
                                             self.name):
    final_status = constants.BUILDER_STATUS_PASSED
  else:
    final_status = constants.BUILDER_STATUS_FAILED

  if not hasattr(self._run.attrs, 'release_tag'):
    # If, for some reason, the sync stage was not completed and
    # release_tag was not set, set it to None here because
    # ArchiveResults() depends on the existence of this attr.
    self._run.attrs.release_tag = None

  # Set up our report metadata.
  self._run.attrs.metadata.UpdateWithDict(
      self.GetReportMetadata(
          final_status=final_status,
          completion_instance=self._completion_instance))

  src_root = self._build_root
  # Workspace builders use a different buildroot for overlays.
  if self._run.config.workspace_branch and self._run.options.workspace:
    src_root = self._run.options.workspace

  # Add tags for the arches and statuses of the build.
  # arches requires crossdev which isn't available at the early part of the
  # build.
  arches = []
  for board in self._run.config['boards']:
    toolchains = toolchain.GetToolchainsForBoard(board, buildroot=src_root)
    default = list(
        toolchain.FilterToolchains(toolchains, 'default', True))
    if default:
      try:
        arches.append(toolchain.GetArchForTarget(default[0]))
      except cros_build_lib.RunCommandError as e:
        logging.warning(
            'Unable to retrieve arch for board %s default toolchain %s: %s',
            board, default, e)
  tags = {
      'arches': arches,
      'status': final_status,
  }
  results = self._run.attrs.metadata.GetValue('results')
  for stage in results:
    tags['stage_status:%s' % stage['name']] = stage['status']
    tags['stage_summary:%s' % stage['name']] = stage['summary']
  self._run.attrs.metadata.UpdateKeyDictWithDict(constants.METADATA_TAGS,
                                                 tags)

  # Some operations can only be performed if a valid version is available.
  try:
    self._run.GetVersionInfo()
    self.ArchiveResults(final_status)
    metadata_url = os.path.join(self.upload_url, constants.METADATA_JSON)
  except cbuildbot_run.VersionNotSetError:
    logging.error('A valid version was never set for this run. '
                  'Can not archive results.')
    metadata_url = ''

  results_lib.Results.Report(
      sys.stdout, current_version=(self._run.attrs.release_tag or ''))

  # Upload goma log if used for BuildPackage and TestSimpleChrome.
  _UploadAndLinkGomaLogIfNecessary(
      'BuildPackages', self._run.config.name, self._run.options.goma_dir,
      self._run.options.goma_client_json,
      self._run.attrs.metadata.GetValueWithDefault('goma_tmp_dir'))
  _UploadAndLinkGomaLogIfNecessary(
      'TestSimpleChromeWorkflow', self._run.config.name,
      self._run.options.goma_dir, self._run.options.goma_client_json,
      self._run.attrs.metadata.GetValueWithDefault(
          'goma_tmp_dir_for_simple_chrome'))

  if self.buildstore.AreClientsReady():
    status_for_db = final_status

    # TODO(pprabhu): After BuildData and CBuildbotMetadata are merged, remove
    # this extra temporary object creation.
    # XXX:HACK We're creating a BuildData with an empty URL. Don't try to
    # MarkGathered this object.
    build_data = metadata_lib.BuildData(
        '', self._run.attrs.metadata.GetDict())

    # TODO(akeshet): Find a clearer way to get the "primary upload url" for
    # the metadata.json file. One alternative is _GetUploadUrls(...)[0].
    # Today it seems that element 0 of its return list is the primary upload
    # url, but there is no guarantee or unit test coverage of that.
    self.buildstore.FinishBuild(build_id, status=status_for_db,
                                summary=build_data.failure_message,
                                metadata_url=metadata_url)

    duration = self._GetBuildDuration()

    mon_fields = {
        'status': status_for_db,
        'build_config': self._run.config.name,
        'important': self._run.config.important,
    }
    metrics.Counter(
        constants.MON_BUILD_COMP_COUNT).increment(fields=mon_fields)
    metrics.CumulativeSecondsDistribution(
        constants.MON_BUILD_DURATION).add(duration, fields=mon_fields)

    if self._run.options.sanity_check_build:
      metrics.Counter(
          constants.MON_BUILD_SANITY_COMP_COUNT).increment(fields=mon_fields)
      metrics.Gauge(
          constants.MON_BUILD_SANITY_ID,
          description='The build number of the latest sanity build. Used '
                      'for recovering the link to the latest failing build '
                      'in the alert when a sanity build fails.',
          field_spec=[
              ts_mon.StringField('status'),
              ts_mon.StringField('build_config'),
              ts_mon.StringField('builder_name'),
              ts_mon.BooleanField('important'),
          ]).set(self._run.buildnumber,
                 fields=dict(mon_fields,
                             builder_name=self._run.GetBuilderName()))

    if config_lib.IsMasterCQ(self._run.config):
      self_destructed = self._run.attrs.metadata.GetValueWithDefault(
          constants.SELF_DESTRUCTED_BUILD, False)
      mon_fields = {
          'status': status_for_db,
          'self_destructed': self_destructed,
      }
      metrics.CumulativeSecondsDistribution(
          constants.MON_CQ_BUILD_DURATION).add(duration, fields=mon_fields)
      annotator_link = uri_lib.ConstructAnnotatorUri(build_id)
      logging.PrintBuildbotLink('Build annotator', annotator_link)

  # From this point forward, treat all exceptions as warnings.
  self._post_completion = True

  # Dump report about things we retry.
  retry_stats.ReportStats(sys.stdout)
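# Illustrative sketch (not part of ReportStage): the build-completion metric
# emission from PerformStage above, pulled out as a standalone helper so the
# ts_mon pattern is easy to see in isolation. The helper name and parameter
# list are hypothetical; the metrics calls and constants mirror the ones
# already used in PerformStage.
def _EmitBuildCompletionMetrics(status, build_config, important, duration):
  """Record one build completion and its duration in ts_mon."""
  mon_fields = {
      'status': status,
      'build_config': build_config,
      'important': important,
  }
  # Count the completed build under the completion counter...
  metrics.Counter(
      constants.MON_BUILD_COMP_COUNT).increment(fields=mon_fields)
  # ...and record how long it took, in seconds, in the duration distribution.
  metrics.CumulativeSecondsDistribution(
      constants.MON_BUILD_DURATION).add(duration, fields=mon_fields)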