def store_stats(self):
  """Store stats about this run in local and optionally remote stats dbs."""
  stats = {
    'run_info': self.run_info.get_as_dict(),
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all()
  }

  # Dump individual stat file.
  # TODO(benjy): Do we really need these, once the statsdb is mature?
  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  safe_file_dump(stats_file, json.dumps(stats))

  # Add to local stats db.
  StatsDBFactory.global_instance().get_db().insert_stats(stats)

  # Upload to remote stats db.
  stats_url = self.get_options().stats_upload_url
  if stats_url:
    self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)

  # Write stats to local json file.
  stats_json_file_name = self.get_options().stats_local_json_file
  if stats_json_file_name:
    self.write_stats_to_json(stats_json_file_name, stats)
def store_stats(self):
  """Store stats about this run in local and optionally remote stats dbs."""
  run_information = self.run_info.get_as_dict()
  target_data = run_information.get('target_data', None)
  if target_data:
    run_information['target_data'] = ast.literal_eval(target_data)

  stats = {
    'run_info': run_information,
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'critical_path_timings': self.get_critical_path_timings().get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all(),
    'pantsd_stats': self.pantsd_stats.get_all(),
    'outcomes': self.outcomes
  }

  # Dump individual stat file.
  # TODO(benjy): Do we really need these, once the statsdb is mature?
  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  safe_file_dump(stats_file, json.dumps(stats))

  # Add to local stats db.
  StatsDBFactory.global_instance().get_db().insert_stats(stats)

  # Upload to remote stats db.
  stats_url = self.get_options().stats_upload_url
  if stats_url:
    self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)

  # Write stats to local json file.
  stats_json_file_name = self.get_options().stats_local_json_file
  if stats_json_file_name:
    self.write_stats_to_json(stats_json_file_name, stats)
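In the variant above, run_info holds target_data as the string repr of a Python literal, and ast.literal_eval converts it back into real lists and dicts before the stats dict is serialized to JSON. A minimal sketch of that round trip, using a made-up target_data value purely for illustration:

  import ast
  import json

  # Hypothetical example value: target_data stored as the repr of a Python literal.
  target_data_str = "[{'address': 'src/python/foo:foo', 'target_type': 'python_library'}]"

  # literal_eval safely parses the repr back into lists/dicts/strings/numbers;
  # unlike eval(), it never executes arbitrary code.
  target_data = ast.literal_eval(target_data_str)

  # The structure now serializes as nested JSON objects instead of one opaque string.
  print(json.dumps(target_data, indent=2))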
def store_stats(self):
  """Store stats about this run in local and optionally remote stats dbs."""
  stats = {
    'run_info': self.run_info.get_as_dict(),
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all(),
    'outcomes': self.outcomes
  }

  # Dump individual stat file.
  # TODO(benjy): Do we really need these, once the statsdb is mature?
  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  safe_file_dump(stats_file, json.dumps(stats))

  # Add to local stats db.
  StatsDBFactory.global_instance().get_db().insert_stats(stats)

  # Upload to remote stats db.
  stats_url = self.get_options().stats_upload_url
  if stats_url:
    pid = os.fork()
    if pid == 0:
      try:
        self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)
      finally:
        os._exit(0)

  # Write stats to local json file.
  stats_json_file_name = self.get_options().stats_local_json_file
  if stats_json_file_name:
    self.write_stats_to_json(stats_json_file_name, stats)
def _handle_statsdata(self, relpath, params):
  """Show stats for pants runs in the statsdb."""
  statsdb = StatsDBFactory.global_instance().get_db()
  statsdata = list(statsdb.get_aggregated_stats_for_cmd_line('cumulative_timings', '%'))
  self._send_content(json.dumps(statsdata), 'application/json')
def store_stats(self):
  """Store stats about this run in local and optionally remote stats dbs."""
  run_information = self.run_info.get_as_dict()
  target_data = run_information.get('target_data', None)
  if target_data:
    run_information['target_data'] = ast.literal_eval(target_data)

  stats = {
    'run_info': run_information,
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all(),
    'outcomes': self.outcomes
  }

  # Dump individual stat file.
  # TODO(benjy): Do we really need these, once the statsdb is mature?
  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  safe_file_dump(stats_file, json.dumps(stats))

  # Add to local stats db.
  StatsDBFactory.global_instance().get_db().insert_stats(stats)

  # Upload to remote stats db.
  stats_url = self.get_options().stats_upload_url
  if stats_url:
    pid = os.fork()
    if pid == 0:
      try:
        self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)
      finally:
        os._exit(0)

  # Write stats to local json file.
  stats_json_file_name = self.get_options().stats_local_json_file
  if stats_json_file_name:
    self.write_stats_to_json(stats_json_file_name, stats)
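The os.fork branch in the two variants above is a fire-and-forget upload: the child process performs the slow post_stats call while the parent returns immediately, and os._exit(0) keeps the child from running the parent's normal shutdown path a second time. A self-contained sketch of the same pattern, with a placeholder upload function standing in for post_stats:

  import os
  import time

  def upload(payload):
    # Placeholder standing in for the real post_stats() HTTP call; simulates a slow upload.
    time.sleep(2)

  def fire_and_forget(payload):
    pid = os.fork()
    if pid == 0:
      # Child: do the slow work, then exit immediately via os._exit() so the
      # parent's atexit handlers and buffered output are not replayed.
      try:
        upload(payload)
      finally:
        os._exit(0)
    # Parent: falls through immediately without waiting for the child.

  fire_and_forget({'run_info': {'id': 'example_run'}})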
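Besides the statsdb that _handle_statsdata aggregates over, each run leaves a per-run stat file at get_pants_cachedir()/stats/<run id>.json, so the same data can be inspected directly. A small sketch of reading those files back, assuming (not guaranteed) that the cache dir resolves to ~/.cache/pants:

  import glob
  import json
  import os

  # Substitute whatever get_pants_cachedir() resolves to on your machine;
  # ~/.cache/pants is only an assumption used for this sketch.
  cache_dir = os.path.expanduser('~/.cache/pants')

  for path in glob.glob(os.path.join(cache_dir, 'stats', '*.json')):
    with open(path) as f:
      stats = json.load(f)
    # The keys mirror the dict built in store_stats: run_info, cumulative_timings,
    # self_timings, artifact_cache_stats, and so on.
    print(path, sorted(stats.keys()))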