def _parse_job_counters(self, job_id):
    """Extract output-record and output-size totals from a job's counters.

    Fetches the job identified by ``job_id`` and reads its counter groups:
    - ``RECORDS_OUT_0`` from the ``HIVE`` counter group -> total records
    - ``HDFS_BYTES_WRITTEN`` from the
      ``org.apache.hadoop.mapreduce.FileSystemCounter`` group -> total size

    Args:
        job_id: Identifier of the job whose counters should be parsed.

    Returns:
        tuple: ``(total_records, total_size)`` — either element is ``None``
        when the corresponding counter group or counter is absent.

    Raises:
        PopupException: If the job cannot be fetched or carries no counters.
    """
    total_records, total_size = None, None
    job = get_job(self.request, job_id=job_id)

    if not job or not job.counters:
        raise PopupException(_('Failed to get job details or job does not contain counters data.'))

    # Returns list of counter groups with 'counterGroupName' and 'counter'
    counter_groups = job.counters.get('counterGroup')
    if counter_groups:
        # Extract totalCounterValue from HIVE counter group
        hive_counters = next(
            (group for group in counter_groups if group.get('counterGroupName', '').upper() == 'HIVE'),
            None
        )
        if hive_counters:
            # .get('counter', []) guards against a group missing its 'counter' list
            total_records = next(
                (counter.get('totalCounterValue') for counter in hive_counters.get('counter', []) if counter.get('name') == 'RECORDS_OUT_0'),
                None
            )
        else:
            LOG.info("No HIVE counter group found for job: %s", job_id)

        # Extract totalCounterValue from FileSystemCounter counter group
        fs_counters = next(
            (group for group in counter_groups if group.get('counterGroupName') == 'org.apache.hadoop.mapreduce.FileSystemCounter'),
            None
        )
        if fs_counters:
            total_size = next(
                (counter.get('totalCounterValue') for counter in fs_counters.get('counter', []) if counter.get('name') == 'HDFS_BYTES_WRITTEN'),
                None
            )
        else:
            LOG.info("No FileSystemCounter counter group found for job: %s", job_id)

    return total_records, total_size