def __aggregate_next_second(self):
    """Pop the next parsed second from the reader and queue it for aggregation.

    Pulls one completed second via ``AbstractReader.pop_second``. If a
    monitoring sample exists in ``self.stat_data`` for the same epoch
    timestamp, its thread count is copied onto the second's overall stats
    and onto every per-case entry, and the consumed sample is removed so
    ``stat_data`` does not grow unboundedly. The (possibly enriched)
    second is then appended to ``self.pending_second_data_queue``.

    Side effects: mutates ``self.stat_data`` (removes the matched key) and
    ``self.pending_second_data_queue`` (appends). Logs at debug level when
    no new second is available. Returns ``None``.
    """
    parsed_sec = AbstractReader.pop_second(self)
    if not parsed_sec:
        # Nothing buffered yet — nothing to aggregate this tick.
        self.log.debug("No new seconds present")
        return

    # parsed_sec.time is a datetime; convert to the epoch-second key used
    # by stat_data (NOTE(review): mktime assumes local time — matches how
    # stat_data keys are presumably produced; verify against the writer).
    timestamp = int(time.mktime(parsed_sec.time.timetuple()))
    if timestamp in self.stat_data:
        # Consume the matching sample in one lookup instead of repeated
        # indexing plus a separate `del`.
        active_threads = self.stat_data.pop(timestamp)
        parsed_sec.overall.active_threads = active_threads
        for marker in parsed_sec.cases:
            parsed_sec.cases[marker].active_threads = active_threads
    self.pending_second_data_queue.append(parsed_sec)