def __init__(self, owner, jmeter):
    """Set up a JMeter results reader bound to its owning plugin."""
    AbstractReader.__init__(self, owner)
    self.jmeter = jmeter
    # parsing state
    self.results = None
    self.partial_buffer = ''
    self.buffer_size = 3
    # alternate CSV flavour is off by default
    self.use_argentum = False
def __init__(self, owner, phantom):
    """Set up a Phantom output reader with empty parsing state."""
    AbstractReader.__init__(self, owner)
    self.phantom = phantom
    # file handles are opened lazily
    self.phout = None
    self.stat = None
    # per-second auxiliary stat and load-plan state
    self.stat_data = {}
    self.pending_datetime = None
    self.steps = []
    # sentinel: no request seen yet (Python 2 sys.maxint)
    self.first_request_time = sys.maxint
    # buffering of incomplete lines and not-yet-popped seconds
    self.partial_buffer = ''
    self.pending_second_data_queue = []
    self.last_sample_time = 0
def __init__(self, owner, phantom):
    """Initialize the Phantom reader, including a processed-line counter."""
    AbstractReader.__init__(self, owner)
    self.phantom = phantom
    # lazily opened data sources
    self.phout = None
    self.stat = None
    # auxiliary stat snapshots keyed by second, and planned load steps
    self.stat_data = {}
    self.steps = []
    self.pending_datetime = None
    # sentinel meaning "no request observed yet" (Python 2 sys.maxint)
    self.first_request_time = sys.maxint
    # line buffering / queueing state
    self.partial_buffer = ''
    self.pending_second_data_queue = []
    self.last_sample_time = 0
    self.read_lines_count = 0
def pop_second(self):
    """Return the next second of data, inserting zero samples for gaps."""
    fresh = AbstractReader.pop_second(self)
    if fresh:
        self.pending_second_data_queue.append(fresh)
    else:
        self.log.debug("No new seconds present")

    # nothing queued means nothing to report
    if not self.pending_second_data_queue:
        self.log.debug("pending_second_data_queue empty")
        return None

    self.log.debug("pending_second_data_queue: %s",
                   self.pending_second_data_queue)
    queue_head_ts = int(
        time.mktime(self.pending_second_data_queue[0].time.timetuple()))
    gap_detected = self.last_sample_time \
        and (queue_head_ts - self.last_sample_time) > 1
    if gap_detected:
        # timeline hole: emit a synthetic zero sample for the missing second
        self.last_sample_time += 1
        self.log.debug("Adding phantom zero sample: %s", self.last_sample_time)
        sample = self.get_zero_sample(
            datetime.datetime.fromtimestamp(self.last_sample_time))
    else:
        sample = self.pending_second_data_queue.pop(0)
    self.last_sample_time = int(time.mktime(sample.time.timetuple()))
    sample.overall.planned_requests = self.__get_expected_rps()
    self.log.debug("Pop result: %s", sample)
    return sample
def pop_second(self):
    """Pop the next aggregated second; fills timeline gaps with zero samples."""
    produced = AbstractReader.pop_second(self)
    if not produced:
        self.log.debug("No new seconds present")
    else:
        self.pending_second_data_queue.append(produced)

    if not self.pending_second_data_queue:
        self.log.debug("pending_second_data_queue empty")
        return None

    self.log.debug("pending_second_data_queue: %s",
                   self.pending_second_data_queue)
    head = self.pending_second_data_queue[0]
    head_ts = int(time.mktime(head.time.timetuple()))
    if self.last_sample_time and (head_ts - self.last_sample_time) > 1:
        # more than one second between samples: synthesize the missing one
        self.last_sample_time += 1
        self.log.debug("Adding phantom zero sample: %s", self.last_sample_time)
        result = self.get_zero_sample(
            datetime.datetime.fromtimestamp(self.last_sample_time))
    else:
        result = self.pending_second_data_queue.pop(0)
    self.last_sample_time = int(time.mktime(result.time.timetuple()))
    result.overall.planned_requests = self.__get_expected_rps()
    self.log.debug("Pop result: %s", result)
    return result
def __aggregate_next_second(self):
    """Queue one aggregated second and drop its stale per-second stat entry."""
    second = AbstractReader.pop_second(self)
    if not second:
        self.log.debug("No new seconds present")
        return
    self.pending_second_data_queue.append(second)
    ts = int(time.mktime(second.time.timetuple()))
    if ts in self.stat_data:
        del self.stat_data[ts]
def __aggregate_next_second(self):
    """Feed the pending queue when the aggregator produced fresh data."""
    latest = AbstractReader.pop_second(self)
    if latest:
        self.pending_second_data_queue.append(latest)
        # discard the matching per-second stat entry, if any
        key = int(time.mktime(latest.time.timetuple()))
        self.stat_data.pop(key, None)
    else:
        self.log.debug("No new seconds present")
def __aggregate_next_second(self):
    """Aggregate the next second of data, if any is pending.

    Pops a parsed second from the base reader, attaches the matching
    active-thread count from ``self.stat_data`` (when one exists for
    that timestamp) to the overall sample and every per-case sample,
    consumes the stat entry, and queues the second for later popping.
    """
    parsed_sec = AbstractReader.pop_second(self)
    if parsed_sec:
        timestamp = int(time.mktime(parsed_sec.time.timetuple()))
        if timestamp in self.stat_data:
            # read the value once instead of re-indexing per case,
            # and avoid the redundant .keys() membership scan
            active = self.stat_data[timestamp]
            parsed_sec.overall.active_threads = active
            for marker in parsed_sec.cases:
                parsed_sec.cases[marker].active_threads = active
            del self.stat_data[timestamp]
        self.pending_second_data_queue.append(parsed_sec)
    else:
        self.log.debug("No new seconds present")
def __aggregate_next_second(self):
    """Move a freshly parsed second into the pending queue, tagging thread counts."""
    second = AbstractReader.pop_second(self)
    if not second:
        self.log.debug("No new seconds present")
        return
    ts = int(time.mktime(second.time.timetuple()))
    if ts in self.stat_data.keys():
        threads = self.stat_data[ts]
        second.overall.active_threads = threads
        for case_name in second.cases:
            second.cases[case_name].active_threads = threads
        del self.stat_data[ts]
    self.pending_second_data_queue.append(second)
def __init__(self, owner, jmeter):
    """Bind this reader to the owning aggregator and the JMeter plugin."""
    AbstractReader.__init__(self, owner)
    self.jmeter = jmeter
    # no data parsed yet
    self.results = None
def __init__(self, aggregator, bfg, result_cache_size=5):
    # Reader for BFG results; keeps a bounded result cache and the load plan.
    AbstractReader.__init__(self, aggregator)
    self.bfg = bfg
    self.result_cache_size = result_cache_size
    # NOTE(review): `si` is not defined anywhere in this view — this line
    # raises NameError when executed. Presumably the load steps should come
    # from the bfg/plugin object instead; confirm the intended source.
    self.steps = map(list, si.status.get_info().steps)
def __init__(self, aggregator, abench):
    """Wire an abench results reader to its aggregator."""
    AbstractReader.__init__(self, aggregator)
    self.abench = abench
    # nothing has been read yet
    self.results = None