def test_assert_statement_bool(self):
    """Check that assertStatement() works with a simple boolean case"""

    rolls_dfr = pd.DataFrame({"results": [1, 3, 2, 6, 2, 4]})
    trace = trappy.BareTrace()
    trace.add_parsed_event("dice_rolls", rolls_dfr)
    config = {"MAX_DICE_NUMBER": 6}
    t = Analyzer(trace, config)

    # select=0 evaluates the statement against a single (scalar) result
    statement = "numpy.max(dice_rolls:results) <= MAX_DICE_NUMBER"
    self.assertTrue(t.assertStatement(statement, select=0))
def test_assert_statement_dataframe(self):
    """assertStatement() works if the generated statement creates a
    pandas.DataFrame of bools"""

    dice_dfr = pd.DataFrame({"results": [1, 3, 2, 6, 2, 4]})
    trace = trappy.BareTrace()
    trace.add_parsed_event("dice_rolls", dice_dfr)
    config = {"MIN_DICE_NUMBER": 1, "MAX_DICE_NUMBER": 6}
    analyzer = Analyzer(trace, config)

    # Every roll is within [MIN, MAX], so the whole frame is True
    in_range = ("(dice_rolls:results <= MAX_DICE_NUMBER) & "
                "(dice_rolls:results >= MIN_DICE_NUMBER)")
    self.assertTrue(analyzer.assertStatement(in_range))

    # Only some rolls equal 3, so the assertion must fail
    self.assertFalse(analyzer.assertStatement("dice_rolls:results == 3"))
def test_boosted_utilization_signal(self):
    """The boosted utilization signal is appropriately boosted

    The margin should match the formula
    (sched_load_scale - util) * boost"""

    for tc in self.conf["confs"]:
        test_id = tc["tag"]

        # dict.keys() is not subscriptable in Python 3; materialize the
        # view first.  Picks the first (insertion-order) workload.
        wload_idx = list(self.conf["wloads"].keys())[0]
        run_dir = os.path.join(self.te.res_dir,
                               "rtapp:{}:{}".format(test_id, wload_idx),
                               "1")

        ftrace_events = ["sched_boost_task"]
        ftrace = trappy.FTrace(run_dir, scope="custom",
                               events=ftrace_events)

        first_task_params = self.conf["wloads"][wload_idx]["conf"]["params"]
        first_task_name = list(first_task_params.keys())[0]
        rta_task_name = "task_{}".format(first_task_name)

        # Restrict the boost events to the rt-app task under test and
        # register them as a parsed event so the grammar can reference them
        sbt_dfr = ftrace.sched_boost_task.data_frame
        boost_task_rtapp = sbt_dfr[sbt_dfr.comm == rta_task_name]
        ftrace.add_parsed_event("boost_task_rtapp", boost_task_rtapp)

        # Avoid the first period as the task starts with a very
        # high load and it overutilizes the CPU
        rtapp_period = first_task_params[first_task_name]["params"]["period_ms"]
        task_start = boost_task_rtapp.index[0]
        after_first_period = task_start + (rtapp_period / 1000.)

        boost = tc["cgroups"]["conf"]["schedtune"]["/stune"]["boost"] / 100.
        analyzer_const = {
            "SCHED_LOAD_SCALE": 1024,
            "BOOST": boost,
        }
        analyzer = Analyzer(ftrace, analyzer_const,
                            window=(after_first_period, None))
        statement = "(((SCHED_LOAD_SCALE - boost_task_rtapp:util) * BOOST) // 100) == boost_task_rtapp:margin"
        error_msg = "task was not boosted to the expected margin: {}".\
                    format(boost)
        self.assertTrue(analyzer.assertStatement(statement), msg=error_msg)
def test_boosted_utilization_signal(self):
    """Tasks in stune groups are boosted"""

    for tc in self.confs:
        conf_id = tc["tag"]

        # dict.keys() is not subscriptable in Python 3; materialize the
        # view first.  Picks the first (insertion-order) workload.
        wload_id = list(self.wloads.keys())[0]
        run_dir = os.path.join(self.te.res_dir,
                               "rtapp:{}:{}".format(conf_id, wload_id),
                               "1")

        ftrace_events = ["sched_boost_task"]
        ftrace = trappy.FTrace(run_dir, scope="custom",
                               events=ftrace_events)

        first_task_params = self.wloads[wload_id]["conf"]["params"]
        first_task_name = list(first_task_params.keys())[0]
        rta_task_name = "task_{}".format(first_task_name)

        # Avoid the first period as the task starts with a very
        # high load and it overutilizes the CPU
        rtapp_period = first_task_params[first_task_name]["params"]["period_ms"]
        sbt_dfr = ftrace.sched_boost_task.data_frame
        task_start = sbt_dfr[sbt_dfr.comm == rta_task_name].index[0]
        after_first_period = task_start + (rtapp_period / 1000.)

        boost = tc["cgroups"]["conf"]["schedtune"]["/stune"]["boost"]
        analyzer_const = {
            "SCHED_LOAD_SCALE": 1024,
            "BOOST": boost,
        }
        analyzer = Analyzer(ftrace, analyzer_const,
                            window=(after_first_period, None),
                            filters={"comm": rta_task_name})

        # Positive and negative boosts use different margin formulae
        if boost == 0:
            statement = "sched_boost_task:margin == 0"
        elif boost > 0:
            statement = "(((SCHED_LOAD_SCALE - sched_boost_task:util) * BOOST) // 100) == sched_boost_task:margin"
        else:
            statement = "-((-sched_boost_task:util * BOOST) // 100) == sched_boost_task:margin"

        error_msg = "task was not boosted to the expected margin: {:.2f}"\
                    .format(boost / 100.)
        self.assertTrue(analyzer.assertStatement(statement), msg=error_msg)
def test_boosted_utilization_signal(self):
    """Tasks in stune groups are boosted"""

    for tc in self.confs:
        conf_id = tc["tag"]

        # dict.keys() is not subscriptable in Python 3; materialize the
        # view first.  Picks the first (insertion-order) workload.
        wload_id = list(self.wloads.keys())[0]
        run_dir = os.path.join(self.te.res_dir,
                               "rtapp:{}:{}".format(conf_id, wload_id),
                               "1")

        ftrace_events = ["sched_boost_task"]
        ftrace = trappy.FTrace(run_dir, scope="custom",
                               events=ftrace_events)

        first_task_params = self.wloads[wload_id]["conf"]["params"]
        first_task_name = list(first_task_params.keys())[0]
        rta_task_name = "task_{}".format(first_task_name)

        # Avoid the first period as the task starts with a very
        # high load and it overutilizes the CPU
        rtapp_period = first_task_params[first_task_name]["params"]["period_ms"]
        sbt_dfr = ftrace.sched_boost_task.data_frame
        task_start = sbt_dfr[sbt_dfr.comm == rta_task_name].index[0]
        after_first_period = task_start + (rtapp_period / 1000.)

        boost = tc["cgroups"]["conf"]["schedtune"]["/stune"]["boost"]
        analyzer_const = {
            "SCHED_LOAD_SCALE": 1024,
            "BOOST": boost,
        }
        analyzer = Analyzer(ftrace, analyzer_const,
                            window=(after_first_period, None),
                            filters={"comm": rta_task_name})

        # Positive and negative boosts use different margin formulae
        if boost == 0:
            statement = "sched_boost_task:margin == 0"
        elif boost > 0:
            statement = "(((SCHED_LOAD_SCALE - sched_boost_task:util) * BOOST) // 100) == sched_boost_task:margin"
        else:
            statement = "-((-sched_boost_task:util * BOOST) // 100) == sched_boost_task:margin"

        error_msg = "task was not boosted to the expected margin: {:.2f}"\
                    .format(boost / 100.)
        self.assertTrue(analyzer.assertStatement(statement), msg=error_msg)
def test_boosted_utilization_signal(self):
    """The boosted utilization signal is appropriately boosted

    The margin should match the formula
    (sched_load_scale - util) * boost"""

    for tc in self.conf["confs"]:
        test_id = tc["tag"]

        # dict.keys() is not subscriptable in Python 3; materialize the
        # view first.  Picks the first (insertion-order) workload.
        wload_idx = list(self.conf["wloads"].keys())[0]
        run_dir = os.path.join(self.te.res_dir,
                               "rtapp:{}:{}".format(test_id, wload_idx),
                               "1")

        ftrace_events = ["sched_boost_task"]
        ftrace = trappy.FTrace(run_dir, scope="custom",
                               events=ftrace_events)

        first_task_params = self.conf["wloads"][wload_idx]["conf"]["params"]
        first_task_name = list(first_task_params.keys())[0]
        rta_task_name = "task_{}".format(first_task_name)

        # Restrict the boost events to the rt-app task under test and
        # register them as a parsed event so the grammar can reference them
        sbt_dfr = ftrace.sched_boost_task.data_frame
        boost_task_rtapp = sbt_dfr[sbt_dfr.comm == rta_task_name]
        ftrace.add_parsed_event("boost_task_rtapp", boost_task_rtapp)

        # Avoid the first period as the task starts with a very
        # high load and it overutilizes the CPU
        rtapp_period = first_task_params[first_task_name]["params"]["period_ms"]
        task_start = boost_task_rtapp.index[0]
        after_first_period = task_start + (rtapp_period / 1000.)

        boost = tc["cgroups"]["conf"]["schedtune"]["/stune"]["boost"] / 100.
        analyzer_const = {
            "SCHED_LOAD_SCALE": 1024,
            "BOOST": boost,
        }
        analyzer = Analyzer(ftrace, analyzer_const,
                            window=(after_first_period, None))
        statement = "(((SCHED_LOAD_SCALE - boost_task_rtapp:util) * BOOST) // 100) == boost_task_rtapp:margin"
        error_msg = "task was not boosted to the expected margin: {}".\
                    format(boost)
        self.assertTrue(analyzer.assertStatement(statement), msg=error_msg)
def setUpClass(cls):
    # We can run a workload invocation script here which then copies the
    # required traces for analysis to the host.
    trace_file = "update_a_trace_path_here"
    run = trappy.Run(trace_file, "test_run")

    # Parameters that we intend to use in the grammar
    config = {
        "THERMAL": trappy.thermal.Thermal,
        "OUT": trappy.cpu_power.CpuOutPower,
        "IN": trappy.cpu_power.CpuInPower,
        "PID": trappy.pid_controller.PIDController,
        "GOVERNOR": trappy.thermal.ThermalGovernor,
        "CONTROL_TEMP": 77000,
        "SUSTAINABLE_POWER": 2500,
        "EXPECTED_TEMP_QRT": 95,
        "EXPECTED_STD_PCT": 5,
    }

    # Define a Topology
    cls.BIG = '000000f0'
    cls.LITTLE = '0000000f'
    cls.tz = 0

    cls.analyzer = Analyzer(run, config)
def __init__(self, ftrace, config=None):
    # Normalize the input (trace path or FTrace object) into an FTrace
    # instance via the project helper.
    self._ftrace = Utils.init_ftrace(ftrace)
    # Grammar analyzer used to evaluate statements against the trace.
    self._analyzer = Analyzer(self._ftrace, config)
class ThermalAssert(object):

    """A class that accepts a TRAPpy FTrace object and
    provides assertions for thermal behaviours

    :param ftrace: A path to the trace file or a TRAPpy FTrace object
    :type ftrace: str, :mod:`trappy.ftrace.FTrace`
    """

    def __init__(self, ftrace, config=None):
        self._ftrace = Utils.init_ftrace(ftrace)
        self._analyzer = Analyzer(self._ftrace, config)

    def getThermalResidency(self, temp_range, window, percent=False):
        """Return the total time spent in a given temperature range

        :param temp_range: A tuple of (low_temp, high_temp) which specifies
            the range of temperature that one intends to calculate the
            residency for.
        :type temp_range: tuple

        :param window: A (start, end) tuple to limit the scope of the
            residency calculation.
        :type window: tuple

        :param percent: Returns the residency as a percentage of the total
            duration of the trace
        :type percent: bool

        .. seealso:

            :mod:`bart.thermal.ThermalAssert.ThermalAssert.assertThermalResidency`
        """

        # Get a pivoted thermal temperature data using the grammar
        data = self._analyzer.getStatement("trappy.thermal.Thermal:temp")

        result = {}
        # One residency value per pivot (top-level column group)
        for pivot, data_frame in data.groupby(axis=1, level=0):

            series = data_frame[pivot]
            series = Utils.select_window(series, window)
            # Boolean mask of samples inside [low_temp, high_temp]
            mask = (series >= temp_range[0]) & (series <= temp_range[1])
            index = series.index.values

            # pylint fails to recognize numpy members.
            # pylint: disable=no-member
            shift_index = np.roll(index, 1)
            # pylint: enable=no-member
            shift_index[0] = 0

            # (index - shift_index) is the time delta preceding each sample;
            # summing the deltas of in-range samples gives time in range.
            # NOTE(review): the first delta spans from t=0 to the first
            # sample, not from the window start — confirm that is intended.
            result[pivot] = sum((index - shift_index)[mask.values])

            if percent:
                result[pivot] = (
                    result[pivot] * 100.0) / self._ftrace.get_duration()

        return result

    def assertThermalResidency(
            self,
            expected_value,
            operator,
            temp_range,
            window,
            percent=False):
        """
        :param expected_value: The expected value of the residency
        :type expected_value: double

        :param operator: A binary operator function that returns a boolean.
            For example:
            ::

                import operator
                op = operator.ge
                assertThermalResidency(temp_range, expected_value, op)

            Will do the following check:
            ::

                getThermalResidency(temp_range) >= expected_value

            A custom function can also be passed:
            ::

                THRESHOLD=5
                def between_threshold(a, expected):
                    return abs(a - expected) <= THRESHOLD

        :param temp_range: A tuple of (low_temp, high_temp) which specifies
            the range of temperature that one intends to calculate the
            residency for.
        :type temp_range: tuple

        :param window: A (start, end) tuple to limit the scope of the
            residency calculation.
        :type window: tuple

        :param percent: Returns the residency as a percentage of the total
            duration of the trace
        :type percent: bool

        .. seealso:

            :mod:`bart.thermal.ThermalAssert.ThermalAssert.assertThermalResidency`
        """

        # Delegate the measurement, then apply the caller-supplied predicate
        residency = self.getThermalResidency(temp_range, window, percent)
        return operator(residency, expected_value)
def __init__(self, run, config=None):
    # Normalize the input (trace path or Run object) into a Run instance
    # via the project helper.
    self._run = Utils.init_run(run)
    # Grammar analyzer used to evaluate statements against the run.
    self._analyzer = Analyzer(self._run, config)
class ThermalAssert(object):

    """A class that accepts a TRAPpy Run object and
    provides assertions for thermal behaviours

    :param run: A path to the trace file or a TRAPpy Run object
    :type run: str, :mod:`trappy.run.Run`
    """

    def __init__(self, run, config=None):
        self._run = Utils.init_run(run)
        self._analyzer = Analyzer(self._run, config)

    def getThermalResidency(self, temp_range, window, percent=False):
        """Return the total time spent in a given temperature range

        :param temp_range: A tuple of (low_temp, high_temp) which specifies
            the range of temperature that one intends to calculate the
            residency for.
        :type temp_range: tuple

        :param window: A (start, end) tuple to limit the scope of the
            residency calculation.
        :type window: tuple

        :param percent: Returns the residency as a percentage of the total
            duration of the trace
        :type percent: bool

        .. seealso:

            :mod:`bart.thermal.ThermalAssert.ThermalAssert.assertThermalResidency`
        """

        # Get a pivoted thermal temperature data using the grammar
        data = self._analyzer.getStatement("trappy.thermal.Thermal:temp")

        result = {}
        # One residency value per pivot (top-level column group)
        for pivot, data_frame in data.groupby(axis=1, level=0):

            series = data_frame[pivot]
            series = Utils.select_window(series, window)
            # Boolean mask of samples inside [low_temp, high_temp]
            mask = (series >= temp_range[0]) & (series <= temp_range[1])
            index = series.index.values

            # pylint fails to recognize numpy members.
            # pylint: disable=no-member
            shift_index = np.roll(index, 1)
            # pylint: enable=no-member
            shift_index[0] = 0

            # (index - shift_index) is the time delta preceding each sample;
            # summing the deltas of in-range samples gives time in range.
            # NOTE(review): the first delta spans from t=0 to the first
            # sample, not from the window start — confirm that is intended.
            result[pivot] = sum((index - shift_index)[mask.values])

            if percent:
                result[pivot] = (result[pivot] * 100.0) / self._run.get_duration()

        return result

    def assertThermalResidency(self, expected_value, operator, temp_range,
                               window, percent=False):
        """
        :param expected_value: The expected value of the residency
        :type expected_value: double

        :param operator: A binary operator function that returns a boolean.
            For example:
            ::

                import operator
                op = operator.ge
                assertThermalResidency(temp_range, expected_value, op)

            Will do the following check:
            ::

                getThermalResidency(temp_range) >= expected_value

            A custom function can also be passed:
            ::

                THRESHOLD=5
                def between_threshold(a, expected):
                    return abs(a - expected) <= THRESHOLD

        :param temp_range: A tuple of (low_temp, high_temp) which specifies
            the range of temperature that one intends to calculate the
            residency for.
        :type temp_range: tuple

        :param window: A (start, end) tuple to limit the scope of the
            residency calculation.
        :type window: tuple

        :param percent: Returns the residency as a percentage of the total
            duration of the trace
        :type percent: bool

        .. seealso:

            :mod:`bart.thermal.ThermalAssert.ThermalAssert.assertThermalResidency`
        """

        # Delegate the measurement, then apply the caller-supplied predicate
        residency = self.getThermalResidency(temp_range, window, percent)
        return operator(residency, expected_value)
class ThermalAssert(object):

    """A class that accepts a TRAPpy Run object and provides
    assertions for thermal behaviours"""

    def __init__(self, run, config=None):
        self._run = Utils.init_run(run)
        self._analyzer = Analyzer(self._run, config)

    def getThermalResidency(self, temp_range, window, percent=False):
        """Returns the total time spent in a given temperature range

        Args:
            temp_range (tuple): A tuple of (low_temp, high_temp) which
                specifies the range of temperature that one intends to
                calculate the residency for.
            window (tuple): A (start, end) tuple to limit the scope of the
                residency calculation.
            percent: Returns the residency as a percentage of the total
                duration of the trace
        """

        # Get a pivoted thermal temperature data using the grammar
        data = self._analyzer.getStatement("trappy.thermal.Thermal:temp")

        result = {}
        # One residency value per pivot (top-level column group)
        for pivot, data_frame in data.groupby(axis=1, level=0):

            series = data_frame[pivot]
            series = Utils.select_window(series, window)
            # Boolean mask of samples inside [low_temp, high_temp]
            mask = (series >= temp_range[0]) & (series <= temp_range[1])
            index = series.index.values

            # pylint fails to recognize numpy members.
            # pylint: disable=no-member
            shift_index = np.roll(index, 1)
            # pylint: enable=no-member
            shift_index[0] = 0

            # (index - shift_index) is the time delta preceding each sample;
            # summing the deltas of in-range samples gives time in range.
            # NOTE(review): the first delta spans from t=0 to the first
            # sample, not from the window start — confirm that is intended.
            result[pivot] = sum((index - shift_index)[mask.values])

            if percent:
                result[pivot] = (
                    result[pivot] * 100.0) / self._run.get_duration()

        return result

    def assertThermalResidency(
            self,
            expected_value,
            operator,
            temp_range,
            window,
            percent=False):
        """
        Args:
            expected_value (double): The expected value of the residency
            operator (function): A binary operator function that returns
                a boolean
            temp_range (tuple): A tuple of (low_temp, high_temp) which
                specifies the range of temperature that one intends to
                calculate the residency for.
            window (tuple): A (start, end) tuple to limit the scope of the
                residency calculation.
            percent: Returns the residency as a percentage of the total
                duration of the trace
        """

        # Delegate the measurement, then apply the caller-supplied predicate
        residency = self.getThermalResidency(temp_range, window, percent)
        return operator(residency, expected_value)