def test_default_aggfunc_multiple_trigger(self):
    """MultiTriggerAggregator with the default aggfunc"""
    # One trigger per outcome, both pivoting on "identifier".
    triggers = [
        Trigger(self._trace, self._trace.aim_and_fire,
                filters={"result": outcome}, pivot="identifier",
                value=trig_value)
        for outcome, trig_value in (("fire", 1), ("blank", 2))
    ]

    aggregator = MultiTriggerAggregator(triggers, self.topology)
    results = aggregator.aggregate(level="cpu")

    time_index = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]
    expected_results = [
        pd.Series([1., 2., 1., 0., 0., 0.], index=time_index),
        pd.Series([0., 0., 0., 2., 1., 2.], index=time_index),
    ]

    self.assertEqual(len(results), len(expected_results))
    for got, want in zip(results, expected_results):
        assert_series_equal(got, want)
def test_vector_aggfunc_multiple_trigger(self):
    """TestAggregator: multi trigger vector aggfunc"""

    event_class = self._trace.aim_and_fire
    pivot = "identifier"

    # "fire" events contribute +1, "blank" events contribute -1.
    trigger_fire = Trigger(self._trace, event_class,
                           {"result": "fire"}, 1, pivot)
    trigger_blank = Trigger(self._trace, event_class,
                            {"result": "blank"}, -1, pivot)

    aggregator = MultiTriggerAggregator(
        [trigger_fire, trigger_blank],
        self.topology,
        aggfunc=lambda series: series.cumsum())

    # There are three "fire" in total
    # The all level in topology looks like
    # [[0, 1]]
    result = aggregator.aggregate(level="all")

    expected_result = pd.Series(
        [1.0, 0.0, 1.0, 0.0, 1.0, 0.0],
        index=pd.Index([0.1, 0.2, 0.3, 0.4, 0.5, 0.6]))
    assert_series_equal(result[0], expected_result)
def test_scalar_aggfunc_single_trigger(self):
    """TestAggregator: 1 trigger scalar aggfunc"""

    trigger = Trigger(self._trace, self._trace.aim_and_fire,
                      {"result": "fire"}, 1, "identifier")
    aggregator = MultiTriggerAggregator(
        [trigger], self.topology,
        aggfunc=lambda series: series.sum())

    # There are three "fire" in total
    # The all level in topology looks like
    # [[0, 1]]
    self.assertEqual(aggregator.aggregate(level="all"), [3.0])

    # There are two "fire" on the first node group and a single
    # "fire" on the second node group at the cluster level, which
    # looks like
    # [[0], [1]]
    self.assertEqual(aggregator.aggregate(level="cluster"), [2.0, 1.0])
def test_trigger_generation(self):
    """TestTrigger: generate"""

    trigger = Trigger(self._trace, self._trace.aim_and_fire,
                      {"result": "fire"}, 1, "identifier")

    # Pivot value 0 sees two "fire" events; pivot value 1 sees one.
    for pivot_val, times in ((0, [0.1, 0.3]), (1, [0.5])):
        expected = pd.Series([1] * len(times),
                             index=pd.Index(times, name="Time"))
        assert_series_equal(expected, trigger.generate(pivot_val))
def sched_switch_out_trigger(ftrace, pid, sched_switch_class):
    """Build a Trigger that fires when *pid* is switched OUT of a CPU.

    :param ftrace: A ftrace object with a sched_switch event
    :type ftrace: :mod:`trappy.ftrace.FTrace`

    :param pid: The PID of the associated process
    :type pid: int

    :param sched_switch_class: The SchedSwitch event class
    :type sched_switch_class: :mod:`trappy.base.Base`

    :return: :mod:`trappy.stats.Trigger.Trigger` on the SchedSwitch: OUT
        for the given PID
    """
    # Match only rows whose previous-PID field is this task,
    # pivoting on the CPU the switch happened on.
    return Trigger(ftrace,
                   sched_switch_class,
                   {PREV_PID_FIELD: pid},
                   SCHED_SWITCH_OUT,
                   CPU_FIELD)
def sched_switch_in_trigger(run, pid, sched_switch_class):
    """Build a Trigger that fires when *pid* is switched IN on a CPU.

    :param run: A run object with a sched_switch event
    :type run: :mod:`trappy.run.Run`

    :param pid: The PID of the associated process
    :type pid: int

    :param sched_switch_class: The SchedSwitch event class
    :type sched_switch_class: :mod:`trappy.base.Base`

    :return: :mod:`trappy.stats.Trigger.Trigger` on the SchedSwitch: IN
        for the given PID
    """
    # Match only rows whose next-PID field is this task,
    # pivoting on the CPU the switch happened on.
    return Trigger(run,
                   sched_switch_class,
                   {NEXT_PID_FIELD: pid},
                   SCHED_SWITCH_IN,
                   CPU_FIELD)
def test_trigger_with_func(self):
    """Trigger works with a function or lambda as filter"""

    # A lambda filter matching results that start with "fi" ("fire").
    trigger = Trigger(self._trace, self._trace.aim_and_fire,
                      filters={"result": lambda v: v.startswith("fi")},
                      value=1, pivot="identifier")

    expected = pd.Series([1], index=pd.Index([0.5], name="Time"))
    assert_series_equal(expected, trigger.generate(1))

    # A plain function filter matching results that start with "bl"
    # ("blank").
    def blank_filter(v):
        return v.startswith("bl")

    trigger = Trigger(self._trace, self._trace.aim_and_fire,
                      filters={"result": blank_filter},
                      value=1, pivot="identifier")

    expected = pd.Series([1, 1], index=pd.Index([0.4, 0.6], name="Time"))
    assert_series_equal(expected, trigger.generate(1))
def test_trigger_with_callable_class(self):
    """Trigger works with a callable class as filter"""

    class MatchesAfter(object):
        """Stateful filter: true when the *previous* sample equalled
        the target value."""

        def __init__(self, target):
            self.last = 0
            self.target = target

        def __call__(self, current):
            matched = (self.last == self.target)
            self.last = current
            return matched

    trigger = Trigger(self._trace, self._trace.aim_and_fire,
                      filters={"identifier": MatchesAfter(1)},
                      value=1, pivot="result")

    expected = pd.Series([1], index=pd.Index([0.6], name="Time"))
    assert_series_equal(expected, trigger.generate("blank"))
def test_filter_prev_values(self):
    """Trigger works with a filter that depends on previous values of the
    same pivot"""

    # We want a trigger whenever the identifier stops being 1 for "blank".
    class WasValue(object):
        """True when the previous sample for this pivot equalled target."""

        def __init__(self, target):
            self.last = 0
            self.target = target

        def __call__(self, current):
            fired = (self.last == self.target)
            self.last = current
            return fired

    trace = trappy.BareTrace()
    samples = collections.OrderedDict([
        (0.1, ["blank", 1]),
        (0.2, ["fire", 1]),
        (0.3, ["blank", 0]),   # value is no longer 1, trigger
        (0.4, ["blank", 1]),
        (0.5, ["fire", 0]),    # This should NOT trigger
        (0.6, ["blank", 0]),   # value is no longer 1 for blank, trigger
    ])

    data_frame = pd.DataFrame.from_dict(samples, orient="index")
    data_frame.columns = ["result", "identifier"]
    trace.add_parsed_event("aim_and_fire", data_frame)

    trigger = Trigger(trace, trace.aim_and_fire,
                      filters={"identifier": WasValue(1)},
                      value=-1, pivot="result")

    expected = pd.Series([-1, -1], index=[0.3, 0.6])
    assert_series_equal(expected, trigger.generate("blank"))
def test_trigger_generation(self):
    """TestTrigger: generate"""

    trigger = Trigger(self._trace,
                      self._trace.aim_and_fire,
                      {"result": "fire"},
                      1,
                      "identifier")

    # Two "fire" events for pivot 0 ...
    expected = pd.Series([1, 1], index=pd.Index([0.1, 0.3], name="Time"))
    assert_series_equal(expected, trigger.generate(0))

    # ... and a single "fire" event for pivot 1.
    expected = pd.Series([1], index=pd.Index([0.5], name="Time"))
    assert_series_equal(expected, trigger.generate(1))
def test_filter_prev_values(self):
    """Trigger works with a filter that depends on previous values of the
    same pivot"""

    # Example: trigger whenever the identifier is no longer 1 for "blank".
    class PrevEquals(object):
        """Remembers the last value seen and fires when it matched."""

        def __init__(self, wanted):
            self.previous = 0
            self.wanted = wanted

        def __call__(self, value):
            hit = (self.previous == self.wanted)
            self.previous = value
            return hit

    trace = trappy.BareTrace()
    events = [
        (0.1, ["blank", 1]),
        (0.2, ["fire", 1]),
        (0.3, ["blank", 0]),   # value is no longer 1, trigger
        (0.4, ["blank", 1]),
        (0.5, ["fire", 0]),    # This should NOT trigger
        (0.6, ["blank", 0]),   # value is no longer 1 for blank, trigger
    ]
    data_frame = pd.DataFrame.from_dict(
        collections.OrderedDict(events), orient="index")
    data_frame.columns = ["result", "identifier"]
    trace.add_parsed_event("aim_and_fire", data_frame)

    trigger = Trigger(trace, trace.aim_and_fire,
                      filters={"identifier": PrevEquals(1)},
                      value=-1, pivot="result")

    expected = pd.Series([-1, -1], index=[0.3, 0.6])
    assert_series_equal(expected, trigger.generate("blank"))
def sched_switch_out_trigger(run, pid, sched_switch_class):
    """Build a Trigger firing when *pid* is switched OUT of a CPU.

    Args:
        run (trappy.Run): A run object with SchedSwitch event
        pid (int): pid of the associated task
        sched_switch_class (trappy.Base): The SchedSwitch class

    Returns:
        Trigger on the SchedSwitch: OUT
    """
    # Filter on the previous-PID field; pivot on the CPU column.
    return Trigger(run,
                   sched_switch_class,
                   {PREV_PID_FIELD: pid},
                   SCHED_SWITCH_OUT,
                   CPU_FIELD)