Code Example #1
    def test_exp_ops(self):
        """Test exponentiation: Numeric"""
        parser = Parser(trappy.BareTrace())
        eqn = "3**3 * 2**4"
        self.assertEqual(parser.solve(eqn), 432)
        eqn = "3**(4/2)"
        self.assertEqual(parser.solve(eqn), 9)
Code Example #2
    def test_filtered_parse(self):
        """The Parser can filter a trace"""
        trace = trappy.FTrace()

        prs = Parser(trace, filters={"cdev_state": 3})
        dfr_res = prs.solve("devfreq_out_power:freq")
        self.assertEqual(len(dfr_res), 1)
Code Example #3
    def test_mul_ops(self):
        """Test Mult and Division: Numeric"""

        parser = Parser(trappy.Run())
        eqn = "(10 * 2 / 10)"
        self.assertEqual(parser.solve(eqn), 2)
        eqn = "-2 * 2 + 2 * 10 / 10"
        self.assertEqual(parser.solve(eqn), -2)
Code Example #4
    def test_single_func_call(self):
        """Test Single Function Call"""

        thermal_zone_id = 0
        parser = Parser(trappy.FTrace())
        eqn = "numpy.mean(trappy.thermal.Thermal:temp)"
        self.assertEqual(
            parser.solve(eqn)[thermal_zone_id],
            np.mean(parser.data.thermal.data_frame["temp"]))
Code Example #5
    def test_var_forward(self):
        """Test Forwarding: Variable"""

        thermal_zone_id = 0
        pvars = {}
        pvars["control_temp"] = 78000
        parser = Parser(trappy.Run(), pvars=pvars)
        eqn = "numpy.mean(trappy.thermal.Thermal:temp) < control_temp"
        self.assertTrue(parser.solve(eqn)[thermal_zone_id])
Code Example #6
    def test_no_events(self):
        """Test trying to parse absent data"""
        trace = trappy.FTrace()
        prs = Parser(trace)

        # cpu_frequency is an event we know how to parse, but it isn't present
        # in the test trace.
        self.assertRaisesRegexp(ValueError,
                                "No events found for cpu_frequency", prs.solve,
                                "cpu_frequency:frequency")
Code Example #7
    def test_func_forward(self):
        """Test Forwarding: Mixed"""

        thermal_zone_id = 0
        pvars = {}
        pvars["mean"] = np.mean
        pvars["control_temp"] = 78000
        parser = Parser(trappy.Run(), pvars=pvars)
        eqn = "mean(trappy.thermal.Thermal:temp) < control_temp"
        self.assertTrue(parser.solve(eqn)[thermal_zone_id])
Code Example #8
    def test_bool_ops_scalar(self):
        """Test Logical Operations: Vector"""

        thermal_zone_id = 0
        parser = Parser(trappy.Run())
        # The equation returns a boolean scalar
        eqn = "(numpy.mean(trappy.thermal.Thermal:temp) > 65000) && (numpy.mean(trappy.cpu_power.CpuOutPower) > 500)"
        self.assertTrue(parser.solve(eqn)[thermal_zone_id])
        eqn = "(numpy.mean(trappy.thermal.Thermal:temp) > 65000) || (numpy.mean(trappy.cpu_power.CpuOutPower) < 500)"
        self.assertTrue(parser.solve(eqn)[thermal_zone_id])
Code Example #9
    def test_funcparams_mul(self):
        """Test Mult and Division: Data"""

        thermal_zone_id = 0
        parser = Parser(trappy.Run())
        eqn = "trappy.thermal.Thermal:temp * 10.0"
        series = parser.data.thermal.data_frame["temp"]
        assert_series_equal(parser.solve(eqn)[thermal_zone_id], series * 10.0)
        eqn = "trappy.thermal.Thermal:temp / trappy.thermal.Thermal:temp * 10"
        assert_series_equal(parser.solve(eqn)[thermal_zone_id], series / series * 10)
Code Example #10
    def test_sum_operator(self):
        """Test Addition And Subtraction: Numeric"""

        parser = Parser(trappy.Run())
        # Simple equation
        eqn = "10 + 2 - 3"
        self.assertEqual(parser.solve(eqn), 9)
        # Equation with bracket and unary ops
        eqn = "(10 + 2) - (-3 + 2)"
        self.assertEqual(parser.solve(eqn), 13)
Code Example #11
    def test_parser_with_name(self):
        """Test equation using event name"""

        thermal_zone_id = 0
        parser = Parser(trappy.FTrace())
        # Equation with functions as parameters (Mixed)
        eqn = "numpy.mean(thermal:temp) + 1000"
        self.assertEqual(
            parser.solve(eqn)[thermal_zone_id],
            np.mean(parser.data.thermal.data_frame["temp"]) + 1000)
Code Example #12
    def test_bool_ops_vector(self):
        """Test Logical Operations: Vector"""

        thermal_zone_id = 0
        # The equation returns a vector mask
        parser = Parser(trappy.Run())
        eqn = "(trappy.thermal.ThermalGovernor:current_temperature > 77000)\
                & (trappy.pid_controller.PIDController:output > 2500)"
        mask = parser.solve(eqn)
        self.assertEqual(len(parser.ref(mask.dropna()[0])), 0)
Code Example #13
    def test_super_indexing(self):
        "Test if super-indexing works correctly" ""

        trace = trappy.FTrace()
        parser = Parser(trace)
        # The first event has fewer index values
        sol1 = parser.solve("trappy.thermal.Thermal:temp")
        # The second event has more index values
        sol2 = parser.solve("trappy.pid_controller.PIDController:output")
        # Super Indexing should result in len(sol2) > len(sol1)
        self.assertGreater(len(sol2), len(sol1))
Code Example #14
    def test_mul_ops(self):
        """Test Mult and Division: Numeric"""

        parser = Parser(trappy.BareTrace())
        eqn = "(10 * 2 / 10)"
        self.assertEqual(parser.solve(eqn), 2)
        eqn = "-2 * 2 + 2 * 10 / 10"
        self.assertEqual(parser.solve(eqn), -2)
        eqn = "3.5 // 2"
        self.assertEqual(parser.solve(eqn), 1)
        eqn = "5 % 2"
        self.assertEqual(parser.solve(eqn), 1)
Code Example #15
    def __init__(self, data, sig_a, sig_b, **kwargs):

        self._parser = Parser(data,
                              config=kwargs.pop("config", None),
                              **kwargs)
        self._a = sig_a
        self._b = sig_b
        self._pivot_vals, self._pivot = self._get_signal_pivots()

        # Concatenate the indices by doing any operation (say add)
        self._a_data = self._parser.solve(sig_a)
        self._b_data = self._parser.solve(sig_b)
Code Example #16
    def test_accessors_sum(self):
        """Test Addition And Subtraction: Data"""

        thermal_zone_id = 0
        parser = Parser(trappy.FTrace())
        # Equation with dataframe accessors
        eqn = "trappy.thermal.Thermal:temp + \
trappy.thermal.Thermal:temp"

        assert_series_equal(parser.solve(eqn)[thermal_zone_id],
                            2 * parser.data.thermal.data_frame["temp"],
                            check_names=False)
Code Example #17
    def test_windowed_parse(self):
        """Test that the parser can operate on a window of the trace"""
        trace = trappy.FTrace()

        prs = Parser(trace, window=(2, 3))
        dfr_res = prs.solve("thermal:temp")

        self.assertGreater(dfr_res.index[0], 2)
        self.assertLess(dfr_res.index[-1], 3)

        prs = Parser(trace, window=(4, None))
        dfr_res = prs.solve("thermal:temp")

        self.assertGreater(dfr_res.index[0], 4)
        self.assertEqual(dfr_res.index[-1], trace.thermal.data_frame.index[-1])

        prs = Parser(trace, window=(0, 1))
        dfr_res = prs.solve("thermal:temp")

        self.assertEqual(dfr_res.index[0], trace.thermal.data_frame.index[0])
        self.assertLess(dfr_res.index[-1], 1)
Code Example #18
    def test_cls_forward(self):
        """Test Forwarding: Classes"""

        cls = trappy.thermal.Thermal
        pvars = {}
        pvars["mean"] = np.mean
        pvars["control_temp"] = 78000
        pvars["therm"] = cls

        thermal_zone_id = 0
        parser = Parser(trappy.Run(), pvars=pvars)
        eqn = "mean(therm:temp) < control_temp"
        self.assertTrue(parser.solve(eqn)[thermal_zone_id])
Code Example #19
    def test_for_parsed_event(self):
        """Test if an added parsed event can be accessed"""

        trace = trappy.FTrace(scope="custom")
        dfr = pandas.DataFrame(
            {
                "l1_misses": [24, 535, 41],
                "l2_misses": [155, 11, 200],
                "cpu": [0, 1, 0]
            },
            index=pandas.Series([1.020, 1.342, 1.451], name="Time"))
        trace.add_parsed_event("pmu_counters", dfr)

        p = Parser(trace)
        self.assertEqual(len(p.solve("pmu_counters:cpu")), 3)
Code Example #20
    def test_funcparams_sum(self):
        """Test Addition And Subtraction: Functions"""

        thermal_zone_id = 0
        parser = Parser(trappy.FTrace())
        # Equation with functions as parameters (Mixed)
        eqn = "numpy.mean(trappy.thermal.Thermal:temp) + 1000"
        self.assertEqual(
            parser.solve(eqn)[thermal_zone_id],
            np.mean(parser.data.thermal.data_frame["temp"]) + 1000)
        # Multiple func params
        eqn = "numpy.mean(trappy.thermal.Thermal:temp) + numpy.mean(trappy.thermal.Thermal:temp)"
        self.assertEqual(
            parser.solve(eqn)[thermal_zone_id],
            np.mean(parser.data.thermal.data_frame["temp"]) * 2)
Code Example #21
    def estimate_from_trace(self, trace):
        """
        Estimate the energy consumption of the system by looking at a trace

        Uses the EAS energy model data, and the idle and DVFS conditions
        reported in the trace, to estimate the energy usage of the system at
        every given moment.

        Takes into account knowledge of power domains - where cpuidle makes
        impossible claims about idle states (e.g. a CPU in 'cluster sleep' while
        its cluster siblings are running), the states will be minimised.

        The accuracy of this is otherwise totally dependent on the accuracy of
        the EAS energy model and the kernel's information. This does not take
        into account the cost of idle state or DVFS transitions, nor any other
        conditions that are invisible to the kernel. The effect of any power
        decisions that the platform makes independently of the kernel cannot be
        seen in this data. Examples of this _might_ include firmware thermal
        management invisibly restricting CPU frequencies, or secure-world
        software with real-time constraints preventing deep CPU idle states.

        :param trace: The trace
        :type trace: Trace

        :returns: A DataFrame with a column for each node in the energy model,
                  labelled with the CPU members of the node joined by '-'s.
                  Shows the energy use by each node at each given moment.
                  If you don't care about those details, call ``.sum(axis=1)`` on
                  the returned DataFrame to get a Series that shows overall
                  estimated power usage over time.
        """
        if not trace.hasEvents('cpu_idle') or not trace.hasEvents('cpu_frequency'):
            raise ValueError('Requires cpu_idle and cpu_frequency trace events')

        idle = Parser(trace.ftrace).solve('cpu_idle:state')
        freqs = Parser(trace.ftrace).solve('cpu_frequency:frequency')

        columns = ['-'.join(str(c) for c in n.cpus)
                   for n in self.root.iter_nodes()
                   if n.active_states and n.idle_states]

        inputs = pd.concat([idle, freqs], axis=1, keys=['idle', 'freq']).ffill()

        # Drop stuff at the beginning where we don't have the inputs
        # (e.g. where we have had our first cpu_idle event but no cpu_frequency)
        inputs = inputs.dropna()
        # Convert to int wholesale so we can do things like use the values in
        # the inputs DataFrame as list indexes. The only reason we had floats
        # was to make room for NaN, but we've just dropped all the NaNs, so
        # that's fine.
        inputs = inputs.astype(int)
        # Drop consecutive duplicates (optimisation)
        inputs = inputs[(inputs.shift() != inputs).any(axis=1)]

        memo_cache = {}

        def f(input_row):
            # The code in this module is slow. Try not to call it too much.
            memo_key = tuple(input_row)
            if memo_key in memo_cache:
                return memo_cache[memo_key]

            # cpuidle doesn't understand shared resources so it will claim to
            # put a CPU into e.g. 'cluster sleep' while its cluster siblings are
            # active. Rectify those false claims.
            cpus_active = input_row['idle'] == -1
            deepest_possible = self._deepest_idle_idxs(cpus_active)
            idle_idxs = [min(i, j) for i, j in zip(deepest_possible,
                                                        input_row['idle'])]

            # Convert indexes to state names
            idle_states = [n.idle_state_by_idx(max(i, 0))
                           for n, i in zip(self.cpu_nodes, idle_idxs)]

            # We don't use tracked load, we just treat a CPU as active or idle,
            # so set util to 0 or 100%.
            utils = cpus_active * self.capacity_scale

            nrg = self.estimate_from_cpu_util(cpu_utils=utils,
                                              idle_states=idle_states,
                                              freqs=input_row['freq'])

            # nrg is a dict mapping CPU group tuples to energy values.
            # Unfortunately tuples don't play nicely as pandas column labels
            # because parts of its API treat that as nested indexing
            # (i.e. df[(0, 1)] sometimes means df[0][1]). So we'll give them
            # awkward names.

            nrg = {'-'.join(str(c) for c in k): v for k, v in nrg.items()}

            ret = pd.Series(nrg)
            memo_cache[memo_key] = ret
            return ret

        return inputs.apply(f, axis=1)
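
A minimal usage sketch for the method above, not part of the original source: here em stands for an instance of the energy-model class that defines estimate_from_trace(), and trace for a Trace whose ftrace contains cpu_idle and cpu_frequency events; both names are placeholders assumed for illustration.

    # Hypothetical usage; `em` and `trace` are assumed placeholder names.
    node_energy = em.estimate_from_trace(trace)
    # As the docstring suggests, sum across the node columns to get overall
    # estimated power usage over time.
    total_power = node_energy.sum(axis=1)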
Code Example #22
    def __init__(self, data, config, **kwargs):
        self._parser = Parser(data, config, **kwargs)
Code Example #23
    def __init__(self, data, config, topology=None):
        self._parser = Parser(data, config, topology)