Exemplo n.º 1
0
    def __init__(self,
                 runs,
                 columns,
                 templates,
                 pivot,
                 filters,
                 zip_constraints=True):
        """Normalize the inputs and expand them into constraints.

        Args:
            runs: A run (or list of runs) to build constraints from
            columns: Column name(s) to extract, one per run (or shared)
            templates: Template(s) used to interpret each run
            pivot: The pivot column (semantics defined by the
                constraint engine)
            filters: Filters to apply to the data
            zip_constraints (bool): When True the runs/columns/templates
                vectors are zipped pairwise; otherwise their product
                is taken
        """

        # Normalize every input to a list so they can be expanded uniformly
        self._ip_vec = []
        self._ip_vec.append(listify(runs))
        self._ip_vec.append(listify(columns))
        self._ip_vec.append(listify(templates))

        # Materialize the lengths: a bare map() is a one-shot iterator on
        # Python 3 and would be exhausted by the max() call below, leaving
        # self._lens unusable afterwards.
        self._lens = list(map(len, self._ip_vec))
        self._max_len = max(self._lens)
        self._pivot = pivot
        self._filters = filters
        self._constraints = []

        self._run_expanded = False
        self._expand()
        if zip_constraints:
            self._populate_zip_constraints()
        else:
            self._populate_constraints()
Exemplo n.º 2
0
    def __init__(self, run, topology, execnames):
        """Args:
                run (trappy.Run): A single trappy.Run object
                    or a path that can be passed to trappy.Run
                topology (trappy.stats.Topology): The CPU topology
                execnames (str, list): List of execnames or a single task
        """

        self._execnames = listify(execnames)
        self._run = Utils.init_run(run)
        # PIDs are resolved from the execnames set on the line above
        self._pids = self._populate_pids()
        self._topology = topology
        self._asserts = self._populate_asserts()
        # NOTE(review): presumably attaches the generated assert helpers
        # to this instance — confirm against _populate_methods
        self._populate_methods()
Exemplo n.º 3
0
    def __init__(self, triggers):
        """
            Args:
                triggers (stat.Trigger): A list (or single) trigger
        """

        self._triggers = listify(triggers)
        # _unify() presumably merges the triggers' indices into one —
        # its result seeds the base Indexer (TODO confirm)
        super(MultiTriggerIndexer, self).__init__(self._unify())
Exemplo n.º 4
0
    def __init__(self, run, topology, execnames):
        """Args:
                run (trappy.Run): A single trappy.Run object
                    or a path that can be passed to trappy.Run
                topology (trappy.stats.Topology): The CPU topology
                execnames (str, list): List of execnames or a single task
        """

        self._execnames = listify(execnames)
        self._run = Utils.init_run(run)
        # PIDs are resolved from the execnames set on the line above
        self._pids = self._populate_pids()
        self._topology = topology
        self._asserts = self._populate_asserts()
        # NOTE(review): presumably attaches the generated assert helpers
        # to this instance — confirm against _populate_methods
        self._populate_methods()
Exemplo n.º 5
0
 def assertFirstCpu(self, cpus, window=None):
     """
     Check whether the first CPU the task ran on is one of ``cpus``

     Args:
         cpus (int, list): acceptable CPU(s)
         window (tuple): optional (start, end) tuple limiting the
             scope of the calculation
     """
     acceptable = listify(cpus)
     return self.getFirstCpu(window=window) in acceptable
Exemplo n.º 6
0
 def assertFirstCpu(self, cpus, window=None):
     """
     Check whether the first CPU the task ran on is one of ``cpus``

     Args:
         cpus (int, list): acceptable CPU(s)
         window (tuple): optional (start, end) tuple limiting the
             scope of the calculation
     """
     acceptable = listify(cpus)
     return self.getFirstCpu(window=window) in acceptable
Exemplo n.º 7
0
    def __init__(self, runs, columns, templates, pivot, filters,
                 zip_constraints=True):
        """Normalize the inputs and expand them into constraints.

        Args:
            runs: A run (or list of runs) to build constraints from
            columns: Column name(s) to extract, one per run (or shared)
            templates: Template(s) used to interpret each run
            pivot: The pivot column (semantics defined by the
                constraint engine)
            filters: Filters to apply to the data
            zip_constraints (bool): When True the runs/columns/templates
                vectors are zipped pairwise; otherwise their product
                is taken
        """

        # Normalize every input to a list so they can be expanded uniformly
        self._ip_vec = []
        self._ip_vec.append(listify(runs))
        self._ip_vec.append(listify(columns))
        self._ip_vec.append(listify(templates))

        # Materialize the lengths: a bare map() is a one-shot iterator on
        # Python 3 and would be exhausted by the max() call below, leaving
        # self._lens unusable afterwards.
        self._lens = list(map(len, self._ip_vec))
        self._max_len = max(self._lens)
        self._pivot = pivot
        self._filters = filters
        self._constraints = []

        self._run_expanded = False
        self._expand()
        if zip_constraints:
            self._populate_zip_constraints()
        else:
            self._populate_constraints()
Exemplo n.º 8
0
    def _check_data(self):
        """Internal function to check the received data

        Verifies that ``self.runs`` is non-empty and that its elements
        are all :mod:`pandas.DataFrame` objects, or — failing that —
        that templates were supplied to interpret them.

        :raises ValueError: if no data was received, or the data is
            neither DataFrames nor accompanied by templates
        """

        data = listify(self.runs)

        # Fail fast on empty input
        if not data:
            raise ValueError("Empty Data received")

        # all() with a generator short-circuits and works on Python 3,
        # unlike the original reduce() over a map() (reduce is no longer
        # a builtin there)
        data_frame = all(isinstance(x, DataFrame) for x in data)
        if not data_frame and not self.templates:
            raise ValueError(
                "Cannot understand data. Accepted DataFormats are pandas.DataFrame and trappy.Run (with templates)")
Exemplo n.º 9
0
    def __init__(self, run, topology, execnames):
        """Args:
                run (trappy.Run): A single trappy.Run object
                    or a path that can be passed to trappy.Run
                topology(trappy.stats.Topology): The CPU topology
                execnames(str, list): List of execnames or single task
        """

        self._execnames = listify(execnames)
        self._run = Utils.init_run(run)
        # PIDs are resolved from the execnames set above
        self._pids = self._populate_pids()
        self._topology = topology
        self._asserts = self._populate_asserts()
        self._populate_methods()
Exemplo n.º 10
0
    def __init__(self, run, topology, execnames):
        """Args:
                run (trappy.Run): A single trappy.Run object
                    or a path that can be passed to trappy.Run
                topology(trappy.stats.Topology): The CPU topology
                execnames(str, list): List of execnames or single task
        """

        self._execnames = listify(execnames)
        self._run = Utils.init_run(run)
        # PIDs are resolved from the execnames set above
        self._pids = self._populate_pids()
        self._topology = topology
        self._asserts = self._populate_asserts()
        self._populate_methods()
Exemplo n.º 11
0
    def assertFirstCpu(self, cpus, window=None):
        """
        Check if the Task started (first ran on in the duration
        of the trace) on a particular CPU(s)

        :param cpus: A list of acceptable CPUs
        :type cpus: int, list

        :param window: A (start, end) tuple to limit the scope
            of the calculation
        :type window: tuple

        .. seealso:: :mod:`bart.sched.SchedAssert.SchedAssert.getFirstCPU`
        """

        first_cpu = self.getFirstCpu(window=window)
        cpus = listify(cpus)
        return first_cpu in cpus
Exemplo n.º 12
0
    def assertFirstCpu(self, cpus, window=None):
        """
        Check if the Task started (first ran on in the duration
        of the trace) on a particular CPU(s)

        :param cpus: A list of acceptable CPUs
        :type cpus: int, list

        :param window: A (start, end) tuple to limit the scope
            of the calculation
        :type window: tuple

        .. seealso:: :mod:`bart.sched.SchedAssert.SchedAssert.getFirstCPU`
        """

        first_cpu = self.getFirstCpu(window=window)
        cpus = listify(cpus)
        return first_cpu in cpus
Exemplo n.º 13
0
def apply_filter_kv(key, value, data_frame, mask):
    """Internal function to apply a key value
       filter to a data_frame and update the initial
       condition provided in mask.

       Returns:
           Mask to index the data frame
    """

    # A key that is not a column leaves the mask untouched
    if key not in data_frame.columns:
        return mask

    for candidate in listify(value):
        mask = mask & (data_frame[key] == candidate)
    return mask
Exemplo n.º 14
0
    def aggregate(self, **kwargs):
        """
            Aggregate implementation that aggregates
            triggers for a given topological level

            Args:
                level can be specified. If not the default level is
                taken to be all

            Returns:
                A scalar or a vector aggregated result.
                Each group in the level produces an element
                in the result list with a one to one
                index correspondence

                groups["level"] = [[1,2], [3,4]]
                result = [result_1, result_2]
        """

        level = kwargs.pop("level", "all")

        # This function is a hot spot in the code. It is
        # worth considering a memoize decorator to cache
        # the function. The memoization can also be
        # maintained by the aggregator object. This will
        # help the code scale efficiently
        level_groups = self.topology.get_level(level)
        result = []

        if not self._aggregated:
            self._aggregate_base()

        for group in level_groups:
            group = listify(group)

            # Apply the same None-check to the first node as to the rest:
            # the original called self._aggfunc unconditionally here, which
            # raised TypeError whenever no aggregation function was set.
            if self._aggfunc is not None:
                level_res = self._aggfunc(self._result[group[0]], **kwargs)
            else:
                level_res = self._result[group[0]]

            for node in group[1:]:
                if self._aggfunc is not None:
                    node_res = self._aggfunc(self._result[node], **kwargs)
                else:
                    node_res = self._result[node]

                level_res += node_res

            result.append(level_res)

        return result
Exemplo n.º 15
0
    def __init__(self, run, topology, execnames=None, pids=None):
        """
        :param run: A trappy.Run object or a path that can be passed
            to trappy.Run
        :param topology: The CPU topology
        :param execnames: Execname(s) used to look up the PIDs;
            mutually exclusive with ``pids``
        :param pids: An explicit list of PIDs; mutually exclusive with
            ``execnames``

        :raises ValueError: if both, or neither, of ``execnames`` and
            ``pids`` are given
        """

        self._run = Utils.init_run(run)
        self._topology = topology

        if execnames and pids:
            # The original message read "Either pids or execnames must be
            # specified", which describes the opposite condition.
            raise ValueError('Only one of pids or execnames may be specified')
        if execnames:
            self._execnames = listify(execnames)
            self._pids = self._populate_pids()
        elif pids:
            self._pids = pids
        else:
            raise ValueError('One of PIDs or execnames must be specified')

        self._asserts = self._populate_asserts()
        self._populate_methods()
Exemplo n.º 16
0
    def __init__(self, run, topology, execnames=None, pids=None):
        """
        :param run: A trappy.Run object or a path that can be passed
            to trappy.Run
        :param topology: The CPU topology
        :param execnames: Execname(s) used to look up the PIDs;
            mutually exclusive with ``pids``
        :param pids: An explicit list of PIDs; mutually exclusive with
            ``execnames``

        :raises ValueError: if both, or neither, of ``execnames`` and
            ``pids`` are given
        """

        self._run = Utils.init_run(run)
        self._topology = topology

        if execnames and pids:
            # The original message read "Either pids or execnames must be
            # specified", which describes the opposite condition.
            raise ValueError('Only one of pids or execnames may be specified')
        if execnames:
            self._execnames = listify(execnames)
            self._pids = self._populate_pids()
        elif pids:
            self._pids = pids
        else:
            raise ValueError('One of PIDs or execnames must be specified')

        self._asserts = self._populate_asserts()
        self._populate_methods()
Exemplo n.º 17
0
    def aggregate(self, **kwargs):
        """
        Aggregate implementation that aggregates
        triggers for a given topological level. All the arguments passed to
        it are forwarded to the aggregator function except level (if present)

        :return: A scalar or a vector aggregated result. Each group in the
            level produces an element in the result list with a one to one
            index correspondence
            ::

                groups["level"] = [[1,2], [3,4]]
                result = [result_1, result_2]
        """

        level = kwargs.pop("level", "all")

        # This function is a hot spot in the code. It is
        # worth considering a memoize decorator to cache
        # the function. The memoization can also be
        # maintained by the aggregator object. This will
        # help the code scale efficiently
        level_groups = self.topology.get_level(level)
        result = []

        if not self._aggregated:
            self._aggregate_base()

        for group in level_groups:
            group = listify(group)

            # Apply the same None-check to the first node as to the rest:
            # the original called self._aggfunc unconditionally here, which
            # raised TypeError whenever no aggregation function was set.
            if self._aggfunc is not None:
                level_res = self._aggfunc(self._result[group[0]], **kwargs)
            else:
                level_res = self._result[group[0]]

            for node in group[1:]:
                if self._aggfunc is not None:
                    node_res = self._aggfunc(self._result[node], **kwargs)
                else:
                    node_res = self._result[node]

                level_res += node_res

            result.append(level_res)

        return result
Exemplo n.º 18
0
    def __init__(self,
                 reference_trace,
                 trace,
                 topology,
                 execnames,
                 aggfunc=sconf.csum):
        """
        :param reference_trace: The reference trace (run object or path)
        :param trace: The trace to compare against the reference
        :param topology: The CPU topology
        :param execnames: Execname(s) whose PIDs are compared
        :param aggfunc: Aggregation function used to build the matrix

        :raises RuntimeError: if the two runs do not contain the same
            number of PIDs for ``execnames``
        """

        run = Utils.init_run(trace)
        reference_run = Utils.init_run(reference_trace)

        self._execnames = listify(execnames)
        self._reference_pids = self._populate_pids(reference_run)
        self._pids = self._populate_pids(run)
        self._dimension = len(self._pids)
        self._topology = topology

        # Validate the PID counts before generating the matrix: the
        # original built the (potentially expensive) matrix first and
        # only then raised on mismatched runs.
        if len(self._pids) != len(self._reference_pids):
            raise RuntimeError(
                "The runs do not have the same number of PIDs for {0}".format(
                    str(execnames)))

        self._matrix = self._generate_matrix(run, reference_run, aggfunc)
Exemplo n.º 19
0
    def __init__(
            self,
            reference_trace,
            trace,
            topology,
            execnames,
            aggfunc=sconf.csum):
        """
        :param reference_trace: The reference trace (run object or path)
        :param trace: The trace to compare against the reference
        :param topology: The CPU topology
        :param execnames: Execname(s) whose PIDs are compared
        :param aggfunc: Aggregation function used to build the matrix

        :raises RuntimeError: if the two runs do not contain the same
            number of PIDs for ``execnames``
        """

        run = Utils.init_run(trace)
        reference_run = Utils.init_run(reference_trace)

        self._execnames = listify(execnames)
        self._reference_pids = self._populate_pids(reference_run)
        self._pids = self._populate_pids(run)
        self._dimension = len(self._pids)
        self._topology = topology

        # Validate the PID counts before generating the matrix: the
        # original built the (potentially expensive) matrix first and
        # only then raised on mismatched runs.
        if len(self._pids) != len(self._reference_pids):
            raise RuntimeError(
                "The runs do not have the same number of PIDs for {0}".format(
                    str(execnames)))

        self._matrix = self._generate_matrix(run, reference_run, aggfunc)
Exemplo n.º 20
0
def apply_filter_kv(key, value, data_frame, mask):
    """Internal function to apply a key value
    filter to a data_frame and update the initial
    condition provided in mask.

    :param key: The column of ``data_frame`` to filter on; if the
        column is absent the mask is returned unchanged

    :param value: The value to checked for

    :param data_frame: The data to be filtered
    :type data_frame: :mod:`pandas.DataFrame`

    :param mask: Initial Condition Mask
    :type mask: :mod:`pandas.Series`

    :return: A **mask** to index the data frame
    """

    value = listify(value)
    if key not in data_frame.columns:
        return mask
    else:
        for val in value:
            mask = mask & (data_frame[key] == val)
        return mask
Exemplo n.º 21
0
def apply_filter_kv(key, value, data_frame, mask):
    """Internal function to apply a key value
    filter to a data_frame and update the initial
    condition provided in mask.

    :param value: The value to checked for

    :param data_frame: The data to be filtered
    :type data_frame: :mod:`pandas.DataFrame`

    :param mask: Initial Condition Mask
    :type mask: :mod:`pandas.Series`

    :return: A **mask** to index the data frame
    """

    # A key that is not a column leaves the mask untouched
    if key not in data_frame.columns:
        return mask

    for candidate in listify(value):
        mask = mask & (data_frame[key] == candidate)
    return mask
Exemplo n.º 22
0
    def __init__(self, triggers):
        """
            Args:
                triggers (stat.Trigger): A list (or single) trigger
        """

        self._triggers = listify(triggers)
        super(MultiTriggerIndexer, self).__init__(self._unify())
Exemplo n.º 23
0
    def __init__(self, triggers):
        """
            Args:
                triggers (stat.Trigger): A list (or single) trigger
        """

        self._triggers = listify(triggers)
        super(MultiTriggerIndexer, self).__init__(self._unify())