Example #1
0
def check_equals_true(self, code):
    """Flag comparisons against the boolean literals ``true``/``false``.

    Searches *code* for either ``== true/false`` or ``true/false ==``
    and records an ``EQUALS_TRUE`` error when any occurrence is found.
    """
    bool_literal = Literal("true") | Literal("false")
    comparison = Group("==" + bool_literal) | Group(bool_literal + "==")
    matches = comparison.searchString(code)
    if len(matches):
        self.add_error(label="EQUALS_TRUE")
Example #2
0
 def check_equals_true(self, code):
     """Flag explicit comparisons of a variable with true/false literals.

     Matches ``<name> == true/false`` as well as ``true/false == <name>``
     anywhere in *code* and reports an ``EQUALS_TRUE`` error on a hit.
     """
     name = Word(alphanums)
     bool_literal = Literal("true") | Literal("false")
     comparison = (Group(name + "==" + bool_literal)
                   | Group(bool_literal + "==" + name))
     found = comparison.searchString(code)
     if len(found):
         self.add_error("EQUALS_TRUE")
Example #3
0
class Parser(object):

    r"""A parser class for solving simple
    data accesses and super-indexing data

    :param data: Trace Object
    :type data: instance of :mod:`trappy.ftrace.BareTrace` or a child
        class (like :mod:`trappy.ftrace.FTrace`)

    :param pvars: A dictionary of variables that need to be
        accessed from within the grammar
    :type pvars: dict

    :param method: The method to be used for reindexing data
        This can be one of the standard :mod:`pandas.DataFrame`
        methods (eg. pad, bfill, nearest). The default is pad
        or use the last valid observation.
    :type method: str

    :param limit: The number of indices a value will be propagated
        when reindexing. The default is None
    :type limit: int

    :param fill: Whether to fill the NaNs in the data.
        The default value is True.
    :type fill: bool

    :param window: A window of time in which to apply the data
        accesses.  By default the data accesses happen across the
        whole trace.  With the window parameter you can limit it to a
        window of time inside the trace.  The first element of the
        tuple is the starting time and the second the ending time (set
        to None for end of trace).

    :type window: tuple

    - **Operators**

        +----------------+----------------------+---------------+
        | Operation      |      operator        | Associativity |
        +================+======================+===============+
        | Exponentiation | \*\*                 |    Left       |
        +----------------+----------------------+---------------+
        |Unary           | \-                   |    Right      |
        +----------------+----------------------+---------------+
        | Multiply/Divide| \*, /, //, %         |    Left       |
        +----------------+----------------------+---------------+
        | Add/Subtract   | +, \-,               |    Left       |
        +----------------+----------------------+---------------+
        | Comparison     | >, <, >=, <=, ==, != |    Left       |
        +----------------+----------------------+---------------+
        | Logical        | &&, ||, \|, &        |    Left       |
        +----------------+----------------------+---------------+

    - **Data Accessors**

        Since the goal of the grammar is to provide an
        easy language to access and compare data
        from a :mod:`trappy.trace.FTrace` object. The parser provides
        a simple notation to access this data.

        *Statically Defined Events*
        ::

            import trappy
            from trappy.stats.grammar import Parser

            trace = trappy.FTrace("path/to/trace/file")
            parser = Parser(trace)
            parser.solve("trappy.thermal.Thermal:temp * 2")

        *Aliasing*
        ::

            import trappy
            from trappy.stats.grammar import Parser

            pvars = {}
            pvars["THERMAL"] = trappy.thermal.Thermal
            trace = trappy.FTrace("path/to/trace/file")
            parser = Parser(trace)
            parser.solve("THERMAL:temp * 2")

        *Using Event Name*
        ::

            import trappy
            from trappy.stats.grammar import Parser
            trace = trappy.FTrace("path/to/trace/file")
            parser = Parser(trace)
            parser.solve("thermal:temp * 2")

        The event :mod:`trappy.thermal.Thermal` is aliased
        as **THERMAL** in the grammar

        *Dynamic Events*
        ::

            import trappy
            from trappy.stats.grammar import Parser

            # Register Dynamic Event
            cls = trappy.register_dynamic_ftrace("my_unique_word", "event_name")

            pvars = {}
            pvars["CUSTOM"] = cls
            trace = trappy.FTrace("path/to/trace/file")
            parser = Parser(trace)
            parser.solve("CUSTOM:col * 2")

        .. seealso:: :mod:`trappy.dynamic.register_dynamic_ftrace`

    """

    def __init__(self, data, pvars=None, window=(0, None), **kwargs):
        if pvars is None:
            pvars = {}

        self.data = data
        self._pvars = pvars
        # Two grammars over the same "<event>:<column>" accessor syntax:
        # one pre-loads data into the aggregated frame, the other only
        # inspects the accessor and reports metadata.
        self._accessor = Group(
            FUNC_NAME + COLON + IDENTIFIER).setParseAction(self._pre_process)
        self._inspect = Group(
            FUNC_NAME + COLON + IDENTIFIER).setParseAction(self._parse_for_info)
        self._parse_expr = get_parse_expression(
            self._parse_func, self._parse_var_id)
        # Super-indexed aggregate of every column touched so far.
        self._agg_df = pd.DataFrame()
        self._pivot_set = set()
        self._limit = kwargs.get("limit", StatConf.REINDEX_LIMIT_DEFAULT)
        self._method = kwargs.get("method", StatConf.REINDEX_METHOD_DEFAULT)
        self._fill = kwargs.get("fill", StatConf.NAN_FILL_DEFAULT)
        self._window = window

    def solve(self, expr):
        """Parses and solves the input expression

        :param expr: The input expression
        :type expr: str

        :return: The return type may vary depending on
            the expression. For example:

            **Vector**
            ::

                import trappy
                from trappy.stats.grammar import Parser

                trace = trappy.FTrace("path/to/trace/file")
                parser = Parser(trace)
                parser.solve("trappy.thermal.Thermal:temp * 2")

            **Scalar**
            ::

                import trappy
                from trappy.stats.grammar import Parser

                trace = trappy.FTrace("path/to/trace/file")
                parser = Parser(trace)
                parser.solve("numpy.mean(trappy.thermal.Thermal:temp)")

            **Vector Mask**
            ::

                import trappy
                from trappy.stats.grammar import Parser

                trace = trappy.FTrace("path/to/trace/file")
                parser = Parser(trace)
                parser.solve("trappy.thermal.Thermal:temp > 65000")
        """

        # Pre-process accessors for indexing
        self._accessor.searchString(expr)
        return self._parse_expr.parseString(expr)[0]

    def _pivot(self, cls, column):
        """Pivot Data for concatenation"""

        data_frame = self._get_data_frame(cls)
        data_frame = handle_duplicate_index(data_frame)
        # The union of both indexes is the super-index the new data is
        # reindexed onto before concatenation.
        new_index = self._agg_df.index.union(data_frame.index)

        if hasattr(cls, "pivot") and cls.pivot:
            pivot = cls.pivot
            pivot_vals = list(np.unique(data_frame[pivot].values))
            data = {}

            for val in pivot_vals:
                data[val] = data_frame[data_frame[pivot] == val][[column]]
                if len(self._agg_df):
                    data[val] = data[val].reindex(
                        index=new_index,
                        method=self._method,
                        limit=self._limit)

            # swaplevel puts the column name on the outer level and the
            # pivot value on the inner one.
            return pd.concat(data, axis=1).swaplevel(0, 1, axis=1)

        if len(self._agg_df):
            data_frame = data_frame.reindex(
                index=new_index,
                method=self._method,
                limit=self._limit)

        return pd.concat({StatConf.GRAMMAR_DEFAULT_PIVOT: data_frame[
                         [column]]}, axis=1).swaplevel(0, 1, axis=1)

    def _pre_process(self, tokens):
        """Pre-process accessors for super-indexing"""

        params = tokens[0]
        # Already loaded into the aggregate: reuse it.
        if params[1] in self._agg_df.columns:
            return self._agg_df[params[1]]

        cls = params[0]
        column = params[1]

        # Resolve the event class: alias, trace-defined event name, or a
        # fully-qualified attribute path, in that order.
        if cls in self._pvars:
            cls = self._pvars[cls]
        elif cls in self.data.class_definitions:
            cls = self.data.class_definitions[cls]
        else:
            cls = str_to_attr(cls)

        data_frame = self._pivot(cls, column)
        self._agg_df = pd.concat(
            [self._agg_df, data_frame], axis=1)

        if self._fill:
            self._agg_df = self._agg_df.fillna(method="pad")

        return self._agg_df[params[1]]

    def _parse_for_info(self, tokens):
        """Parse Action for inspecting data accessors"""

        params = tokens[0]
        cls = params[0]
        column = params[1]
        info = {}
        info["pivot"] = None
        info["pivot_values"] = None

        if cls in self._pvars:
            cls = self._pvars[cls]
        elif cls in self.data.class_definitions:
            cls = self.data.class_definitions[cls]
        else:
            cls = str_to_attr(cls)

        data_frame = self._get_data_frame(cls)

        info["class"] = cls
        info["length"] = len(data_frame)
        if hasattr(cls, "pivot") and cls.pivot:
            info["pivot"] = cls.pivot
            info["pivot_values"] = list(np.unique(data_frame[cls.pivot]))
        info["column"] = column
        info["column_present"] = column in data_frame.columns
        return info

    def _parse_var_id(self, tokens):
        """A function to parse a variable identifier
        """

        params = tokens[0]
        # Try, in order: numeric literal, parser variable, then a column
        # of the aggregated data frame.
        try:
            return float(params)
        except (ValueError, TypeError):
            try:
                return self._pvars[params]
            except KeyError:
                return self._agg_df[params[1]]

    def _parse_func(self, tokens):
        """A function to parse a function string"""

        params = tokens[0]
        func_name = params[0]
        # User-supplied functions in pvars take precedence over
        # attribute-path resolution (e.g. "numpy.mean").
        if func_name in self._pvars and isinstance(
                self._pvars[func_name],
                types.FunctionType):
            func = self._pvars[func_name]
        else:
            func = str_to_attr(params[0])
        return func(*params[1])

    def _get_data_frame(self, cls):
        """Get the data frame from the BareTrace object, applying the window
        if set"""

        data_frame = getattr(self.data, cls.name).data_frame

        if self._window[1] is None:
            data_frame = data_frame.loc[self._window[0]:]
        else:
            data_frame = data_frame.loc[self._window[0]:self._window[1]]

        return data_frame

    def ref(self, mask):
        """Reference super indexed data with a boolean mask

        :param mask: A boolean :mod:`pandas.Series` that
            can be used to reference the aggregated data in
            the parser
        :type mask: :mod:`pandas.Series`

        :return: aggregated_data[mask]
        """

        return self._agg_df[mask]

    def inspect(self, accessor):
        """A function to inspect the accessor for information

        :param accessor: A data accessor of the format
            <event>:<column>
        :type accessor: str

        :return: A dictionary of information
        """
        return self._inspect.parseString(accessor)[0]
Example #4
0
class Parser(object):
    r"""A parser class for solving simple
    data accesses and super-indexing data

    :param pvars: A dictionary of variables that need to be
        accessed from within the grammar
    :type pvars: dict

    :param topology: Future support for the usage of topologies in
        grammar
    :type topology: :mod:`trappy.stats.Topology`


    - **Operators**

        +----------------+----------------------+---------------+
        | Operation      |      operator        | Associativity |
        +================+======================+===============+
        |Unary           | \-                   |    Right      |
        +----------------+----------------------+---------------+
        | Multiply/Divide| \*, /                |    Left       |
        +----------------+----------------------+---------------+
        | Add/Subtract   | +, \-,               |    Left       |
        +----------------+----------------------+---------------+
        | Comparison     | >, <, >=, <=, ==, != |    Left       |
        +----------------+----------------------+---------------+
        | Logical        | &&, ||, \|, &        |    Left       |
        +----------------+----------------------+---------------+

    - **Data Accessors**

        Since the goal of the grammar is to provide an
        easy language to access and compare data
        from a :mod:`trappy.run.Run` object. The parser provides
        a simple notation to access this data.

        *Statically Defined Events*
        ::

            import trappy
            from trappy.stats.grammar import Parser

            run = trappy.Run("path/to/trace/file")
            parser = Parser(run)
            parser.solve("trappy.thermal.Thermal:temp * 2")

        *Aliasing*
        ::

            import trappy
            from trappy.stats.grammar import Parser

            pvars = {}
            pvars["THERMAL"] = trappy.thermal.Thermal
            run = trappy.Run("path/to/trace/file")
            parser = Parser(run)
            parser.solve("THERMAL:temp * 2")

        The event :mod:`trappy.thermal.Thermal` is aliased
        as **THERMAL** in the grammar

        *Dynamic Events*
        ::

            import trappy
            from trappy.stats.grammar import Parser

            # Register Dynamic Event
            cls = trappy.register_dynamic("my_unique_word", "event_name")

            pvars = {}
            pvars["CUSTOM"] = cls
            run = trappy.Run("path/to/trace/file")
            parser = Parser(run)
            parser.solve("CUSTOM:col * 2")

        .. seealso:: :mod:`trappy.dynamic.register_dynamic`

    """
    def __init__(self, data, pvars=None, topology=None):
        if pvars is None:
            pvars = {}

        self.data = data
        self._pvars = pvars
        # Grammar for "<event>:<column>" accessors; matching one loads
        # the referenced column into the aggregated data frame.
        self._accessor = Group(FUNC_NAME + COLON + IDENTIFIER).setParseAction(
            self._pre_process)
        self._parse_expr = get_parse_expression(self._parse_func,
                                                self._parse_var_id)
        # Super-indexed aggregate of every column touched so far.
        self._agg_df = pd.DataFrame()
        if not topology:
            self.topology = Topology()
        else:
            self.topology = topology
        self._pivot_set = set()
        self._index_limit = None

    def solve(self, expr):
        """Parses and solves the input expression

        :param expr: The input expression
        :type expr: str

        :return: The return type may vary depending on
            the expression. For example:

            **Vector**
            ::

                import trappy
                from trappy.stats.grammar import Parser

                run = trappy.Run("path/to/trace/file")
                parser = Parser(run)
                parser.solve("trappy.thermal.Thermal:temp * 2")

            **Scalar**
            ::

                import trappy
                from trappy.stats.grammar import Parser

                run = trappy.Run("path/to/trace/file")
                parser = Parser(run)
                parser.solve("numpy.mean(trappy.thermal.Thermal:temp)")

            **Vector Mask**
            ::

                import trappy
                from trappy.stats.grammar import Parser

                run = trappy.Run("path/to/trace/file")
                parser = Parser(run)
                parser.solve("trappy.thermal.Thermal:temp > 65000")
        """

        # Pre-process accessors for indexing
        self._accessor.searchString(expr)
        return self._parse_expr.parseString(expr)[0]

    def _pivot(self, cls, column):
        """Pivot Data for concatenation"""

        data_frame = getattr(self.data, cls.name).data_frame

        if hasattr(cls, "pivot") and cls.pivot:
            pivot = cls.pivot
            pivot_vals = list(np.unique(data_frame[pivot].values))
            data = {}

            for val in pivot_vals:
                data[val] = data_frame[data_frame[pivot] == val][[column]]
                if len(self._agg_df):
                    # Align onto the existing super-index; "nearest" with
                    # limit=1 picks at most one adjacent observation.
                    data[val] = data[val].reindex(index=self._agg_df.index,
                                                  method="nearest",
                                                  limit=1)

            # swaplevel puts the column name on the outer level and the
            # pivot value on the inner one.
            return pd.concat(data, axis=1).swaplevel(0, 1, axis=1)

        if len(self._agg_df):
            data_frame = data_frame.reindex(index=self._agg_df.index,
                                            method="nearest",
                                            limit=1)

        return pd.concat({
            StatConf.GRAMMAR_DEFAULT_PIVOT: data_frame[[column]]
        },
                         axis=1).swaplevel(0, 1, axis=1)

    def _pre_process(self, tokens):
        """Pre-process accessors for super-indexing"""

        params = tokens[0]
        # Already loaded into the aggregate: reuse it.
        if params[1] in self._agg_df.columns:
            return self._agg_df[params[1]]

        cls = params[0]
        column = params[1]

        # Resolve the event class: alias in pvars, else a fully-qualified
        # attribute path.
        if cls in self._pvars:
            cls = self._pvars[cls]
        else:
            cls = str_to_attr(cls)

        data_frame = self._pivot(cls, column)
        self._agg_df = pd.concat([self._agg_df, data_frame], axis=1)

        return self._agg_df[params[1]]

    def _parse_var_id(self, tokens):
        """A function to parse a variable identifier
        """

        params = tokens[0]
        # Try, in order: numeric literal, parser variable, then a column
        # of the aggregated data frame.
        try:
            return float(params)
        except (ValueError, TypeError):
            try:
                return self._pvars[params]
            except KeyError:
                return self._agg_df[params[1]]

    def _parse_func(self, tokens):
        """A function to parse a function string"""

        params = tokens[0]
        func_name = params[0]
        # User-supplied functions in pvars take precedence over
        # attribute-path resolution (e.g. "numpy.mean").
        if func_name in self._pvars and isinstance(self._pvars[func_name],
                                                   types.FunctionType):
            func = self._pvars[func_name]
        else:
            func = str_to_attr(params[0])
        return func(*params[1])

    def ref(self, mask):
        """Reference super indexed data with a boolean mask

        :param mask: A boolean :mod:`pandas.Series` that
            can be used to reference the aggregated data in
            the parser
        :type mask: :mod:`pandas.Series`

        :return: aggregated_data[mask]
        """

        return self._agg_df[mask]