Esempio n. 1
0
    def param_info(self):
        """Parse this (procedure/function) parameter declaration and record
        its components as attributes on the instance.

        Walks the flattened token stream once and sets:

        * ``self.param_name`` -- first non-whitespace token (the name).
        * ``self.in_`` / ``self.out_`` -- the IN / OUT mode keywords, if any.
        * ``self.nocopy_`` -- a ``NOCOPY`` token (only recognised after OUT).
        * ``self.data_type_`` -- a slice of ``self.tokens`` covering the data
          type: either delimited by a DEFAULT keyword / assignment token, or,
          if neither occurs, running to the end of the token list.
        """
        self.param_name = None
        self.in_ = None
        self.out_ = None
        self.nocopy_ = None
        self.data_type_ = None
        # Flattened index of the last recognised marker token (name / IN /
        # OUT / NOCOPY).  The data type is assumed to start two flattened
        # tokens after it (marker + one whitespace token in between).
        temp_i = 0
        for i, token in enumerate(self.flatten()):
            if not token.is_whitespace:
                if not self.param_name:
                    temp_i = i
                    self.param_name = token
                    continue
                if not self.in_ and imt(token, m=(T.Keyword, 'IN')):
                    temp_i = i
                    self.in_ = token
                    continue
                if not self.out_ and imt(token, m=(T.Keyword, 'OUT')):
                    temp_i = i
                    self.out_ = token
                    continue
                if self.out_ and token.value == 'NOCOPY':
                    temp_i = i
                    self.nocopy_ = token
                    continue
                # A DEFAULT keyword or an assignment token terminates the
                # data type; everything between the last marker and here
                # (exclusive) is the type.
                if not self.data_type_ and imt(
                        token, m=[(T.Keyword, 'DEFAULT')], t=T.Assignment):
                    # _start is a leaf token; climb its parents until we
                    # reach a direct child of *self*, so token_index() below
                    # can resolve it within self.tokens.
                    _start = list(self.flatten())[temp_i + 2]
                    while _start.parent:
                        if _start.parent == self:
                            break
                        else:
                            _start = _start.parent

                    # Same climb for the token two positions before the
                    # DEFAULT/assignment (skipping the separating whitespace).
                    _end = list(self.flatten())[i - 2]
                    while _end.parent:
                        if _end.parent == self:
                            break
                        else:
                            _end = _end.parent

                    self.data_type_ = self.tokens[self.token_index(_start):self
                                                  .token_index(_end) + 1]
                    # self.data_type_ = list(self.flatten())[temp_i + 2:i - 1]
                    # self.data_type_ = list(self.flatten())[i-1]
        # No DEFAULT/assignment seen: the data type runs from just after the
        # last marker to the end of the child token list.
        if not self.data_type_:
            # Go to param
            _param = list(self.flatten())[temp_i + 2]
            while _param.parent:
                if _param.parent == self:
                    break
                else:
                    _param = _param.parent
            # NOTE(review): the +1 on len(self.tokens) is harmless for a
            # slice upper bound but looks unintentional -- confirm.
            self.data_type_ = self.tokens[self.token_index(_param
                                                           ):len(self.tokens) +
                                          1]
Esempio n. 2
0
    def group_notfound(self, tlist, cls=sql.NotFound):
        """Group *cls* (default ``sql.NotFound``) constructs inside *tlist*.

        Scans *tlist* token by token; when a ``cls.M_OPEN`` match follows a
        name-like token (Name ttype, Identifier instance, or Keyword ttype),
        that previous token's index is remembered as the group start, and a
        subsequent ``cls.M_CLOSE`` match closes the group.  Recurses into
        already-grouped children that are not themselves *cls* instances.

        :param tlist: token list to scan and mutate in place.
        :param cls: the group class whose M_OPEN / M_CLOSE markers delimit
            the construct.
        """
        # NOTE(review): ``opens`` starts as a list but is only ever assigned
        # an index or ``None`` afterwards; the initial [] is falsy, so it
        # behaves like "no open seen yet" -- confirm this is intentional.
        opens = []
        # Compensates token indices for tokens consumed by group_tokens().
        tidx_offset = 0
        for idx, token in enumerate(list(tlist)):
            tidx = idx - tidx_offset

            if token.is_whitespace:
                # ~50% of tokens will be whitespace. Will checking early
                # for them avoid 3 comparisons, but then add 1 more comparison
                # for the other ~50% of tokens...
                continue

            if token.is_group and not isinstance(token, cls):
                # Check inside previously grouped (ie. parenthesis) if group
                # of different type is inside (ie, case). though ideally  should
                # should check for all open/close tokens at once to avoid recursion
                # n = grouping()
                self.group_notfound(token, cls)
                # A nested group invalidates any pending open marker.
                opens = None
                continue

            if token.match(*cls.M_OPEN):
                # Only treat this as an opener when it directly follows a
                # name-like token; the group then starts at that token.
                _prev_idx, _prev_tkn = tlist.token_prev(tidx, skip_cm=True)
                if _prev_tkn and (_prev_tkn.ttype == sql.T.Name
                                  or isinstance(_prev_tkn, sql.Identifier)
                                  or _prev_tkn.ttype == sql.T.Keyword)):
                    opens = _prev_idx
            elif imt(token, m=cls.M_CLOSE) and opens:
                # NOTE(review): ``and opens`` is falsy for index 0, so a
                # group starting at position 0 would be skipped -- confirm.
                close_idx = tidx
                tlist.group_tokens(cls, opens, close_idx)
                tidx_offset += close_idx - opens
            else:
                opens = None
Esempio n. 3
0
 def valid(token):
     """Accept tokens matching the surrounding *ttypes*/*sqlcls* criteria,
     plus any keyword token."""
     return bool(imt(token, t=ttypes, i=sqlcls)
                 or (token and token.is_keyword))
Esempio n. 4
0
 def valid(token):
     """Accept tokens matching the surrounding *ttypes*/*sqlcls* criteria,
     plus the NULL keyword."""
     return bool(imt(token, t=ttypes, i=sqlcls)
                 or (token and token.is_keyword
                     and token.normalized == 'NULL'))
Esempio n. 5
0
    def group_package(self, tlist):
        """Group PL/SQL package constructs in *tlist*.

        Recurses into sub-groups first, then looks for a
        ``sql.PackageHeading.M_OPEN`` marker.  When the corresponding
        ``M_NEXT`` marker exists, the whole parent statement is regrouped
        as ``sql.Statement``, the heading span (M_NEXT .. M_CLOSE) becomes a
        ``sql.PackageHeading``, and everything from the opener to the end of
        *tlist* becomes a ``sql.Package``.
        """
        # pidx, ptoken = tlist.token_next_by(m=sql.PackageHeading.M_OPEN)
        # while ptoken:
        #     tidx, token = tlist.token_next_by(m=sql.PackageHeading.M_NEXT)
        #     if token:
        #         last = tlist.parent.token_last(skip_cm=True)
        #         tlist.parent.group_tokens(sql.Statement, 0, tlist.parent.token_index(last), extend=True)
        #         aidx, token = tlist.token_next_by(m=sql.PackageHeading.M_CLOSE)
        #         tlist.group_tokens(sql.PackageHeading, tidx, aidx)
        #         tlist.group_tokens(sql.Package, pidx, len(tlist.tokens))  # .get_fp()
        #         # print tlist
        #     pidx, ptoken = tlist.token_next_by(m=sql.PackageHeading.M_OPEN, idx=pidx)

        # NOTE(review): tidx_offset is initialised and used to derive tidx
        # but never incremented in this method -- confirm whether grouping
        # here ever shrinks the list being iterated.
        tidx_offset = 0
        for idx, token in enumerate(list(tlist)):
            tidx = idx - tidx_offset

            if token.is_whitespace:
                continue

            # Recurse into sub-groups that are not already packages.
            if token.is_group and not isinstance(token, sql.Package):
                self.group_package(token)

            if not isinstance(token, sql.Package) and imt(token, m=sql.PackageHeading.M_OPEN):
                iidx, itoken = tlist.token_next_by(m=sql.PackageHeading.M_NEXT, idx=tidx)
                if itoken:
                    # Extend the parent statement to cover its full span
                    # before grouping the heading and package below.
                    last = tlist.parent.token_last(skip_cm=True)
                    tlist.parent.group_tokens(sql.Statement, 0, tlist.parent.token_index(last), extend=True)
                    # NOTE(review): this rebinds the loop variable ``token``;
                    # harmless because of the ``continue`` below, but fragile.
                    aidx, token = tlist.token_next_by(m=sql.PackageHeading.M_CLOSE)
                    tlist.group_tokens(sql.PackageHeading, iidx, aidx)
                    tlist.group_tokens(sql.Package, tidx, len(tlist.tokens))  # .get_fp()
                continue
Esempio n. 6
0
    def post(tlist, pidx, tidx, nidx):
        """Validate the token after the operator (issue261): the group ends
        at *nidx* only when that token is name-like or a bracketed/functional
        expression; otherwise it ends at the operator itself."""
        name_like_classes = sql.SquareBrackets, sql.Function
        name_like_types = T.Name, T.String.Symbol, T.Wildcard
        nxt = None if nidx is None else tlist[nidx]
        if imt(nxt, i=name_like_classes, t=name_like_types):
            return pidx, nidx
        return pidx, tidx
Esempio n. 7
0
def group_order(tlist):
    """Merge each ORDER keyword (ASC/DESC) into the Identifier or Number
    token that precedes it."""
    idx, order_tok = tlist.token_next_by(t=T.Keyword.Order)
    while order_tok:
        prev_idx, prev_tok = tlist.token_prev(idx)
        if imt(prev_tok, i=sql.Identifier, t=T.Number):
            tlist.group_tokens(sql.Identifier, prev_idx, idx)
            # The new group now sits at the previous token's index.
            idx = prev_idx
        idx, order_tok = tlist.token_next_by(t=T.Keyword.Order, idx=idx)
Esempio n. 8
0
def group_comments(tlist):
    """Collapse each run of comment tokens (and the whitespace between
    them) into a single ``sql.Comment`` group."""
    cidx, ctoken = tlist.token_next_by(t=T.Comment)
    while ctoken:
        # Find the first token that is neither a comment nor whitespace;
        # the run to be grouped ends just before it.
        stop_idx, stop_tok = tlist.token_not_matching(
            lambda tk: imt(tk, t=T.Comment) or tk.is_whitespace, idx=cidx)
        if stop_tok is not None:
            last_idx, _ = tlist.token_prev(stop_idx, skip_ws=False)
            tlist.group_tokens(sql.Comment, cidx, last_idx)

        cidx, ctoken = tlist.token_next_by(t=T.Comment, idx=cidx)
Esempio n. 9
0
    def token_next(self, idx, skip_ws=True, skip_cm=False, _reverse=False):
        """Returns the next token relative to *idx*.

        If *skip_ws* is ``True`` (the default) whitespace tokens are ignored.
        If *skip_cm* is ``True`` comments are ignored.
        ``None`` is returned if there's no next token.
        """
        if idx is None:
            return None, None
        # Much calling code pre-compensates by passing the current token's
        # index, so step past it before matching.
        idx += 1

        def accept(tk):
            if skip_ws and tk.is_whitespace:
                return False
            if skip_cm and imt(tk, t=T.Comment, i=Comment):
                return False
            return True

        return self._token_matching(accept, idx, reverse=_reverse)
Esempio n. 10
0
    def token_first(self, skip_ws=True, skip_cm=False):
        """Returns the first child token.

        If *skip_ws* is ``True`` (the default), whitespace
        tokens are ignored.

        if *skip_cm* is ``True`` (default: ``False``), comments are
        ignored too.
        """
        def accept(tk):
            if skip_ws and tk.is_whitespace:
                return False
            # Matches both the T.Comment ttype and the Comment group class,
            # mirroring token_next's behaviour.
            if skip_cm and imt(tk, t=T.Comment, i=Comment):
                return False
            return True

        return self._token_matching(accept)[1]
Esempio n. 11
0
 def get_parameters(self):
     """Return a list of parameters."""
     parenthesis = self.tokens[-1]
     # Skip the opening and closing punctuation of the parenthesis and
     # return the first parameter-like content found inside it.
     for tok in parenthesis.tokens[1:-1]:
         if isinstance(tok, IdentifierList):
             return tok.get_identifiers()
         if imt(tok, i=(Function, Identifier, FunctionParam), t=T.Literal):
             return [tok]
     return []
Esempio n. 12
0
    def _group_transaction(self, btkn):
        """Group transaction spans inside *btkn* and report whether one
        containing a COMMIT-like close marker was found.

        Scans *btkn*'s children: recurses into sub-groups first, then groups
        the tokens from *start_idx* up to a ``sql.Transaction.M_CLOSE`` match
        (followed by ``;``) into a ``sql.Transaction``.

        :param btkn: block token (e.g. ``sql.Begin``, ``sql.For``, ``sql.If``)
            whose children are grouped in place.
        :returns: ``True`` if a transaction close marker was found (directly
            or in a nested group), else ``False``.
        """
        # Compensates indices for tokens consumed by group_tokens() below.
        tidx_offset = 0
        # NOTE(review): pidx/prev_ are initialised but never used -- confirm.
        pidx, prev_, is_commit = None, None, False
        # Skip the block opener: BEGIN keyword, or everything up to LOOP.
        start_idx = 0
        if isinstance(btkn, sql.Begin):
            start_idx = 1
        elif isinstance(btkn, sql.For):
            start_idx = btkn.loop_idx + 1

        for idx, token in enumerate(list(btkn)):
            tidx = idx - tidx_offset

            if token.is_whitespace:
                continue

            if token.is_group and not isinstance(token, sql.Transaction):
                # Recurse; if the nested group contained a commit, wrap the
                # span leading up to it as a Transaction here too.
                if self._group_transaction(token):
                    if isinstance(token, sql.If):
                        # NOTE(review): this grp is immediately overwritten
                        # when btkn is not an If -- confirm intent.
                        grp = btkn.group_tokens(sql.Transaction, tidx, tidx + 1)
                    if not isinstance(btkn, sql.If):
                        grp = btkn.group_tokens(sql.Transaction, start_idx, tidx - 1)
                        # Restart the next span after the new group (the +3
                        # / +2 / -1 / +1 adjustments account for the group
                        # token itself and surrounding whitespace).
                        start_idx = btkn.token_index(grp)
                        start_idx += 3
                        tidx_offset += tidx - start_idx
                        tidx_offset += 2
                        if isinstance(token, sql.If):
                            start_idx = start_idx - 1
                            tidx_offset = tidx_offset + 1
                    is_commit = True
                continue

            if imt(token, m=sql.Transaction.M_CLOSE):
                # Only group when the close marker is directly followed by a
                # statement terminator.
                to_idx, next_ = btkn.token_next(tidx)
                if next_.value == ';':
                    if not isinstance(btkn, sql.If):
                        grp = btkn.group_tokens(sql.Transaction, start_idx, to_idx)
                        start_idx = btkn.token_index(grp)
                        start_idx += 1
                        tidx_offset += to_idx - start_idx
                        tidx_offset += 1
                    is_commit = True
                continue

        return is_commit
Esempio n. 13
0
 def valid(token):
     """Accept tokens matching the surrounding sqlcls / m_role / ttypes
     criteria."""
     matches = imt(token, i=sqlcls, m=m_role, t=ttypes)
     return matches
Esempio n. 14
0
 def token_next_by(self, i=None, m=None, t=None, idx=-1, end=None):
     """Return the next (index, token) after *idx* matching the i/m/t
     criteria; *idx* defaults to -1 so the search starts at position 0."""
     def accept(tk):
         return imt(tk, i, m, t)
     return self._token_matching(accept, idx + 1, end)
Esempio n. 15
0
 def valid_prev(token):
     """A previous token is valid when it is name-like: a Name or string
     symbol ttype, or an Identifier / SquareBrackets group."""
     allowed_classes = sql.SquareBrackets, sql.Identifier
     allowed_types = T.Name, T.String.Symbol
     return imt(token, i=allowed_classes, t=allowed_types)
Esempio n. 16
0
 def valid(token):
     """Accept tokens matching the surrounding sqlcls / ttypes criteria."""
     matches = imt(token, i=sqlcls, t=ttypes)
     return matches
Esempio n. 17
0
 def match(token):
     """Accept operator and wildcard tokens."""
     operator_types = (T.Operator, T.Wildcard)
     return imt(token, t=operator_types)
Esempio n. 18
0
 def valid_next(token):
     """A next token is valid when it exists and is not a DML/DDL keyword."""
     dml_ddl = T.DML, T.DDL
     # De Morgan of the original: not-a-DML/DDL-match AND token present.
     return not (imt(token, t=dml_ddl) or token is None)