Example #1
    def __init__(self, elt, member, parent):
        Type.__init__(self, member.name)
        self.is_list = True
        self.member = member
        self.parent = parent

        if elt.tag == 'list':
            elts = list(elt)
            self.expr = Expression(elts[0] if len(elts) else elt, self)
        elif elt.tag == 'valueparam':
            self.expr = Expression(elt, self)

        self.size = member.size if member.fixed_size() else None
        self.nmemb = self.expr.nmemb if self.expr.fixed_size() else None
Example #2
    def __init__(self, elt, member, parent):
        Type.__init__(self, member.name)
        self.is_expr = True
        self.member = member
        self.parent = parent

        self.expr = Expression(list(elt)[0], self)

        self.size = member.size
        self.nmemb = 1
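
Examples #1 and #2 both take the first child of an XML element: calling list() on an xml.etree.ElementTree element returns its sub-elements, so list(elt)[0] is the first child, which is then wrapped in an Expression. A minimal sketch of that access pattern on a hypothetical fragment (the element name and content are illustrative, not taken from the real protocol descriptions):

import xml.etree.ElementTree as ET

# hypothetical fragment shaped like the XML these classes consume
elt = ET.fromstring('<list><fieldref>len</fieldref></list>')

children = list(elt)           # list() on an Element yields its child elements
first = children[0]            # the <fieldref> element, i.e. list(elt)[0]
print(first.tag, first.text)   # fieldref len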
Example #3
File: graph.py  Project: fau-is/cc-dcr
    def create_expressions_dcr_graph(self):
        """
        Creates all expressions within a dcr graph
        :return:
        """
        for expression in self.dcr_xml_root.iter('expression'):
            expression_id = expression.get('id')
            expression_str = expression.get('value')
            expression_tmp = Expression(expression_str, expression_id)
            self.Expressions.append(expression_tmp)
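
Example #3 walks the parsed XML with ElementTree's iter(), which yields every element with the given tag at any depth, and reads the id and value attributes with get(). A minimal sketch of that traversal, assuming a simplified DCR XML layout (the real cc-dcr schema may differ):

import xml.etree.ElementTree as ET

doc = """
<dcrgraph>
  <expressions>
    <expression id="e1" value="count &gt; 0"/>
    <expression id="e2" value="state = executed"/>
  </expressions>
</dcrgraph>
"""
root = ET.fromstring(doc)

# iter('expression') finds matching elements anywhere in the tree
for expression in root.iter('expression'):
    print(expression.get('id'), expression.get('value'))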
Example #4
    def rule_id(self, stack):
        it = iter(reversed(stack))

        try:
            identifier = Parser.check_token(next(it), TokenType.IDENTIFIER)
            Parser.check_token(next(it), ExprPrecedence.SHIFT)
        except TokenMismatchError:
            return False

        new_expr = Expression(FAGenerator.load_c(identifier.data))
        Parser.reduce(stack, new_expr)

        return True
Example #5
    def rule_quan_0_N(self, stack):
        it = iter(reversed(stack))

        try:
            Parser.check_token(next(it), TokenType.META_QUANTIFIER_0_N)
            expr = Parser.check_token(next(it), Expression)
            Parser.check_token(next(it), ExprPrecedence.SHIFT)
        except TokenMismatchError:
            return False

        new_expr = Expression(FAGenerator.iterate(expr.automat))
        Parser.reduce(stack, new_expr)

        return True
Example #6
    def rule_brackets(self, stack):
        it = iter(reversed(stack))

        try:
            Parser.check_token(next(it), TokenType.RIGHT_BRACKET)
            expr = Parser.check_token(next(it), Expression)
            Parser.check_token(next(it), TokenType.LEFT_BRACKET)
            Parser.check_token(next(it), ExprPrecedence.SHIFT)
        except TokenMismatchError:
            return False

        new_expr = Expression(expr.automat)
        Parser.reduce(stack, new_expr)

        return True
Example #7
    def rule_or(self, stack):
        it = iter(reversed(stack))

        try:
            expr_1 = Parser.check_token(next(it), Expression)
            Parser.check_token(next(it), TokenType.META_OR)
            expr_2 = Parser.check_token(next(it), Expression)
            Parser.check_token(next(it), ExprPrecedence.SHIFT)
        except TokenMismatchError:
            return False

        new_expr = Expression(FAGenerator.union(expr_2.automat,
                                                expr_1.automat))
        Parser.reduce(stack, new_expr)

        return True
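
Examples #4 to #7 are reduction rules of a shift-reduce parser: each rule looks backwards through the stack, verifies the expected token sequence (plus a precedence marker, ExprPrecedence.SHIFT), and on success replaces the matched tokens with a single Expression built from a finite automaton. Parser.check_token and Parser.reduce are not shown in these snippets; the toy below only illustrates the match-then-reduce pattern under assumed semantics (check_token raises on a mismatch, reduce pops the matched items) and omits the precedence marker, so it is not the project's actual Parser.

from enum import Enum, auto


class TokenType(Enum):
    META_OR = auto()


class Token:
    def __init__(self, type_, data=None):
        self.type = type_
        self.data = data


class Expression:
    def __init__(self, automat):
        self.automat = automat


class TokenMismatchError(Exception):
    pass


def check_token(token, expected):
    # hypothetical behaviour: accept either a class (isinstance check) or a token type
    if isinstance(expected, type) and isinstance(token, expected):
        return token
    if getattr(token, 'type', None) is expected:
        return token
    raise TokenMismatchError(token)


def rule_or(stack):
    # expr META_OR expr  ->  expr   (union of the two automata)
    it = iter(reversed(stack))
    try:
        expr_1 = check_token(next(it), Expression)
        check_token(next(it), TokenType.META_OR)
        expr_2 = check_token(next(it), Expression)
    except (TokenMismatchError, StopIteration):
        return False
    del stack[-3:]                       # pop the three matched items
    stack.append(Expression(('union', expr_2.automat, expr_1.automat)))
    return True


stack = [Expression('a'), Token(TokenType.META_OR), Expression('b')]
print(rule_or(stack), stack[0].automat)  # True ('union', 'a', 'b')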
Example #8
def read(filename):  # noqa: C901 (many simple branches)
    maps = _get_maps()
    lineno = 0
    for line in open(filename, 'r'):
        lineno += 1
        line = line.strip()
        # skip comments and blank lines
        if re.match(r'(#.*)?$', line, re.IGNORECASE):
            continue
        # parse options with a single integer argument
        m = re.match(
            r'(?P<keyword>threads|ldap_version|bind_timelimit|timelimit|'
            r'idle_timelimit|reconnect_sleeptime|reconnect_retrytime|pagesize|'
            r'nss_min_uid|nss_uid_offset|nss_gid_offset)\s+(?P<value>\d+)',
            line, re.IGNORECASE)
        if m:
            globals()[m.group('keyword').lower()] = int(m.group('value'))
            continue
        # parse options with a single boolean argument
        m = re.match(
            r'(?P<keyword>referrals|nss_nested_groups|nss_getgrent_skipmembers|'
            r'nss_disable_enumeration)\s+(?P<value>%s)' %
            ('|'.join(_boolean_options.keys())), line, re.IGNORECASE)
        if m:
            globals()[m.group('keyword').lower()] = _boolean_options[m.group(
                'value').lower()]
            continue
        # parse options with a single no-space value
        m = re.match(
            r'(?P<keyword>uid|gid|bindpw|rootpwmodpw|sasl_mech)\s+(?P<value>\S+)',
            line, re.IGNORECASE)
        if m:
            globals()[m.group('keyword').lower()] = m.group('value')
            continue
        # parse options with a single value that can contain spaces
        m = re.match(
            r'(?P<keyword>binddn|rootpwmoddn|sasl_realm|sasl_authcid|'
            r'sasl_authzid|sasl_secprops|krb5_ccname|tls_cacertdir|'
            r'tls_cacertfile|tls_randfile|tls_ciphers|tls_cert|tls_key|'
            r'pam_password_prohibit_message)\s+(?P<value>\S.*)', line,
            re.IGNORECASE)
        if m:
            globals()[m.group('keyword').lower()] = m.group('value')
            continue
        # log <SCHEME> [<LEVEL>]
        m = re.match(
            r'log\s+(?P<scheme>syslog|/\S*)(\s+(?P<level>%s))?' %
            ('|'.join(_log_levels.keys())), line, re.IGNORECASE)
        if m:
            logs.append((m.group('scheme'),
                         _log_levels[str(m.group('level')).lower()]))
            continue
        # uri <URI>
        m = re.match(r'uri\s+(?P<uri>\S+)', line, re.IGNORECASE)
        if m:
            # FIXME: support multiple URI values
            # FIXME: support special DNS and DNS:domain values
            global uri
            uri = m.group('uri')
            continue
        # base <MAP>? <BASEDN>
        m = re.match(
            r'base\s+((?P<map>%s)\s+)?(?P<value>\S.*)' %
            ('|'.join(maps.keys())), line, re.IGNORECASE)
        if m:
            mod = maps[str(m.group('map')).lower()]
            if not hasattr(mod, 'bases'):
                mod.bases = []
            mod.bases.append(m.group('value'))
            continue
        # filter <MAP> <SEARCHFILTER>
        m = re.match(
            r'filter\s+(?P<map>%s)\s+(?P<value>\S.*)' %
            ('|'.join(maps.keys())), line, re.IGNORECASE)
        if m:
            mod = maps[m.group('map').lower()]
            mod.filter = m.group('value')
            continue
        # scope <MAP>? <SCOPE>
        m = re.match(
            r'scope\s+((?P<map>%s)\s+)?(?P<value>%s)' %
            ('|'.join(maps.keys()), '|'.join(_scope_options.keys())), line,
            re.IGNORECASE)
        if m:
            mod = maps[str(m.group('map')).lower()]
            mod.scope = _scope_options[m.group('value').lower()]
            continue
        # map <MAP> <ATTRIBUTE> <ATTMAPPING>
        m = re.match(
            r'map\s+(?P<map>%s)\s+(?P<attribute>\S+)\s+(?P<value>\S.*)' %
            ('|'.join(maps.keys())), line, re.IGNORECASE)
        if m:
            mod = maps[m.group('map').lower()]
            attribute = m.group('attribute')
            if attribute not in mod.attmap:
                raise ParseError(filename, lineno,
                                 'attribute %s unknown' % attribute)
            mod.attmap[attribute] = m.group('value')
            # TODO: filter out attributes that cannot be an expression
            continue
        # deref <DEREF>
        m = re.match(
            r'deref\s+(?P<value>%s)' % '|'.join(_deref_options.keys()), line,
            re.IGNORECASE)
        if m:
            global deref
            deref = _deref_options[m.group('value').lower()]
            continue
        # nss_initgroups_ignoreusers <USER,USER>|<ALLLOCAL>
        m = re.match(r'nss_initgroups_ignoreusers\s+(?P<value>\S.*)', line,
                     re.IGNORECASE)
        if m:
            users = m.group('value')
            if users.lower() == 'alllocal':
                # get all users known to the system currently (since nslcd
                # isn't yet running, this should work)
                import pwd
                users = (x.pw_name for x in pwd.getpwall())
            else:
                users = users.split(',')
                # TODO: warn about unknown users
            nss_initgroups_ignoreusers.update(users)
            continue
        # pam_authz_search <FILTER>
        m = re.match(r'pam_authz_search\s+(?P<value>\S.*)', line,
                     re.IGNORECASE)
        if m:
            from expr import Expression
            pam_authz_searches.append(Expression(m.group('value')))
            # TODO: check pam_authz_search expression to only contain
            # username, service, ruser, rhost, tty, hostname, fqdn, dn or
            # uid variables
            continue
        # ssl <on|off|start_tls>
        m = re.match(r'ssl\s+(?P<value>%s)' % '|'.join(_ssl_options.keys()),
                     line, re.IGNORECASE)
        if m:
            global ssl
            ssl = _ssl_options[m.group('value').lower()]
            continue
        # sasl_canonicalize yes|no
        m = re.match(
            r'(ldap_?)?sasl_(?P<no>no)?canon(icali[sz]e)?\s+(?P<value>%s)' %
            ('|'.join(_boolean_options.keys())), line, re.IGNORECASE)
        if m:
            global sasl_canonicalize
            sasl_canonicalize = _boolean_options[m.group('value').lower()]
            if m.group('no'):
                sasl_canonicalize = not sasl_canonicalize
            continue
        # tls_reqcert <demand|hard|yes...>
        m = re.match(
            r'tls_reqcert\s+(?P<value>%s)' %
            ('|'.join(_tls_reqcert_options.keys())), line, re.IGNORECASE)
        if m:
            global tls_reqcert
            tls_reqcert = _tls_reqcert_options[m.group('value').lower()]
            continue
        # validnames /REGEX/i?
        m = re.match(r'validnames\s+/(?P<value>.*)/(?P<flags>[i]?)$', line,
                     re.IGNORECASE)
        if m:
            global validnames
            flags = 0 | re.IGNORECASE if m.group('flags') == 'i' else 0
            validnames = re.compile(m.group('value'), flags=flags)
            continue
        # reconnect_invalidate <MAP>,<MAP>,...
        m = re.match(r'reconnect_invalidate\s+(?P<value>\S.*)', line,
                     re.IGNORECASE)
        if m:
            dbs = re.split('[ ,]+', m.group('value').lower())
            for db in dbs:
                if db not in list(maps.keys()) + ['nfsidmap']:
                    raise ParseError(filename, lineno, 'map %s unknown' % db)
            reconnect_invalidate.update(dbs)
            continue
        # unrecognised line
        raise ParseError(filename, lineno, 'error parsing line %r' % line)
    # if logging is not configured, default to syslog
    if not logs:
        logs.append(('syslog', logging.INFO))
    # dump config (debugging code)
    for k, v in globals().items():
        if not k.startswith('_'):
            logging.debug('%s=%r', k, v)
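
Example #8 reads the configuration one line at a time and dispatches on a series of anchored re.match() patterns with named groups; for the simple options the converted value is written straight into a module-level variable through globals(). A minimal sketch of that keyword-dispatch idea with a shortened option list (not the project's full grammar):

import re

threads = None
bind_timelimit = None


def read_line(line):
    # options that take a single integer argument, stored into module globals
    m = re.match(r'(?P<keyword>threads|bind_timelimit)\s+(?P<value>\d+)',
                 line.strip(), re.IGNORECASE)
    if m:
        globals()[m.group('keyword').lower()] = int(m.group('value'))
        return True
    return False


read_line('threads 5')
read_line('BIND_TIMELIMIT 30')
print(threads, bind_timelimit)  # 5 30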
Example #9
    def __init__(self, value):
        """Parse the expression as a string."""
        self.expression = Expression(value[1:-1])
        super(ExpressionMapping, self).__init__(value)
Example #10
    def setUp(self):
        self.expression = Expression()
Example #11
    def __init__(self, value):
        """Parse the expression as a string."""
        self.value = value
        self.expression = Expression(value[1:-1])
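
Examples #9 and #11 hand Expression the value with its first and last characters removed (value[1:-1]); presumably those characters are the delimiters, such as surrounding quotes, that mark the attribute value as an expression rather than a plain attribute name. The slicing itself:

value = '"${gidNumber:-$uidNumber}"'   # hypothetical delimited expression value
print(value[1:-1])                     # ${gidNumber:-$uidNumber}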