def handle_conclusion(self, conclusion):
    """Validate the conclusion (right-hand) side of a rule.

    Raises ParseError when the conclusion contains a negation or any
    pattern `badPatternInConclusion` matches; otherwise returns the list
    of fact patterns found by `getConclusionFactPattern`.
    """
    if Parser.negationPattern.findall(conclusion):
        raise ParseError('negation found in conclusion')
    if Parser.badPatternInConclusion.findall(conclusion):
        raise ParseError('wrong pattern found in conclusion')
    return Parser.getConclusionFactPattern.findall(conclusion)
def append_conclusion(self, left_member, right_member, isDoubleEquivalence):
    """Register a rule `left_member => right_member` in self.rules.

    self.rules maps each conclusion fact to the list of premises
    (left members) that imply it.  For a double equivalence (<=>),
    the mirrored rule is registered as well.

    Raises ParseError when either member is missing.
    """
    if not right_member:
        raise ParseError('No right member')
    if not left_member:
        raise ParseError('No left member')
    # Bug fix: the original called `Parser.handle_conclusion(right_member)`,
    # which bound right_member as `self` and omitted the `conclusion`
    # argument entirely (guaranteed TypeError).  Call it on the instance.
    conclusions = self.handle_conclusion(right_member)
    if isDoubleEquivalence:
        # A <=> B also means B => A; pass None so the recursion stops.
        self.append_conclusion(right_member, left_member, None)
    if conclusions:
        for conclusion in conclusions:
            # setdefault replaces the manual "create list if absent" dance
            self.rules.setdefault(conclusion, []).append(left_member)
def add_rules(self, line):
    """Parse one rule line (LHS, implication symbol, RHS) and record it.

    Raises ParseError when the line has no implication symbol.
    Group 1 is the left member, group 3 the right member, and group 2
    the implication symbol itself (used as the double-equivalence flag).
    """
    matches = Parser.implicationPattern.match(line)
    if matches is None:
        raise ParseError('No implication symbol found on line')
    self.append_conclusion(matches.group(1), matches.group(3), matches.group(2))
def from_postfix_to_graph(postfix):
    """Turn a postfix token list into a nested operator graph.

    ['a', 'b', '+', 'C', '|'] should become:
    { '|': [ 'C', { '+': ['A', 'B'] } ] }

    Each operator node is a one-key dict mapping the operator Token to
    its two operands.  Note: operands are popped last-first, so the pair
    appears in reverse of source order — fine for the commutative
    operators used here (TODO confirm no non-commutative operator is ever
    fed in).

    Raises ParseError when the expression is a lone operator.
    Returns {} for empty input, and falls through (None) if no operator
    is found in a multi-token list, as before.
    """
    # Bug fix: the original evaluated postfix[0] before checking for an
    # empty list, raising IndexError instead of returning {}.
    if not postfix:
        return {}
    if len(postfix) == 1:
        if isinstance(postfix[0], Token) and postfix[0].is_operator():
            raise ParseError('only one operator')
        return postfix.pop()
    operandes = []
    for index, token in enumerate(postfix):
        if isinstance(token, Token) and token.is_operator():
            # Take the last two operands on the stack and attach them to
            # the current operator, e.g. postfix = [a, b, c, +, ^]:
            # operands fill until '+' (operands = [a, b, c]), the
            # operation becomes { '+': [c, b] }, and we recurse on
            # [a, { '+': [c, b] }, ^].
            operandes.append({token: [operandes.pop(), operandes.pop()]})
            return from_postfix_to_graph(operandes + postfix[index + 1:])
        operandes.append(token)
def handle_close_parenthesis(output, pile):
    """Flush operators from *pile* to *output* down to the matching '('.

    The open parenthesis itself is discarded.  Raises ParseError when
    the stack runs out before an open parenthesis is found.
    """
    while pile:
        if pile[-1].is_open_parenthesis() is True:
            pile.pop()  # discard the matching '('
            return
        output.append(pile.pop())
    raise ParseError('-no corresponding parenthesis')
def parse_string_to_token(cls, string):
    """Tokenize an expression string into Token instances.

    ex: parse_string_to_token('(!a + B) | c')
        => [ '(', '!a', '+', 'B', ')', '|', 'c' ]

    Raises ParseError when the string does not match the global token
    pattern, or when two consecutive non-parenthesis tokens share the
    same type (e.g. two operands or two operators in a row).
    """
    if not Parser.tokenPattern.match(string):
        raise ParseError('bad token pattern')
    result = []
    previous = None  # last non-parenthesis token seen
    for raw in Parser.tokenPatternBis.findall(string):
        current = Token(raw)
        result.append(current)
        if not current.is_parenthesis():
            if Token.token_are_the_same_type(previous, current):
                raise ParseError('token repetition detected')
            previous = current
    return result
def parse_facts_or_queries(self, line):
    """Parse a facts/queries line into a list of fact identifiers.

    A '!' prefixes the following identifier in the result
    (e.g. '!AB' -> ['!A', 'B']).  Returns [] for an empty line.

    Raises ParseError when a non-empty line does not match the
    facts/queries pattern.
    """
    regexResult = Parser.getFactsOrQueryPattern.match(line)
    if not regexResult:
        if len(line) > 0:
            raise ParseError('Bad pattern ')
        # Bug fix: an empty, non-matching line previously fell through to
        # regexResult.group(1) and crashed with AttributeError on None.
        return []
    listMatchedItems = []
    addNegation = False
    for matchedItem in regexResult.group(1):
        if matchedItem == '!':
            # remember the negation for the next identifier
            addNegation = True
        else:
            listMatchedItems.append(
                '!' + matchedItem if addNegation else matchedItem)
            addNegation = False
    return listMatchedItems
def from_tokens_to_postfix(tokens):
    """Shunting-yard pass: reorder an infix Token list into postfix.

    Operators are routed through handle_operator, parentheses through
    the stack / handle_close_parenthesis, operands go straight to the
    output.  Raises ParseError on an unclosed open parenthesis.
    """
    output = []
    pile = []
    for token in tokens:
        if token.is_operator():
            handle_operator(token, output, pile)
        elif token.is_open_parenthesis():
            pile.append(token)
        elif token.is_close_parenthesis():
            handle_close_parenthesis(output, pile)
        else:
            output.append(token)
    # Bug fix: `'(' in pile` compared a raw string against Token objects,
    # so the unclosed-parenthesis check only worked if Token.__eq__
    # happens to compare against strings.  Ask the tokens directly.
    if any(t.is_open_parenthesis() for t in pile):
        raise ParseError('- open parenthesis not closed')
    output.extend(reversed(pile))
    return output
def set_query(self, line):
    """Parse and store the queries line; a second one is a format error."""
    if self.queries:
        raise ParseError('file not well formatted, queries given twice')
    self.queries = self.parse_facts_or_queries(line)
def set_facts(self, line):
    """Parse and store the facts line; a second one is a format error."""
    if self.facts:
        raise ParseError('file not well formatted, facts given twice')
    self.facts = self.parse_facts_or_queries(line)