from string import ascii_lowercase

# tokenize(), merge_tokens() and select_brackets() are project helpers defined elsewhere.


def simplify_division(node):
    """Cancel repeated factors in a flat '*' / '/' chain of tokens."""
    variable_ctrs = {}
    order = []
    # Register every operand (letter or digit) in order of first appearance.
    for t in node:
        if t.ch in set(ascii_lowercase) or str(t.ch).isdigit():
            if str(t.ch) not in variable_ctrs:
                order.append(str(t.ch))
                variable_ctrs[str(t.ch)] = 0
    # Prepend an implicit '*' so the chain always alternates operator, operand.
    if node[0].ch in set(ascii_lowercase):
        node = tokenize('*') + node
    # Count the net power of every operand: '*' increments, '/' decrements.
    for i in range(0, len(node), 2):
        sign = node[i].ch
        var = str(node[i + 1].ch)
        if sign == '/':
            variable_ctrs[var] -= 1
        else:
            variable_ctrs[var] += 1
    # Rebuild the chain from the surviving operands.
    new_node = []
    for var in order:
        if var != '1':
            ctr = variable_ctrs[var]
            if ctr < 0:
                new_node += ['/', var] * abs(ctr)
            elif ctr > 0:
                new_node += ['*', var] * ctr
    if not new_node:
        # Everything cancelled out.
        return tokenize('1')
    if new_node[0] == '/':
        new_node = ['1'] + new_node
    elif new_node[0] == '*':
        new_node = new_node[1:]
    return tokenize(new_node)
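# Usage sketch (hedged: the token objects produced by tokenize() are defined elsewhere
# in this project, so the exact return type is assumed). For a flat '*'/'/' chain,
# repeated factors cancel:
#
#   simplify_division(tokenize('a*b/a'))   # -> tokens equivalent to 'b'
#   simplify_division(tokenize('a/a'))     # -> tokens equivalent to '1'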
def add_zero(node):
    """Drop '+ 0' / '- 0' terms from a flat sum of tokens."""
    if len(node) < 2:
        return node
    # Prepend an implicit '+' so the node always alternates sign, operand.
    if node[0].priority == 0:
        node = tokenize('+') + node
    new_node = []
    for i in range(0, len(node) - 1, 2):
        ch_1, ch_2 = node[i].ch, node[i + 1].ch
        # Keep every (sign, operand) pair whose operand is not zero.
        if str(ch_2) != '0':
            new_node.append(str(ch_1))
            new_node.append(str(ch_2))
    return tokenize(new_node)
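# Usage sketch (hedged, same assumption about tokenize()): zero terms of a flat sum
# are dropped, e.g.
#
#   add_zero(tokenize('x+0+y'))   # -> tokens equivalent to 'x+y'
#                                 #    (the rebuilt list may carry a leading '+')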
def mul_by_zero(node):
    """Collapse a multiplication chain to '0' if any factor is multiplied by zero."""
    new_node = [str(t.ch) for t in node]
    for i in range(len(node) - 1):
        ch_1, ch_2 = node[i].ch, node[i + 1].ch
        if (str(ch_1) == '0' and str(ch_2) == '*') or (str(ch_1) == '*' and str(ch_2) == '0'):
            new_node = ['0']
            break
    return tokenize(new_node)
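# Usage sketch (hedged): a single zero factor nullifies the whole product, e.g.
#
#   mul_by_zero(tokenize('a*0*b'))   # -> tokens equivalent to '0'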
def simplify_sum(node):
    """Collect like terms in a flat '+' / '-' chain of tokens."""
    variable_ctrs = {}
    order = []
    # Register every operand (letter or digit) in order of first appearance.
    for t in node:
        if t.ch in set(ascii_lowercase) or str(t.ch).isdigit():
            if str(t.ch) not in variable_ctrs:
                order.append(str(t.ch))
                variable_ctrs[str(t.ch)] = 0
    # Prepend an implicit '+' so the chain always alternates sign, operand.
    if node[0].ch != '-':
        node = tokenize('+') + node
    # Count how many times each operand is added or subtracted.
    for i in range(0, len(node), 2):
        sign = node[i].ch
        var = str(node[i + 1].ch)
        if sign == '-':
            variable_ctrs[var] -= 1
        else:
            variable_ctrs[var] += 1
    # Rebuild the sum, writing each operand once with its collected coefficient.
    new_node = []
    for var in order:
        ctr = variable_ctrs[var]
        if ctr < 0:
            coeff = [str(-ctr)]
            if coeff == ['1']:
                coeff = []
            else:
                coeff += ['*']
            new_node += ['-'] + coeff + [var]
        elif ctr > 0:
            coeff = [str(ctr)]
            if coeff == ['1']:
                coeff = ['+']
            else:
                coeff = ['+'] + coeff + ['*']
            new_node += coeff + [var]
    if not new_node:
        # All terms cancelled out.
        return tokenize('0')
    return tokenize(new_node)
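# Usage sketch (hedged): like terms of a flat sum are collected, e.g.
#
#   simplify_sum(tokenize('a+a+b'))   # -> tokens equivalent to '+2*a+b'
#   simplify_sum(tokenize('a-a'))     # -> tokens equivalent to '0'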
def simplify_with_var(node):
    """Fold the numeric part of a flat node that mixes a variable term with constants."""
    pos = node[0].pos
    end_pos = node[-1].pos
    digits = [i.ch for i in node if str(i.ch).isdigit()]
    if len(node) > 4 and len(digits) > 1:
        calculable = []    # operator/number tokens that can be evaluated
        incalculable = []  # operator/variable tokens that must be kept as-is
        tokens = node.copy()
        first_var = []
        prev_token = None
        # A leading variable has no preceding operator; treat it separately.
        if tokens[0].ch in set(ascii_lowercase):
            first_var.append(tokens.pop(0))
            prev_token = first_var[0]
        # Split the remaining tokens into the numeric and the variable part,
        # carrying each operand's preceding operator along with it.
        for token in tokens:
            if token.ch in set(ascii_lowercase):
                incalculable.append(prev_token)
                incalculable.append(token)
            elif str(token.ch).isdigit():
                if prev_token:
                    calculable.append(prev_token)
                calculable.append(token)
            prev_token = token
        incalculable = first_var + incalculable
        calculable = merge_tokens(*calculable, integer_devision=True)
        # Re-attach the folded constant to the variable part.
        if incalculable[0].ch in set(ascii_lowercase):
            if int(calculable[0].ch) < 0:
                result = incalculable + tokenize('-') + calculable
            else:
                result = incalculable + tokenize('+') + calculable
        else:
            result = calculable + incalculable
    else:
        result = node
    result[0].pos = pos
    result[0].end_pos = end_pos
    return result
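# Usage sketch (hedged: merge_tokens() is assumed to evaluate the numeric part):
# constants in a sum that also contains a variable are folded, e.g.
#
#   simplify_with_var(tokenize('a+2+3'))   # -> tokens equivalent to 'a+5'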
def to_zero_pwr(node):
    """Replace anything raised to the zero power with '1' (scans the node right to left)."""
    zero_pwr = 0
    new_node = []
    for token in node[::-1]:
        if str(token.ch) == '0':
            # Found a zero exponent: drop it, then handle the '^' and the base.
            zero_pwr = 2
        elif zero_pwr == 2:
            # Drop the '^' operator.
            zero_pwr -= 1
        elif zero_pwr == 1:
            # Replace the base with '1'.
            zero_pwr -= 1
            new_node += tokenize('1')
        else:
            new_node.append(token)
            zero_pwr = 0
    return new_node[::-1]
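# Usage sketch (hedged: '^' is assumed to be the power operator in this tokenizer):
# any base raised to the zero power becomes '1', e.g.
#
#   to_zero_pwr(tokenize('a^0'))     # -> tokens equivalent to '1'
#   to_zero_pwr(tokenize('b*a^0'))   # -> tokens equivalent to 'b*1'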
def remove_dbl_neg(str_repr):
    """Remove a double negation such as '-(-x)' from the string representation."""
    processed, sign, inner, cut_str = select_brackets(str_repr)
    plus = ''
    left = processed.replace('{}({}'.format(sign, inner), '', 1)
    if left:
        left += '+'
        plus = '+'
    tokens = tokenize(inner)
    if sign == '-' and inner.startswith('-(') and inner.endswith(')'):
        # '-(-(expr))': the two negations cancel; keep only the inner expression.
        str_repr = left + inner[2:-1] + cut_str
    elif len(tokens) == 2 and tokens[0].ch == '-':
        # '-(-x)' -> '+x' (or just 'x' when nothing precedes it).
        str_repr = str_repr.replace('-({})'.format(inner), plus + inner[1:], 1)
    elif '(' not in inner:
        # Square brackets are substituted to avoid processing the same brackets twice.
        str_repr = str_repr.replace('(', '[', 1).replace(')', ']', 1)
    return str_repr
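# Usage sketch (heavily hedged: select_brackets() is defined elsewhere, so the exact
# bracket selection is assumed). Operating on the string representation, a double
# negation inside brackets is unwrapped, e.g.
#
#   remove_dbl_neg('a-(-b)')   # -> expected: 'a+b'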
def pre_process(self, graph):
    return tokenize(graph)