def _url_for_chart(self):
    params = dict(cht='bhs', chxt="x,y", chco="ffffff00,0000ff", chg="7.1,0")
    schedule = parse_string(self.code)
    params['chds'] = "0,%d" % schedule.days
    params['chs'] = self.options.get(
        'size', '480x%d' % (27 * (len(schedule.items) + 1)))
    interval = self.options.get('interval', None)
    if interval:
        schedule.interval = interval
    from datetime import timedelta
    xaxis = []
    min = schedule.min
    for i in range(0, schedule.days):
        if i % schedule.interval == 0 or i == (schedule.days - 1):
            date = min + timedelta(days=i)
            xaxis.append("%02d/%02d" % (date.month, date.day))
        else:
            xaxis.append("")
    params['chxl'] = "0:|%s|1:|%s" % ("|".join(xaxis), "|".join(
        n.label for n in reversed(schedule.items)))
    params['chd'] = "t:%s|%s" % (",".join(
        str(schedule.far_to(n)) for n in schedule.items), ",".join(
        str(n.width) for n in schedule.items))
    return params
def test_quantor(self):
    string = _fofify(" ? [ X, Y, Z] : ( a | b ) ")
    result = p.parse_string(string)[0]
    quantor = result["formula"]
    self.assertEqual(type(quantor), Quantor)
    self.assertEqual(len(quantor.variables), 3)
def proof(fstr):
    fstr = "fof(ax,axiom, (" + fstr + ") )."
    print("fof", fstr)
    formula = p.parse_string(fstr)[0]['formula']
    formula = o.transform(formula.negate())
    return r.proof(formula)
def convert_to_implications(constraint_str, immutable_flag_str=''):
    cons = list(parse_string(constraint_str))
    immutable_flags = parse_immutables(immutable_flag_str)
    n = normalize(cons, immutable_flags)
    flat = []
    for e in n:
        flat += to_implication(e)
    return flat
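# Hedged usage sketch: the constraint string below is borrowed from the
# self-tests elsewhere in this collection; the printed form of each entry
# depends on what to_implication() returns.
if __name__ == '__main__':
    for impl in convert_to_implications('|| ( a b c? ( d ) )'):
        print(impl)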
def __init__(self, element, value_xpath):
    self.element = element
    self.attrib = element.attrib
    self.text = element.text
    self.id = element.attrib.get('id')
    self.tail = element.tail
    self.all_text = get_all_text_newline(element)
    if value_xpath:
        self.value = parse_string(element, value_xpath)
def process_text(self, text):
    """
    Loops over each word, stopping if a word is not in mode_grammar.
    If the rule's value is a function, call the function with the rest
    of the words. Otherwise send the rule's value.
    """
    words = WordBuffer(text)
    for word in words:
        # Correct the word if it is in the aliases.
        if word in self.aliases:
            utilities.log(word + " => " + self.aliases[word], verbose=True)
            word = self.aliases[word]
        if word in self.mode_grammar_compiled:
            if callable(self.mode_grammar_compiled[word]):
                try:
                    arg_count = len(inspect.getargspec(self.mode_grammar_compiled[word]).args)
                    if arg_count == 1:
                        # Run the rest of the text through the function.
                        rest = words.get_all()
                        if rest == "":
                            utilities.log("ERROR: No text provided for grammar function.")
                            break
                        result = self.mode_grammar_compiled[word](rest)
                    else:
                        result = self.mode_grammar_compiled[word]()
                    if isinstance(result, str) and result != "":
                        keys = globals.Compiler.compile_key_events(parser.parse_string(result))
                        self.send_keystrokes(keys)
                        utilities.log(word + "(<phrase>) -> " + result)
                        utilities.log("compiled: " + keys, verbose=True)
                    else:
                        utilities.log(word + "(<phrase>)")
                except:
                    utilities.log("ERROR: Unexpected error in grammar function")
                    utilities.log(traceback.format_exc())
                    utilities.log(str(sys.exc_info()[0]))
                    break
            else:
                keys = self.mode_grammar_compiled[word]
                self.send_keystrokes(keys)
                try:
                    utilities.log(word + " -> " + self.mode_grammar[word])
                except:
                    utilities.log("UNPRINTABLE GRAMMAR RULE", verbose=True)
        else:
            utilities.log("Unrecognized rule: " + word)
            break
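# Hedged illustration (not from the original project): the shape of grammar
# process_text() dispatches on. A plain-string rule is sent as keystrokes; a
# callable rule is invoked, and a one-argument callable receives the rest of
# the spoken phrase. 'handler' and the rule values below are hypothetical.
#
#     handler.aliases = {"return": "enter"}
#     handler.mode_grammar_compiled = {
#         "enter": "<enter>",                   # string rule -> sent as keystrokes
#         "shout": lambda rest: rest.upper(),   # 1-arg rule -> receives the phrase
#     }
#     handler.process_text("shout hello world")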
def parse(self, filename=None, text=None, stream=None):
    if filename:
        parsed = parse_file(filename)
    elif text:
        parsed = parse_string(text)
    elif stream:
        parsed = parse_stream(stream)
    else:
        raise BeaverException('Must specify filename, text, or stream to parse.')
    stmts = 0
    for stmt in parsed:
        stmts += 1
        self.execute(stmt)
    return stmts
def parse(self, filename=None, text=None, stream=None):
    if filename:
        parsed = parse_file(filename)
    elif text:
        parsed = parse_string(text)
    elif stream:
        parsed = parse_stream(stream)
    else:
        raise ScotchException("Must specify filename, text, or stream to parse.")
    stmts = 0
    for stmt, start, end in parsed:
        stmts += 1
        self.execute(stmt)
    return stmts
def extract_from_tar(path, json_file):
    with tarfile.open(path, 'r:gz') as tar:
        for member in tar:
            if not member.isreg():
                continue
            f = tar.extractfile(member)
            content = f.read()
            try:
                result = parse_string(content)
                for ele in result['elements']:
                    texify(result['pmid'], result['pmcid'], result['article_type'], ele)
                    json_file.write(json.dumps(ele) + '\n')
            except:
                traceback.print_exc()
def _url_for_chart(self):
    chart = parse_string(self.code)
    if chart.type in ('p', 'p3'):
        params = self._url_for_piechart(chart)
    elif chart.type in ('lc',):
        params = self._url_for_linechart(chart)
    elif chart.type in ('lxy',):
        params = self._url_for_linechart_xy(chart)
    elif chart.type in ('bhs', 'bvs', 'bhg', 'bvg'):
        params = self._url_for_barchart(chart)
    elif chart.type in ('v',):
        params = self._url_for_venndiagram(chart)
    elif chart.type in ('s',):
        params = self._url_for_spotchart(chart)
    else:
        params = {}
    params['cht'] = chart.type
    params['chs'] = self.options.get('size', '320x240')
    return params
def transform():
    source = request.form.get('source', "")
    if not source.endswith("\n"):
        source += "\n"
    node = parse_string(source)
    visitor_source = request.form.get('visitor', "")
    globals_ = {"NodeVisitor": NodeVisitor, "type_name": type_name}
    output = None
    error = None
    try:
        exec(visitor_source, globals_, None)
        visitor = globals_["Visitor"]()
        visitor.visit(node)
        output = str(node)
    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        parts = traceback.extract_tb(exc_traceback)
        _, lineno, _, _ = parts[-1]
        error = {"lineno": lineno, "message": str(exc_value)}
    return jsonify(output=output, error=error)
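# Hedged usage sketch: the endpoint URL and port are assumptions (the route
# decorator is not shown above); only the 'source' and 'visitor' form fields
# come from the handler itself.
#
#     import requests
#     requests.post('http://localhost:5000/transform',
#                   data={'source': 'x = 1',
#                         'visitor': 'class Visitor(NodeVisitor):\n    pass\n'})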
def print_solutions(constraint_str, immutable_str):
    # sort n-ary expressions
    immutable_flags = parse_immutables(immutable_str)
    ast = sort_nary(validate_ast_passthrough(parse_string(constraint_str)),
                    immutability_sort(immutable_str))
    ast = list(ast)
    print(ast)
    print()

    # implication variant
    impl_ast = []
    for c, e in flatten3(ast):
        if c:
            e = Implication(c, [e])
        impl_ast.append(e)

    all_flags = frozenset(x.name for x in get_all_flags(ast))

    # print flag names, vertically
    sorted_flags = sorted(all_flags)
    no_flags = len(sorted_flags)
    y_max = max(len(x) for x in sorted_flags)
    for y in range(0, y_max):
        for f in sorted_flags + ['|'] + sorted_flags:
            print(' %s' % (f[len(f)-y_max+y] if y >= y_max - len(f) else ' '),
                  end='')
        print('')

    # solve for input = 000... to 111...
    max_iters = 0
    unsolvable = 0
    mismatched_solutions = 0
    for values in itertools.product((False, True), repeat=no_flags):
        inp_flags = dict(zip(sorted_flags, values))

        skip = False
        for k, v in immutable_flags.items():
            # skip mismatches for immutables
            if inp_flags[k] != v:
                skip = True
                break
        if skip:
            continue

        for f in sorted_flags:
            if f in immutable_flags:
                print('\033[33m', end='')
            print(' %d' % inp_flags[f], end='')
            if f in immutable_flags:
                print('\033[0m', end='')
        print(' |', end='')

        if validate_constraint(inp_flags, ast):
            print('\033[32m', end='')
            for f in sorted_flags:
                print(' %d' % inp_flags[f], end='')
            print(' (==)\033[0m')
        else:
            try:
                ret, iters = do_solving(sorted_flags, inp_flags, ast,
                                        immutable_flags)
            except (ImmutabilityError, InfiniteLoopError):
                unsolvable += 1
            else:
                if iters > max_iters:
                    max_iters = iters
                ret_impl, ret_iters = do_solving(sorted_flags, inp_flags,
                                                 impl_ast, immutable_flags,
                                                 verbose=False)
                if ret != ret_impl:
                    mismatched_solutions += 1
                    print('%*s |\033[31m' % (len(sorted_flags) * 2, ''), end='')
                    for f in sorted_flags:
                        if ret_impl[f] != ret[f]:
                            print(' \033[1m%d\033[22m' % ret_impl[f], end='')
                        else:
                            print(' %d' % ret_impl[f], end='')
                    print(' [mismatch between implication and basic form]\033[0m')

    print()
    print('max iterations: %d; unsolvable: %d; mismatched solutions for transform: %d'
          % (max_iters, unsolvable, mismatched_solutions))
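# Hedged usage sketch: the flag names and the empty immutable-flag string are
# purely illustrative; the constraint syntax matches the parser used by the
# other tools in this collection.
#
#     print_solutions('?? ( a b ) b? ( c )', '')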
def from_string(string):
    return tptp.parse_string(string)
from parser import parse_string, parse_file
from simpleBDI import *
from rule import Rule

# example from paper
ruleset = """
di,bs -(2)-> .ib,-bs
di,ws -(2)-> .iw,-ws
0:+di,+bs,+ws
"""

p = parse_string(ruleset)
trace = create_trace(TraceElement(set(), p[0], None, p[1], None, "p"))
i = 0
for t in trace:
    print(i, t)
    i += 1
def parse():
    source = request.form.get('source', "")
    if not source.endswith("\n"):
        source += "\n"
    return jsonify(tree=to_json(parse_string(source)))
        if len(r) <= 0:
            return False
        if len(r) == 1:
            return r[0]
        return AnyOfOperator(r)
    else:
        raise ValueError('Unknown AST expr: %s' % ast)


def normalize(ast, immutables={}, trace=False):
    if trace:
        print("Input: %s" % ast)

    # a? b? c --> [a,b]?c
    merged = list(merge_and_expand_implications(ast))
    if trace:
        print("After 1st merge: %s" % merged)

    # || ( a? ( b ) c ) -> || ( ( a b ) c )
    unnested = list(replace_nested_implications(merged))
    if trace:
        print("After removing nested implications: %s" % unnested)

    # kill ^^ and ??
    boolean = list(replace_nary(unnested))
    if trace:
        print("Converted to boolean algebra: %s" % boolean)

    # || () -> False, && () -> True, || ( immutable_true bar ) -> True, etc.
    simplified_ast = simplify_with_immutables(boolean, immutables)
    if trace:
        print("Simplified ast: %s" % simplified_ast)

    # reduce again
    # a? b? c --> [a,b]?c
    reduced = list(merge_and_expand_implications(simplified_ast))
    if trace:
        print("End result after merging: %s" % reduced)

    return reduced


if __name__ == '__main__':
    normalize(list(parse_string(sys.argv[1])), trace=True)
    check_equal('?? ( a b )',
                [Implication([Flag('a')], [Flag('b').negated()])])
    check_equal('|| ( a b c? ( d ) )', [
        Implication(
            [Flag('b').negated(), Flag('c').negated()],
            [Flag('a')]),
        Implication(
            [Flag('b').negated(), Flag('d').negated()],
            [Flag('a')])
    ])
    check_equal(
        'a b? ( c )',
        [Implication([], [Flag('a')]), Implication([Flag('b')], [Flag('c')])])
    check_equal('^^ ( a b )', [
        Implication([Flag('b').negated()], [Flag('a')]),
        Implication([Flag('a')], [Flag('b').negated()])
    ])


if __name__ == '__main__':
    from replace_nary import normalize
    from parser import parse_string
    import sys

    if len(sys.argv) <= 1:
        selftest()
        exit(0)

    m = normalize(list(parse_string(sys.argv[1])))
    print("Normalized: %s" % m)
    print("List of implications:")
    for i in m:
        print("  %s" % (to_implication(i)))
from participant import *
from dialogue import *

ruleset = """
hungry,no_food,at_home-(1)-> +go_to_shops
at_home,go_to_shops -(2)-> .drive,-at_home,+at_shops,-go_to_shops
no_food,at_shops -(1)-> .buy_food,+go_home,+have_food,-no_food
go_home -(1)-> .drive,-at_shops,+at_home,-go_home
hungry,have_food,at_home -(1)-> .eat,-hungry,-have_food,+no_food
at_shops,need_clothing-(2)->.buy_clothes,-need_clothing
0:+hungry,+no_food,+at_home,+need_clothing
24:+hungry
"""

p = parse_string(ruleset, timesteps=31)
trace = create_trace(TraceElement(set(), p[0], None, p[1], None, "p"))
i = 0
print("TRACE BASED ON EXTERNAL OBSERVATIONS")
for t in trace:
    # print(i, t.state, t.beliefs)
    if t.action != None:
        print(i, t.action)
    i += 1

a1 = Participant(trace, "a1")
a2 = Participant(trace, "a2")
wa = WhyAction("drive", 30, a2, None)
def flatten3(ast, conditions=[]):
    for expr in ast:
        if isinstance(expr, Flag):
            yield (conditions, expr)
        elif isinstance(expr, Implication):
            for x in flatten3(expr.constraint, conditions + expr.condition):
                yield x
        elif isinstance(expr, AnyOfOperator):
            # || ( a b c ... ) -> [!b !c ...]? ( a )
            yield (conditions + [x.negated() for x in expr.constraint[1:]],
                   expr.constraint[0])
        elif isinstance(expr, AtMostOneOfOperator):
            # ?? ( a b c ... ) -> a? ( !b !c ... ) b? ( !c ... ) ...
            for i in range(0, len(expr.constraint) - 1):
                new_cond = conditions + expr.constraint[i:i + 1]
                for x in expr.constraint[i + 1:]:
                    yield (new_cond, x.negated())
        elif isinstance(expr, ExactlyOneOfOperator):
            for x in flatten3([AnyOfOperator(expr.constraint)], conditions):
                yield x
            for x in flatten3([AtMostOneOfOperator(expr.constraint)], conditions):
                yield x
        else:
            raise ValueError('Unknown AST expr: %s' % expr)


if __name__ == '__main__':
    print(list(flatten3(parse_string(sys.argv[1]))))
            validate_ast(expr.constraint)
        elif isinstance(expr, AllOfOperator):
            raise ValueError('All-of operator forbidden')
        elif isinstance(expr, NaryOperator):
            for x in expr.constraint:
                if isinstance(x, Flag):
                    pass
                elif isinstance(x, Implication):
                    raise ValueError(
                        'USE-conditional group in %s operator forbidden'
                        % expr.op)
                elif isinstance(x, NaryOperator):
                    raise ValueError('%s group in %s operator forbidden'
                                     % (x.op, expr.op))
                else:
                    raise NotImplementedError('Unknown AST subexpr: %s' % x)
            if len(expr.constraint) == 0:
                raise ValueError('Empty %s group forbidden' % expr.op)
        else:
            raise NotImplementedError('Unknown AST subexpr: %s' % expr)
        yield expr


def validate_ast(ast):
    for x in validate_ast_passthrough(ast):
        pass


if __name__ == '__main__':
    validate_ast(parse_string(sys.argv[1]))
                yield x
            for x in expr.constraint:
                yield x
        elif isinstance(expr, NaryOperator):
            raise ValueError('N-ary operators should be replaced already')
        else:
            raise ValueError('Unknown AST expr: %s' % expr)


def print_graph(ast):
    ast = list(ast)
    print('digraph {')

    for e in get_edges_from_flat_ast(ast):
        # dependee -> dependency
        print('\t"%s" -> "%s";' % e)

    nodes = frozenset(get_nodes_from_flat_ast(ast))
    for n in nodes:
        # link nodes with their negations (if both present)
        if n.enabled and n.negated() in nodes:
            print('\t"%s" -> "%s" [color=red];' % (n.negated(), n))
            print('\t"%s" -> "%s" [color=red];' % (n, n.negated()))

    print('}')


if __name__ == '__main__':
    print_graph(
        flatten_implications(
            replace_allof(replace_nary(parse_string(sys.argv[1])))))