def plain_result(**kw):
    """Materialize the referenced tables, then return the parsed schema graph.

    When a ``metadata`` object is supplied its tables are created first;
    otherwise, when a non-empty ``tables`` list is given, the first table's
    metadata is used.  All keyword arguments are forwarded unchanged to
    ``sasd.create_schema_graph``.
    """
    if 'metadata' in kw:
        kw['metadata'].create_all()
    elif 'tables' in kw and len(kw['tables']):
        kw['tables'][0].metadata.create_all()
    return parse_graph(sasd.create_schema_graph(**kw))
def get_scene_graph_of_image(id=61512):
    """ Get Scene Graph of an image. """
    image = get_image_data(id=id)
    endpoint = '/api/v0/images/' + str(id) + '/graph'
    data = utils.retrieve_data(endpoint)
    # The API signals a missing image via a "detail" field.
    missing = 'detail' in data and data['detail'] == 'Not found.'
    if missing:
        return None
    return utils.parse_graph(data, image)
def get_region_graph_of_region(image_id=61512, region_id=1):
    """ Get Region Graph of a particular Region in an image. """
    image = get_image_data(id=image_id)
    endpoint = ('/api/v0/images/' + str(image_id) +
                '/regions/' + str(region_id))
    data = utils.retrieve_data(endpoint)
    # The API signals a missing region via a "detail" field.
    missing = 'detail' in data and data['detail'] == 'Not found.'
    if missing:
        return None
    # The endpoint responds with a list; only its first element is parsed.
    return utils.parse_graph(data[0], image)
def helper_function(
        grammar_hom,
        grammar_automata,
        graph_name,
        right_result,
        func_hom=lambda pair: {t for t in pair[0] if t[1] == 'S'} == pair[1],
        func_automata=lambda pair: set(pair[0]) == pair[1]):
    """Run every enabled CFPQ algorithm on one (grammar, graph) pair.

    Each algorithm's result is checked against ``right_result`` using the
    matching predicate: ``func_hom`` for the homomorphism-based transitive
    closure, ``func_automata`` for the automaton-based algorithms.
    """
    hom_grammar = parse_grammar_hom(grammar_hom)
    automata_grammar = parse_grammar_automata(grammar_automata)
    hom_base = os.path.basename(grammar_hom)
    automata_base = os.path.basename(grammar_automata)
    graph_base = os.path.basename(graph_name)

    if TRANS_CLOSURE:
        closure_res = trans_closure(parse_graph(graph_name), hom_grammar)
        assert func_hom((closure_res, right_result))
        print("test for {grammar_hom} and {graph_name}"
              " - trans_closure - OK".format(grammar_hom=hom_base,
                                             graph_name=graph_base))
        print()
    if BOTTOM_UP:
        bottom_up_res = bottom_up(parse_graph(graph_name), automata_grammar)
        assert func_automata((bottom_up_res, right_result))
        print("test for {grammar_automata} and {graph_name}"
              " - bottom_up - OK".format(grammar_automata=automata_base,
                                         graph_name=graph_base))
        print()
    if GLL:
        gll_res = gll(parse_graph(graph_name), automata_grammar)
        assert func_automata((gll_res, right_result))
        print("test for {grammar_automata} and {graph_name}"
              " - gll - OK".format(grammar_automata=automata_base,
                                   graph_name=graph_base))
        print()
def matrix_algorithm(graph_path, gram_path, out=None, test=False):
    """Matrix-based CFPQ over a graph with a grammar in Chomsky normal form.

    In ``test`` mode returns the number of vertex pairs derivable from 'S';
    otherwise prints (or writes to ``out``) every reachable triple as
    ``i,nonterminal,j`` lines.
    """
    G = utils.parse_chomsky_grammar(gram_path)
    grammar = G.rules
    graph, n, _ = utils.parse_graph(graph_path)

    # matrix[i][j] accumulates the nonterminals deriving a path i -> j.
    matrix = [[[] for _ in range(n)] for _ in range(n)]

    # Seed: one entry per production whose right-hand side matches the label.
    for (src, dst, label) in graph:
        cell = matrix[int(src)][int(dst)]
        for head, productions in grammar.items():
            cell.extend(head for prod in productions if prod == label)

    # Add loops if there is rules like A -> eps in grammar
    for v in range(n):
        matrix[v][v] += G.eps_nonterms

    # Iterate the closure step until a fixed point is reached.
    changed = True
    while changed:
        changed, matrix = matrix_closure(matrix, grammar, n)

    triples = set()
    s_count = 0
    for i in range(n):
        for j in range(n):
            if test and 'S' in matrix[i][j]:
                s_count += 1
            else:
                for nonterm in matrix[i][j]:
                    triples.add((i, nonterm, j))
    if test:
        return s_count
    if out is None:
        for (i, nonterm, j) in triples:
            print(str(i) + ',' + nonterm + ',' + str(j))
    else:
        with open(out, 'w') as f:
            for (i, nonterm, j) in triples:
                f.write(str(i) + ',' + nonterm + ',' + str(j) + '\n')
def top_down(graph_path, gram_path, out=None, test=False):
    """Top-down (GLL) CFPQ: parse the graph as an automaton and run GLL.

    In ``test`` mode returns how many result triples are labelled 'S';
    otherwise prints (or writes to ``out``) every triple as ``i,nonterm,j``.
    """
    automaton, size = utils.parse_graph(graph_path, gll=True)
    grammar, _ = utils.parse_grammar(gram_path)
    solver = gll_classes.GLL(grammar, automaton, size)
    res = solver.main()
    s_count = sum(1 for _, nonterm, _ in res if nonterm == 'S')
    if test:
        return s_count
    if out is None:
        for i, nonterm, j in res:
            print(str(i) + ',' + nonterm + ',' + str(j))
    else:
        with open(out, 'w') as f:
            for i, nonterm, j in res:
                f.write(str(i) + ',' + nonterm + ',' + str(j) + '\n')
def test_doc_graphs():
    """Regression test: run the enabled algorithms over the documented
    graph set and compare 'S'-labelled result counts with stored answers.

    Controlled by module-level flags: Q1_/Q2_ select the query grammars,
    TRANS_CLOSURE/BOTTOM_UP/GLL select the algorithms, NUM caps how many
    graphs are checked.
    """
    with open('data/data_for_tests/graphs') as f:
        graphs = ['data/graphs/data/' + x for x in f.read().splitlines()]
    if Q1_:
        # q1
        with open('data/data_for_tests/q1_answers') as f:
            right_q1 = [int(x) for x in f.read().splitlines()]
        Q1_hom = parse_grammar_hom('data/grammars/Q1_hom')
        Q1_automata = parse_grammar_automata('data/grammars/Q1_automata')
        # Only the first NUM graph/answer pairs are exercised.
        graphs = graphs[:NUM]
        right_q1 = right_q1[:NUM]
        for graph, answer in zip(graphs, right_q1):
            print("start test for {graph} and {grammar}".format(
                graph=os.path.basename(graph), grammar='Q1'))
            if TRANS_CLOSURE:
                res = trans_closure(parse_graph(graph), Q1_hom)
                # The expected answer counts only triples labelled 'S'.
                assert (len(list(filter(lambda x: x[1] == 'S', res)))) == answer
                print(
                    "test for {graph} and {grammar}- trans_closure OK".format(
                        graph=os.path.basename(graph), grammar='Q1'))
                print()
            if BOTTOM_UP:
                res = bottom_up(parse_graph(graph), Q1_automata)
                assert (len(list(filter(lambda x: x[1] == 'S', res)))) == answer
                print("test for {graph} and {grammar}- bottom_up OK".format(
                    graph=os.path.basename(graph), grammar='Q1'))
                print()
            if GLL:
                res = gll(parse_graph(graph), Q1_automata)
                assert (len(list(filter(lambda x: x[1] == 'S', res)))) == answer
                print("test for {graph} and {grammar} - gll OK".format(
                    graph=os.path.basename(graph), grammar='Q1'))
                print()
    if Q2_:
        # q2
        with open('data/data_for_tests/q2_answers') as f:
            right_q2 = [int(x) for x in f.read().splitlines()]
        Q2_hom = parse_grammar_hom('data/grammars/Q2_hom')
        Q2_automata = parse_grammar_automata('data/grammars/Q2_automata')
        graphs = graphs[:NUM]
        right_q2 = right_q2[:NUM]
        for graph, answer in zip(graphs, right_q2):
            if TRANS_CLOSURE:
                res = trans_closure(parse_graph(graph), Q2_hom)
                # NOTE(review): unlike the Q1 branch, the "start test" line is
                # printed inside the TRANS_CLOSURE block (and after the run) —
                # looks misplaced, but preserved as-is; confirm intent.
                print("start test for {graph} and {grammar}".format(
                    graph=os.path.basename(graph), grammar='Q2'))
                assert (len(list(filter(lambda x: x[1] == 'S', res)))) == answer
                print(
                    "test for {graph} and {grammar}- trans_closure OK".format(
                        graph=os.path.basename(graph), grammar='Q2'))
                print()
            if BOTTOM_UP:
                res = bottom_up(parse_graph(graph), Q2_automata)
                assert (len(list(filter(lambda x: x[1] == 'S', res)))) == answer
                print("test for {graph} and {grammar}- bottom_up OK".format(
                    graph=os.path.basename(graph), grammar='Q2'))
                print()
            if GLL:
                res = gll(parse_graph(graph), Q2_automata)
                assert (len(list(filter(lambda x: x[1] == 'S', res)))) == answer
                print("test for {graph} and {grammar} - gll OK".format(
                    graph=os.path.basename(graph), grammar='Q2'))
                print()
def bottom_up_algo(automaton_path, grammar_path, out=None, test=False):
    """Bottom-up CFPQ via intersection of the graph with an RFA.

    Iterates ``intersection`` and ``closure`` to a fixed point over a
    boolean (n*k)x(n*k) matrix indexed by (graph vertex, grammar state)
    pairs.  In ``test`` mode returns the number of result lines whose
    nonterminal is 'S'; otherwise prints them or writes them to ``out``.
    """
    # grammar parameters
    g, grammar_vertex = utils.parse_grammar(grammar_path)
    grammar = g.edges
    start_states = g.starts
    final_states = g.finals
    n = g.length
    # RFA parameters
    automaton, k, automaton_vertex = utils.parse_graph(automaton_path)
    # Boolean adjacency matrix over the product space of size n*k.
    matrix = [[False for i in range(n * k)] for j in range(n * k)]
    # set of edges used in transitive closure
    active_edges = set()
    # allows to work with (i, j) coordinates instead of (0, 0'), (1, 1')
    map_indices_to_states = dict()
    counter = 0
    for a in automaton_vertex:
        for b in grammar_vertex:
            map_indices_to_states[counter] = (int(a), int(b))
            counter += 1
    # Inverse lookup: (vertex, state) pair -> flat matrix index.
    # (The comprehension's `k` is scope-local and does not clobber the
    # outer automaton size `k`.)
    indices = {v: k for k, v in map_indices_to_states.items()}
    res = set()
    # A nonterminal whose start state is also final derives the empty
    # string, so every vertex trivially reaches itself with it.
    for st_state, st_nterm in start_states.items():
        for fin_state, fin_nterm in final_states.items():
            if st_nterm == fin_nterm and fin_state == st_state:
                for i in automaton_vertex:
                    res.add((str(i) + ',' + fin_nterm + ',' + str(i) +
                             '\n').replace(' ', ''))
    # Alternate intersection and transitive closure until neither step
    # adds anything new.
    smth_changes = True
    while smth_changes:
        smth_changes = False
        matrix, automaton, active_edges, inters_change = intersection(
            indices, automaton, grammar, matrix, final_states, start_states,
            active_edges)
        matrix, automaton, closure_change = closure(
            matrix, n * k, map_indices_to_states, start_states, final_states,
            automaton, active_edges)
        if inters_change or closure_change:
            smth_changes = True
    # Collect reachable pairs that run from a start state to a final state.
    for i in range(n * k):
        for j in range(n * k):
            if matrix[i][j]:
                a, b = map_indices_to_states[i]
                c, d = map_indices_to_states[j]
                if b in start_states.keys() and d in final_states.keys():
                    res.add(
                        str(a) + ',' + start_states[b] + ',' + str(c) + '\n')
    # Results are kept as ready-to-write "i,Nonterm,j\n" strings.
    test_res = [x for x in res if ',S,' in x]
    if test:
        return len(test_res)
    elif out is None:
        for item in res:
            print(item.strip())
    else:
        with open(out, 'w') as f:
            for item in res:
                f.write(item)
def plain_result(mapper, **kw):
    """Build the UML graph for *mapper* and return it parsed.

    Extra keyword arguments are forwarded to ``sasd.create_uml_graph``.
    """
    uml_graph = sasd.create_uml_graph(mapper, **kw)
    return parse_graph(uml_graph)