def __readVariableAssignmentToEntity(self):
  f = FILES["variable_assignment_to_entity_object"] % self.ontology_name
  # loaded_entity_behaviours = getData(f)
  data = getData(f)
  if data:
    loaded_entity_behaviours = data["behaviours"]
    # self.node_arc_associations = data["associations"]
    if loaded_entity_behaviours:
      for entity_str_ID in loaded_entity_behaviours:
        # careful: not all entities may be present, at least during development
        if loaded_entity_behaviours[entity_str_ID]:
          dummy = VariantRecord()
          data = loaded_entity_behaviours[entity_str_ID]
          # pad the loaded record with any attribute the current VariantRecord expects
          for atr in dummy:
            if atr not in data:
              data[atr] = None
          # JSON serialisation turns integer keys into strings -- convert them back
          tree = {}
          for treeStrID in data["tree"]:
            tree[int(treeStrID)] = data["tree"][treeStrID]
          data["tree"] = tree
          nodes = {}
          for nodeStrID in data["nodes"]:
            nodes[int(nodeStrID)] = data["nodes"][nodeStrID]
          data["nodes"] = nodes
          self.entity_behaviours[entity_str_ID] = VariantRecord(
                  tree=data["tree"],
                  nodes=data["nodes"],
                  IDs=data["IDs"],
                  root_variable=data["root_variable"],
                  blocked_list=data["blocked"],
                  buddies_list=data["buddies"],
                  to_be_inisialised=data["to_be_initialised"])
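# Note: the key conversion above compensates for the JSON round trip performed
# by getData(), which turns the integer keys of "tree" and "nodes" into
# strings. A minimal, self-contained sketch of the same idiom -- the helper is
# hypothetical and not part of the ontology code:
def _restore_int_keys_sketch(json_dict):
  # {"0": x, "12": y}  ->  {0: x, 12: y}
  return {int(key): value for key, value in json_dict.items()}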
def read(self, f):
  data = getData(f)
  status = False
  if data:
    for hash in data:
      self[hash] = data[hash]
    status = True
  return status
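# read() copies every entry of the decoded file into this dict-like container
# and reports whether anything was loaded. A hedged usage sketch -- the
# argument names and the file path are hypothetical stand-ins:
def _read_usage_sketch(hash_store, file_spec="existing_hashes.json"):
  if not hash_store.read(file_spec):
    # False: the file was missing or empty, so the store keeps its old content
    print("no entries loaded from", file_spec)
  return hash_store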
def getAutomata(file_spec, active_objects_all_phases):
  mouse_automata = {}
  key_automata = {}
  for phase in PHASES:
    print("\nphase", phase)
    editor_states = GRAPH_EDITOR_STATES[phase]
    mouse_automata[phase] = MouseAutomaton(active_objects_all_phases[phase], editor_states)
    key_automata[phase] = KeyAutomaton()
  if os.path.exists(file_spec):
    automata = getData(file_spec)  # load once -- the file holds all phases
    for phase in PHASES:
      if phase in automata["mouse"]:
        mouse_automata[phase].setAutomaton(automata["mouse"][phase])
      if phase in automata["key"]:
        key_automata[phase].setAutomaton(automata["key"][phase])
  return mouse_automata, key_automata
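# getAutomata() expects the persisted file to decode to a two-level mapping,
# keyed first by automaton kind and then by phase:
#   {"mouse": {phase: automaton_spec}, "key": {phase: automaton_spec}}
# A minimal sketch of that shape; the phase names and the Ellipsis placeholders
# are illustrative only, not the actual GRAPH_EDITOR_STATES content:
_AUTOMATA_FILE_SHAPE_SKETCH = {
        "mouse": {"topology": ..., "token_topology": ...},
        "key": {"topology": ..., "token_topology": ...},
        }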
def makeLatexDoc(file_name, assignments, ontology_container, dot_graph_file=""):
  ontology_location = ontology_container.ontology_location
  ontology_name = ontology_container.ontology_name
  latex_equation_file = FILES["coded_equations"] % (ontology_location, "latex")
  latex_variable_file = FILES["coded_variables"] % (ontology_location, "latex")
  latex_equations = getData(latex_equation_file)
  compiled_variable_labels = getEnumeratedData(latex_variable_file)
  variables = ontology_container.variables

  # var_ID = assignments["root_variable"]
  # tree = VarEqTree(variables,var_ID,[])
  print("debugging")
  # tree_var_ID = assignments["nodes"][0]
  try:
    walked_nodes = walkDepthFirstFnc(assignments["tree"], 0)
  except:
    print("problem -- walking the assignment tree failed")
  # the depth-first node list is only printed for debugging; the full node
  # dictionary is used below
  nodes = []
  for n in walked_nodes:
    nodes.append(assignments["nodes"][n])
    print(assignments["nodes"][n])

  nodes = assignments["nodes"]
  latex_var_equ = []
  count = 0
  for a in nodes:
    if "equation" in nodes[a]:
      print("debugging -- found equation:", nodes[a])
      e, eq_str_ID = nodes[a].split("_")
      var_ID = latex_equations[eq_str_ID]["variable_ID"]
      eq = "%s := %s" % (latex_equations[eq_str_ID]["lhs"], latex_equations[eq_str_ID]["rhs"])
      s = [count, str(var_ID), eq_str_ID, eq, str(variables[var_ID]["tokens"])]
      latex_var_equ.append(s)
      count += 1
  for a in nodes:
    if "variable" in nodes[a]:
      print("debugging -- found variable:", nodes[a])
      v, var_str_ID = nodes[a].split("_")
      var_ID = int(var_str_ID)
      eqs = variables[var_ID]["equations"]
      if not eqs:
        eq = "%s :: %s" % (compiled_variable_labels[var_ID], "\\text{port variable}")
        # (variables[var_ID]["aliases"]["latex"], "\\text{port variable}")
        s = [count, var_str_ID, "-", eq, str(variables[var_ID]["tokens"])]
        latex_var_equ.append(s)
        count += 1

  print("debugging -- got here")
  # get the root variable in LaTeX form
  root_var = nodes[0]
  v, var_str_ID = root_var.split("_")
  var_ID = int(var_str_ID)
  lhs = variables[var_ID]["aliases"]["latex"]
  latex_var_equ = reversed(latex_var_equ)

  THIS_DIR = dirname(abspath(__file__))
  j2_env = Environment(loader=FileSystemLoader(THIS_DIR), trim_blocks=True)
  template = FILES["latex_template_equation_list"]
  body = j2_env.get_template(template).render(variable=lhs, equations=latex_var_equ, dot=dot_graph_file)

  f_name = FILES["latex_equation_list"] % (ontology_name, file_name)
  f = open(f_name, 'w')
  f.write(body)
  f.close()

  shell_name = FILES["latex_shell_var_equ_list_command"] % ontology_name
  latex_location = DIRECTORIES["latex_location"] % ontology_name
  args = ['bash', shell_name, latex_location, file_name]  # ontology_location + '/']
  print('ARGS: ', args)
  try:
    # reports an error after completing the last one -- no idea why
    make_it = subprocess.Popen(args, start_new_session=True)
    out, error = make_it.communicate()
  except:
    print("equation generation failed")
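# makeLatexDoc() hands three names to the Jinja2 template: "variable" (the
# LaTeX label of the root variable), "equations" (an iterable of
# [count, variable_ID, equation_ID, equation, tokens] rows) and "dot" (an
# optional graph file). A minimal sketch of a compatible template fragment,
# assuming a LaTeX template with default Jinja2 delimiters -- the real file
# behind FILES["latex_template_equation_list"] may well differ:
_LATEX_TEMPLATE_SKETCH = r"""
\section*{Equations for $ {{ variable }} $}
\begin{itemize}
{% for row in equations %}
  \item[{{ row[0] }}] $ {{ row[3] }} $
{% endfor %}
\end{itemize}
"""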
# # testing
#
if __name__ == '__main__':
  a = QtWidgets.QApplication([])
  ontology_name = getOntologyName()
  dot_path = os.path.join(DIRECTORIES["ontology_repository"],
                          ontology_name,
                          DIRECTORIES["ontology_graphs_location"],
                          "%s")
  o_template = dot_path  # + ".gv"
  o = FILES["ontology_file"] % ontology_name
  ontology = getData(o)["ontology_tree"]
  #
  # the tree of networks
  f = o_template % "tree"
  print(f)

  graph_attr = {}
  graph_attr["nodesep"] = "1"
  graph_attr["ranksep"] = "0.3"
  # graph_attr.edge_attr["color"] = "blue"
  graph_attr["splines"] = "false"  # "polyline"
  edge_attr = {}
  # edge_attr["tailport"] = "s"
  # edge_attr["headport"] = "n"

  simple_graph = Digraph("T", filename=f)
  simple_graph.graph_attr = graph_attr
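# The Digraph above only carries graph attributes at this point; with the
# graphviz package a drawing is produced by adding nodes and edges and calling
# render(). A self-contained sketch, assuming the Graphviz binaries are on the
# PATH (node names and the output file are illustrative, not the ontology tree):
def _render_tree_sketch():
  from graphviz import Digraph
  g = Digraph("T_sketch", filename="tree_sketch.gv")
  g.graph_attr = {"nodesep": "1", "ranksep": "0.3", "splines": "false"}
  g.node("root")
  g.node("leaf")
  g.edge("root", "leaf")
  g.render(view=False)  # writes tree_sketch.gv plus the rendered PDF next to it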
def getGraphData(networks, list_interconnection_networks,
                 list_intraconnection_networks, list_NetworkNodeObjects,
                 list_IntraNodeObjects, list_InterNodeObjects, list_arcObjects,
                 tokens, graph_resource_file_spec):
  # get graph data
  dict_application_node_types = {
          NAMES["node"]: list_NetworkNodeObjects,
          NAMES["intraface"]: list_IntraNodeObjects,
          NAMES["interface"]: list_InterNodeObjects
          }
  application_arc_types = list_arcObjects
  DATA = GraphDataObjects(dict_application_node_types, application_arc_types)
  NETWORK = NetworkDataObjects(networks, list_interconnection_networks)
  TOKENS = TokenDataObjects(tokens)

  STATE_colours_set = set()
  for phase in STATES:
    for component in STATES[phase]:
      for state in STATES[phase][component]:
        STATE_colours_set.add(state)
  state_colours = {}
  for s in sorted(STATE_colours_set):
    state_colours[s] = Colour()  # colour data

  # TODO -- cleaning operation is missing
  if os.path.exists(graph_resource_file_spec):
    data_dict = getData(graph_resource_file_spec)
    mouse_data = data_dict["data"]
    for p in DATA:
      for r in DATA[p]:
        for d in DATA[p][r]:
          shape = STRUCTURES_Graph_Item[r][d]  # ["decoration"][d]
          for a in DATA[p][r][d]:
            for s in DATA[p][r][d][a]:
              try:
                # merge: default record first, stored values on top
                obj = deepcopy(DATA_STRUCTURE[shape])
                obj.update(mouse_data[p][r][d][a][s])
                DATA[p][r][d][a][s] = obj  # mouse_data[p][r][d][a][s]
                # print(p, r, d, a, s, "-- OK")
              except:
                print(p, r, d, a, s, "-- x")

    # update and clean out
    if "networks" in data_dict:
      NETWORK.update(data_dict["networks"])
      delete_me = set()
      for nw in NETWORK:
        if (nw not in networks) and (nw not in list_interconnection_networks):
          delete_me.add(nw)
          print("delete network:", nw)
      for nw in delete_me:
        del NETWORK[nw]

    if "tokens" in data_dict:
      TOKENS.update(data_dict["tokens"])
      delete_me = set()
      for token in TOKENS:
        if token not in tokens:
          delete_me.add(token)
          print("delete token :", token)
      for token in delete_me:
        del TOKENS[token]

    state_colours = {}
    for s in sorted(STATE_colours_set):
      state_colours[s] = Colour()  # reset to defaults before applying stored colours
    if "states" in data_dict:
      state_colours.update(data_dict["states"])

  return NETWORK, TOKENS, DATA, state_colours
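# The try-block above implements a "defaults first, stored values on top"
# merge: start from a deep copy of the shape's default record and update it
# with whatever was saved, so attributes added to DATA_STRUCTURE after the
# resource file was written keep their defaults. A minimal, self-contained
# sketch of that idiom (the dictionaries passed in are illustrative, not the
# real DATA_STRUCTURE entries):
def _merge_with_defaults_sketch(defaults, stored):
  from copy import deepcopy
  merged = deepcopy(defaults)  # never mutate the shared default record
  merged.update(stored)        # stored values win; missing keys keep defaults
  return merged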