def main(sid, aid, owl_file, template_files, script_files, constants_file):
    """Replace a semantic identifier (e.g. "nidm:'My Term'") by an
    alphanumeric identifier (e.g. "nidm:NIDM_0000123") throughout the
    owl file, the templates, the scripts and the constants file.

    If `aid` is None, the next available "nidm:NIDM_xxxxxxx" identifier
    is computed from the identifiers already present in the owl file.
    """
    owl_txt = get_file_text(owl_file)
    templates_txt = dict()
    for template_file in template_files:
        templates_txt[template_file] = get_file_text(template_file)
    scripts_txt = dict()
    for script_file in script_files:
        scripts_txt[script_file] = get_file_text(script_file)
    cst_txt = get_file_text(constants_file)

    # If alphanumeric identifier was not defined, find the next available
    if aid is None:
        before_alnum = "nidm:NIDM_"
        # Find all alphanumeric identifiers in the owl file
        # (raw string so \d and \s are regex escapes, not string escapes)
        alphanum_ids = set(
            re.findall("(" + before_alnum + r'\d+)\s+', owl_txt))
        # Get identifier number for next alphanumeric identifier
        # (zero-padded ids of equal width sort lexicographically)
        last_id = sorted(list(alphanum_ids))[-1]
        new_id_num = int(last_id.replace(before_alnum, "")) + 1
        aid = before_alnum + "{0:0>7}".format(new_id_num)

    # Resolve the semantic id ("prefix:name") to a URI
    sid_name = sid.split(":")[1]
    sid_namespace = sid.split(":")[0]
    if sid_namespace == "nidm":
        uri = NIDM[sid_name]
    elif sid_namespace == "fsl":
        uri = FSL[sid_name]
    elif sid_namespace == "spm":
        uri = SPM[sid_name]

    # (removed dead `owl = OwlReader` class-object assignment that was
    # immediately overwritten by the instantiation below)
    owl = OwlReader(owl_file)
    label = owl.get_label(uri).split(":")[1].replace("'", "")

    # Replace all occurences of semantic id
    owl_txt = owl_txt.replace(sid + " ", aid + " ")
    # Replace ids in templates
    for tpl, tpl_txt in templates_txt.items():
        templates_txt[tpl] = tpl_txt.replace(sid + " ", aid + " ")
    # In scripts ids appear as quoted strings
    for scr, scr_txt in scripts_txt.items():
        scripts_txt[scr] = scr_txt.replace('"' + sid + '"',
                                           '"' + aid + '"')

    # Register the new term in the constants file, right under the
    # "# NIDM constants" marker
    new_constant = "NIDM_" + \
        label.upper().replace(" ", "_").replace("-", "_") + \
        " = NIDM['" + aid.replace("nidm:", "") + "']"
    cst_txt = cst_txt.replace("# NIDM constants",
                              "# NIDM constants\n" + new_constant)

    replace_file_txt(owl_file, owl_txt)
    replace_file_txt(constants_file, cst_txt)
    for tpl, tpl_txt in templates_txt.items():
        replace_file_txt(tpl, tpl_txt)
    for scr, scr_txt in scripts_txt.items():
        replace_file_txt(scr, scr_txt)
def _load_owl(self, owl_file):
    """Load the OwlReader for `owl_file` into self.owl.

    Readers are cached in self.owl_readers so that each owl file is
    only parsed once.
    """
    if owl_file in self.owl_readers:
        self.owl = self.owl_readers[owl_file]
    else:
        # Check the file exists before handing it to the parser
        assert os.path.exists(owl_file)

        # Read owl (turtle) file
        owl_path = os.path.dirname(owl_file)
        # Extension owl files live deeper in the tree than the core
        # terms, so the relative path to the "imports" folder differs
        if "extension" not in owl_path:
            import_files = glob.glob(os.path.join(
                owl_path, os.pardir, os.pardir, "imports", '*.ttl'))
        else:
            import_files = glob.glob(os.path.join(
                owl_path, os.pardir, os.pardir, os.pardir, os.pardir,
                "imports", '*.ttl'))
            # Main ontology file
            import_files += glob.glob(os.path.join(
                owl_path, os.pardir, os.pardir, os.pardir,
                "terms", '*.owl'))

        self.owl = OwlReader(owl_file, import_files)
        self.owl_readers[owl_file] = self.owl
def __init__(self, parent=None):
    """Set up the owl/designator readers and empty bookkeeping lists."""
    # Readers used to parse recorded log data
    self.rdrOwl = OwlReader()
    self.rdrDesig = DesignatorReader()
    # Experiences loaded so far and the task types seen in them
    self.arrExperiences = list()
    self.knownTaskTypes = list()
def __init__(self, name, owl_file, ttl_file, gt_ttl_files,
             exact_comparison):
    """Load an example NIDM-Results document together with its ground
    truth and the owl file matching the document's NIDM-Results version.

    name             -- short name of the example
    owl_file         -- path to the dev nidm-results owl file
    ttl_file         -- turtle file of the example to load
    gt_ttl_files     -- ground-truth turtle files (dev paths)
    exact_comparison -- whether the ground-truth comparison is exact
    """
    self.name = name
    self.ttl_file = ttl_file
    self.owl_file = owl_file
    self.gt_ttl_files = gt_ttl_files
    self.exact_comparison = exact_comparison
    self.graph = Graph()
    self.graph.parse(ttl_file, format='turtle')

    # Get NIDM-Results version for each example
    versions = self.graph.objects(None, NIDM_VERSION)
    assert versions is not None
    # NOTE(review): Python 2 iterator protocol; `next(versions)` would
    # be needed for Python 3 compatibility
    self.version = str(versions.next())

    if self.version != "dev":
        # Released example: point the ground truth and the owl file at
        # the matching release instead of the dev versions
        self.gt_ttl_files = [
            x.replace(os.path.join("nidm", "nidm"),
                      os.path.join("nidm_releases", self.version,
                                   "nidm"))
            for x in self.gt_ttl_files]
        self.owl_file = os.path.join(
            os.path.dirname(owl_file), "releases",
            "nidm-results_" + self.version.replace(".", "") + ".owl")

    # Imports are only resolved for the dev version of the ontology
    owl_imports = None
    if self.version == "dev":
        owl_imports = glob.glob(
            os.path.join(os.path.dirname(owl_file), os.pardir,
                         os.pardir, "imports", '*.ttl'))
    self.owl = OwlReader(self.owl_file, owl_imports)
class LogReader:
    """Reads a cram log (owl) from a dataset directory."""

    def __init__(self):
        # Reader used to parse the owl log file
        self.rdrOwl = OwlReader()

    def loadLog(self, strPath):
        """Parse `<strPath>/cram_log.owl` and return it wrapped in a
        Log object."""
        logLoaded = Log()
        strOwlFile = strPath + "/cram_log.owl"
        logLoaded.setOwlData(self.rdrOwl.loadOwl(strOwlFile))
        return logLoaded
def __init__(self, owl_file, import_files, spec_name, subcomponents=None,
             used_by=None, generated_by=None, derived_from=None,
             prefix=None):
    """Parse the owl file and immediately generate the specification
    text (stored in self.text) for the component `spec_name`."""
    self.owl = OwlReader(owl_file, import_files)
    self.owl.graph.bind('nidm', 'http://purl.org/nidash/nidm#')
    # Component name, e.g. "NIDM-Results" -> "nidm_results"
    self.name = spec_name
    self.component = self.name.lower().replace("-", "_")
    # State tracked while the document is generated
    self.section_open = 0
    self.already_defined_classes = []
    self.attributes_done = set()
    self.text = ""
    self.create_specification(subcomponents, used_by, generated_by,
                              derived_from, prefix)
def __init__(self, parent=None):
    """Build the Qt main window, load a hard-coded example experience
    and start the Qt event loop (this call blocks and finally exits the
    process via sys.exit)."""
    # Readers for the owl log and the logged designators
    self.rdrOwl = OwlReader()
    self.rdrDesig = DesignatorReader()
    self.arrExperiences = []
    self.dicEntities = {}

    # Qt setup: the application object must exist before any widget
    self.app = QtGui.QApplication(sys.argv)
    QtGui.QWidget.__init__(self, parent)
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    self.show()

    # NOTE(review): hard-coded absolute dataset paths; should be
    # parameterized
    self.loadExperience(
        "/home/winkler/ros/catkin/src/semrec/scripts/bstools/Beliefstate Tools/Datasets/ds4/cram_log.owl",
        "/home/winkler/ros/catkin/src/semrec/scripts/bstools/Beliefstate Tools/Datasets/ds4/logged_designators.json"
    )

    sys.exit(self.app.exec_())
class LogReader:
    """Loads a recorded experience (owl log plus designators)."""

    def __init__(self):
        # One reader per on-disk data format
        self.rdrOwl = OwlReader()
        self.rdrDesig = DesignatorReader()

    def loadLog(self, strPath):
        """Read `<strPath>/cram_log.owl` and
        `<strPath>/logged_designators.json` into a fresh Log."""
        logResult = Log()
        logResult.setOwlData(
            self.rdrOwl.loadOwl(strPath + "/cram_log.owl"))
        logResult.setDesignatorData(
            self.rdrDesig.loadDesignators(
                strPath + "/logged_designators.json"))
        return logResult
def setUp(self, owl_file, owl_imports=None, test_files=None,
          parent_test_dir=None, parent_gt_dir=None):
    """Prepare the test fixture: parse the owl file and reset state.

    NOTE(review): "my_execption" looks like a typo for "my_exception"
    but is kept since other code may reference the attribute.
    """
    self.my_execption = ""
    self.owl_file = owl_file
    self.gt_dir = parent_gt_dir
    self.ex_graphs = {}
    self.owl = OwlReader(owl_file, owl_imports)
def setUp(self):
    """Create the export directory and load the NIDM-Results owl file,
    an empty prov document and the namespace prefixes used by the
    tests."""
    self.export_dir = os.path.join(TEST_FOLDER, 'nidm')
    if not os.path.isdir(self.export_dir):
        os.mkdir(self.export_dir)

    # Retrieve owl file for NIDM-Results
    owl_file = os.path.join(TERM_RESULTS_DIR, 'nidm-results.owl')
    # Check the file actually exists (the previous `assert owl_file`
    # was always true: a non-empty path string is truthy)
    assert os.path.isfile(owl_file)
    self.owl = OwlReader(owl_file)

    self.doc = ProvDocument()
    # self.bundle = ProvBundle(identifier=NIIRI[software_lc+'_results_id'])
    self.provn_file = os.path.join(self.export_dir, 'unit_test.provn')

    # Namespace prefixes shared by all examples; `with` guarantees the
    # handle is closed even if reading fails
    namespaces_file = os.path.join(TERM_RESULTS_DIR, "templates",
                                   "Namespaces.txt")
    with open(namespaces_file) as namespaces_fid:
        self.prefixes = namespaces_fid.read()

    self.to_delete_files = [self.provn_file]
    self.gt_ttl_files = list()
class LogReader:
    """Builds Log objects from an experience directory on disk."""

    def __init__(self):
        # Parsers for the two files making up an experience
        self.rdrOwl = OwlReader()
        self.rdrDesig = DesignatorReader()

    def loadLog(self, strPath):
        """Return a Log populated from the owl log and the designator
        dump found under `strPath`."""
        owlData = self.rdrOwl.loadOwl(strPath + "/cram_log.owl")
        desigData = self.rdrDesig.loadDesignators(
            strPath + "/logged_designators.json")

        log = Log()
        log.setOwlData(owlData)
        log.setDesignatorData(desigData)
        return log
class ExperienceProcessor(QtGui.QMainWindow):
    """Qt main window that loads logged experiences and renders their
    entities onto a canvas."""

    def __init__(self, parent=None):
        """Create the UI, load a hard-coded example experience and
        start the Qt event loop (blocks; exits the process when the UI
        closes)."""
        self.rdrOwl = OwlReader()
        self.rdrDesig = DesignatorReader()
        self.arrExperiences = []
        self.dicEntities = {}

        # Qt setup: the application object must exist before any widget
        self.app = QtGui.QApplication(sys.argv)
        QtGui.QWidget.__init__(self, parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.show()

        # NOTE(review): hard-coded dataset paths; should be configurable
        self.loadExperience(
            "/home/winkler/ros/catkin/src/semrec/scripts/bstools/Beliefstate Tools/Datasets/ds4/cram_log.owl",
            "/home/winkler/ros/catkin/src/semrec/scripts/bstools/Beliefstate Tools/Datasets/ds4/logged_designators.json"
        )

        sys.exit(self.app.exec_())

    def addExperience(self, expAdd):
        """Append a loaded experience to the list."""
        self.arrExperiences.append(expAdd)

    def loadExperience(self, strOwlFile, strDesignatorFile):
        """Load an owl log and its designators into a Log and store it."""
        logReturn = Log()
        logReturn.setOwlData(self.rdrOwl.loadOwl(strOwlFile))
        logReturn.setDesignatorData(
            self.rdrDesig.loadDesignators(strDesignatorFile))
        self.addExperience(logReturn)

    def update(self):
        """Refresh the entities and redraw the canvas."""
        self.updateEntities()
        self.renderCanvas()

    def updateEntities(self):
        # Stub: to be filled in with entity extraction logic
        pass

    def renderCanvas(self):
        """Render every known entity."""
        # Fixed: `dicEntities[strName]` referenced an undefined global
        # (missing `self.`)
        for strName in self.dicEntities:
            self.renderEntity(self.dicEntities[strName])

    def renderEntity(self, entity):
        # Stub: now accepts the entity to draw (previously took no
        # argument, so renderCanvas' call raised TypeError)
        pass
def __init__(self, parent=None):
    """Build the Qt main window, load a hard-coded example experience
    and enter the Qt event loop (blocks and exits the process via
    sys.exit when the UI closes)."""
    # Readers for the owl log and the logged designators
    self.rdrOwl = OwlReader()
    self.rdrDesig = DesignatorReader()
    self.arrExperiences = []
    self.dicEntities = {}

    # Qt setup: the application object must exist before any widget
    self.app = QtGui.QApplication(sys.argv)
    QtGui.QWidget.__init__(self, parent)
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    self.show()

    # NOTE(review): hard-coded absolute dataset paths; should be
    # parameterized
    self.loadExperience("/home/winkler/ros/catkin/src/semrec/scripts/bstools/Beliefstate Tools/Datasets/ds4/cram_log.owl", "/home/winkler/ros/catkin/src/semrec/scripts/bstools/Beliefstate Tools/Datasets/ds4/logged_designators.json")

    sys.exit(self.app.exec_())
def setUp(self, owl_file, owl_imports=None, test_files=None,
          parent_test_dir=None, parent_gt_dir=None):
    """Load every test turtle file as an ExampleGraph, reading its
    ground-truth file list and comparison mode from the per-directory
    config.json."""
    self.my_execption = ""
    self.owl = OwlReader(owl_file, owl_imports)
    self.ex_graphs = {}

    for ttl_name in test_files:
        ttl_path = parent_test_dir + ttl_name
        # Each test directory carries a config.json describing it
        cfg_path = os.path.join(os.path.dirname(ttl_path), 'config.json')
        with open(cfg_path) as cfg_fid:
            cfg = json.load(cfg_fid)

        ground_truth = [os.path.join(parent_gt_dir, gt)
                        for gt in cfg["ground_truth"]]
        # Short name relative to the test directory
        short_name = ttl_path.replace(parent_test_dir, "")

        self.ex_graphs[ttl_name] = ExampleGraph(
            short_name, owl_file, ttl_path, ground_truth,
            cfg["inclusive"])
class ExperienceProcessor(QtGui.QMainWindow):
    """Qt main window that loads logged experiences and renders their
    entities onto a canvas."""

    def __init__(self, parent=None):
        """Create the UI, load a hard-coded example experience and
        start the Qt event loop (blocks; exits the process when the UI
        closes)."""
        self.rdrOwl = OwlReader()
        self.rdrDesig = DesignatorReader()
        self.arrExperiences = []
        self.dicEntities = {}

        # Qt setup: the application object must exist before any widget
        self.app = QtGui.QApplication(sys.argv)
        QtGui.QWidget.__init__(self, parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.show()

        # NOTE(review): hard-coded dataset paths; should be configurable
        self.loadExperience("/home/winkler/ros/catkin/src/semrec/scripts/bstools/Beliefstate Tools/Datasets/ds4/cram_log.owl", "/home/winkler/ros/catkin/src/semrec/scripts/bstools/Beliefstate Tools/Datasets/ds4/logged_designators.json")

        sys.exit(self.app.exec_())

    def addExperience(self, expAdd):
        """Append a loaded experience to the list."""
        self.arrExperiences.append(expAdd)

    def loadExperience(self, strOwlFile, strDesignatorFile):
        """Load an owl log and its designators into a Log and store it."""
        logReturn = Log()
        logReturn.setOwlData(self.rdrOwl.loadOwl(strOwlFile))
        logReturn.setDesignatorData(
            self.rdrDesig.loadDesignators(strDesignatorFile))
        self.addExperience(logReturn)

    def update(self):
        """Refresh the entities and redraw the canvas."""
        self.updateEntities()
        self.renderCanvas()

    def updateEntities(self):
        # Stub: to be filled in with entity extraction logic
        pass

    def renderCanvas(self):
        """Render every known entity."""
        # Fixed: `dicEntities[strName]` referenced an undefined global
        # (missing `self.`)
        for strName in self.dicEntities:
            self.renderEntity(self.dicEntities[strName])

    def renderEntity(self, entity):
        # Stub: now accepts the entity to draw (previously took no
        # argument, so renderCanvas' call raised TypeError)
        pass
def __init__(self, nidm_classes, example_file, one_file_per_class=False,
             owl_file=None, remove_att=None):
    """Prepare generation of turtle examples for `nidm_classes`.

    If `one_file_per_class` is False, `example_file` is the single
    output file; otherwise it is treated as the output directory.
    """
    self.nidm_classes = nidm_classes
    self.one_file_per_class = one_file_per_class
    self.remove_att = remove_att

    self.owl = None
    if owl_file is None:
        # Default to the dev NIDM-Results owl file plus its imports
        import_files = glob.glob(os.path.join(NIDMPATH, "imports",
                                              '*.ttl'))
        owl_file = os.path.join(NIDM_TERMS_DIR, 'nidm-results.owl')
        # NOTE(review): when a caller supplies `owl_file`, self.owl is
        # left as None — confirm this is intended
        self.owl = OwlReader(owl_file, import_files)

    if not one_file_per_class:
        self.file = example_file
    else:
        self.dir = example_file
class OwlSpecification(object):
    """Generates the HTML (Respec) specification document for an owl
    file: one table of concepts per subcomponent plus one section per
    class, including attributes, ranges, individuals and examples. The
    generated document accumulates in self.text."""

    def __init__(self, owl_file, import_files, spec_name,
                 subcomponents=None, used_by=None, generated_by=None,
                 derived_from=None, prefix=None, commentable=False,
                 intro=None):
        self.owl = OwlReader(owl_file, import_files)
        self.owl.graph.bind('nidm', 'http://purl.org/nidash/nidm#')
        # Component name, e.g. "NIDM-Results" -> "nidm_results"
        self.name = spec_name
        self.component = self.name.lower().replace("-", "_")
        # State tracked while the document is generated
        self.section_open = 0
        self.already_defined_classes = list()
        self.commentable = commentable
        self.attributes_done = set()
        self.text = ""
        self.create_specification(subcomponents, used_by, generated_by,
                                  derived_from, prefix, intro)

    def create_specification(self, subcomponents, used_by, generated_by,
                             derived_from, prefix, intro=None):
        """Build the whole document: title, optional intro, then one
        table + class sections per subcomponent."""
        self.create_title(self.name + ": Types and relations",
                          "definitions")
        if intro is not None:
            self.text += intro

        # Table numbering starts at 3 (tables 1 and 2 are in the static
        # header material)
        table_num = 3
        for subcomponent_name, classes in subcomponents.items():
            classes_by_types = self.owl.get_class_names_by_prov_type(
                classes, prefix=prefix, but=self.already_defined_classes)
            self.already_defined_classes += classes

            self.create_subcomponent_table(classes_by_types, table_num,
                                           subcomponent_name)
            table_num = table_num + 1
            all_classes = classes_by_types[PROV['Agent']] + \
                classes_by_types[PROV['Activity']] + \
                classes_by_types[PROV['Entity']] + \
                classes_by_types[None]
            for class_name in all_classes:
                self.create_class_section(
                    class_name,
                    self.owl.get_definition(class_name),
                    self.owl.attributes.setdefault(class_name, None),
                    used_by, generated_by, derived_from,
                    children=not (self.owl.get_prov_class(class_name) ==
                                  PROV['Entity']))
            if subcomponent_name:
                self.text += """
        </section>"""
        self.close_sections()

    def create_subcomponent_table(self, classes, table_num,
                                  subcomponent_name=None):
        """Emit the section header and the concept/PROV-type/identifier
        table for one subcomponent."""
        if subcomponent_name:
            self.text += """
        <section><h1>""" + subcomponent_name + """</h1>"""
            # Check if there is a header file to include here
            fname = os.path.join(
                INCLUDE_FOLDER, self.component + "_" +
                subcomponent_name.split(" ")[0].lower() + ".html")
            if os.path.isfile(fname):
                fid = open(fname, "r")
                self.text += fid.read()
                fid.close()
        else:
            subcomponent_name = ""

        # Did not find how to handle table numbering and ids with Respec
        # as we did for figures?
        # NOTE(review): `"prov-mapping-" ""` is adjacent-literal
        # concatenation; the empty string is a no-op
        table_id = "prov-mapping-" "" + subcomponent_name.lower()
        self.text += """
        <div style="text-align: left;">
            <table class="thinborder" \
style="margin-left: auto; margin-right: auto;">
                <caption id=\"""" + table_id + """\">\
<a class="internalDFN" href=\"#""" + table_id + """\">\
Table """ + str(table_num) + """</a>:""" + self.name + """\
 """ + subcomponent_name + """ Concepts</caption> \
                <tbody>
                    <tr>
                        <th align="center"><b>""" + self.name + """ Concept</b>\
</th>
                        <th align="center"><b>PROV type</b></th>
                        <th align="center"><b>Identifier</b></th>
                    </tr>
    """
        self.text += """
    <!-- HERE ------------- Beginning of PROV Entities ------------- -->
    """
        # One row per class, grouped by PROV type with a rowspan cell
        for prov_class in list(
                [PROV['Agent'], PROV['Activity'], PROV['Entity']]):
            sorted_classes = classes[prov_class]
            for class_uri in sorted_classes:
                self.text += """
                    <tr>
                        <td>""" + self.term_link(class_uri) + """
                        </td>
    """
                # First iteration
                if class_uri is sorted_classes[0]:
                    self.text += """
                        <td rowspan=\"""" + str(len(sorted_classes)) + \
                        """\" style="text-align: center;">
                        """ + \
                        self.owl.get_label(prov_class) + \
                        """</td>
    """
                self.text += """
                        <td>""" + self.owl.graph.qname(class_uri) + \
                    """</td>
                    </tr>
    """
        self.text += """
                </tbody>
            </table>
        </div>"""

    def create_title(self, title, id=None):
        """Open a new <section> with an <h1> title; close_sections()
        balances the opened tags at the end."""
        if id is None:
            self.text += """
        <section>
    """
        else:
            self.text += """
        <section id=\"""" + id + """\">
    """
        self.text += """
            <h1>""" + title + """</h1>
    """
        self.section_open += 1

    def _format_markdown(self, text):
        # Replace links specified in markdown by html
        text = markdown2.markdown(text).replace("<p>", "").replace(
            "</p>", "")
        # Remove trailing new line
        text = text[0:-1]
        return text

    def format_definition(self, definition):
        # Capitalize first letter, format markdown and end with dot
        if definition:
            definition = definition[0].upper() + definition[1:]
            definition = self._format_markdown(definition)
            definition += "."
        return definition

    def linked_listing(self, uri_list, prefix="", suffix="", sort=True):
        """Return an English listing ("a, b and c") of term links."""
        linked_listing = prefix
        if sort:
            uri_list = self.owl.sorted_by_labels(uri_list)
        for i, uri in enumerate(uri_list):
            if i == 0:
                sep = ""
            elif i == len(uri_list):
                # NOTE(review): this branch is unreachable (enumerate
                # yields i up to len(uri_list)-1), so " and " is never
                # inserted — likely meant `i == len(uri_list) - 1`
                sep = " and "
            else:
                sep = ", "
            linked_listing += sep + self.term_link(uri)
        return linked_listing + suffix

    def term_link(self, term_uri, tag="a", text=None):
        """Return an html link (or <dfn> definition) for a term."""
        href = ""
        if self.owl.is_external_namespace(term_uri):
            href = " href =\"" + str(term_uri) + "\""
        if text is None:
            text = self.owl.get_label(term_uri)
        term_link = "<" + tag + " title=\"" + \
            self.owl.get_name(term_uri) + \
            "\"" + href + ">" + text + "</" + tag + ">"

        # # This could be handled by Respec, here we overwrite the id and href
        # # fields in order to be able to have an id that is not generated from
        # # the title field. e.g. title = nidm_0000001 (nidm:Map) and
        # # id = nidm_0000001
        # name_lw = self.owl.get_name(term_uri).lower()
        # if tag is "dfn":
        #     link_info = " id=\"dfn-" + name_lw + "\""
        # elif tag is "a":
        #     link_info = " href=\"#dfn-" + name_lw + "\""
        # term_link = "<" + tag + link_info + \
        #     " class=\"internalDFN\"" + \
        #     " title=\"" + self.owl.get_name(term_uri) + \
        #     " (" + self.owl.get_label(term_uri) + ")" + \
        #     "\"" + href + ">" + text + "</" + tag + ">"

        # NOTE(review): `is` compares identity, not equality; it works
        # for short interned literals in CPython but should be
        # `tag == "dfn"`
        if tag is "dfn":
            issue_url = "https://github.com/incf-nidash/nidm/issues"
            # Add link to current definition
            term_link = self.term_link(term_uri, text=term_link)
            if self.commentable:
                term_link = term_link + \
                    " <a href=\"" + issue_url + "?&q=is%3Aopen+'" + text + \
                    "'\"\"><sup>☆</sup></a>" + \
                    "<a href=\"" + issue_url + "/new\";\"><sup>+</sup></a>"
        return term_link

    def create_class_section(self, class_uri, definition, attributes,
                             used_by=None, generated_by=None,
                             derived_from=None, children=False,
                             is_range=False):
        """Emit one <section> for a class: definition, prov
        relationships, attribute list, examples, children and
        individuals. Recurses into children and (if `is_range`) into
        range classes."""
        class_label = self.owl.get_label(class_uri)
        class_name = self.owl.get_name(class_uri)
        definition = self.format_definition(definition)
        self.text += """
    <!-- """ + class_label + """ (""" + class_name + """)""" + """ -->
    <section id="section-""" + class_label + """">
        <h1 label=\"""" + class_name + """\">""" + class_label + """</h1>
        <div class="glossary-ref">
            """ + self.term_link(class_uri, "dfn") + ": " + definition

        # "X is a Y" sentence: prefer the NIDM parent, fall back to the
        # PROV class
        self.text += "<p> " + self.term_link(class_uri) + " is"
        nidm_class = self.owl.get_nidm_parent(class_uri)
        if nidm_class:
            self.text += " a " + self.term_link(nidm_class)
        else:
            prov_class = self.owl.get_prov_class(class_uri)
            if prov_class:
                self.text += " a " + self.owl.get_label(prov_class)

        # "used by" relations in both directions
        found_used_by = False
        if used_by:
            if class_uri in used_by:
                self.text += self.linked_listing(used_by[class_uri],
                                                 " used by ")
                found_used_by = True
            used_entities = list()
            for used_entity, used_activities in used_by.items():
                for used_act in used_activities:
                    if used_act == class_uri:
                        used_entities.append(used_entity)
            if used_entities:
                self.text += self.linked_listing(used_entities,
                                                 " that uses ",
                                                 " entities")

        # "generated by" relations in both directions
        found_generated_by = False
        if generated_by:
            if class_uri in generated_by:
                if found_used_by:
                    self.text += " and "
                self.text += self.linked_listing(
                    list([generated_by[class_uri]]), " generated by ")
                found_generated_by = True
            if class_uri in generated_by.values():
                generated_entities = list()
                for generated_entity, generated_act in \
                        generated_by.items():
                    if generated_act == class_uri:
                        generated_entities.append(generated_entity)
                if generated_entities:
                    self.text += self.linked_listing(
                        generated_entities,
                        ". This activity generates ", " entities")

        if derived_from:
            if class_uri in derived_from:
                if found_used_by or found_generated_by:
                    self.text += " and "
                self.text += self.linked_listing(
                    list([derived_from[class_uri]]), " derived from ")

        # Children listing ("has the following child/children: ...")
        class_children = self.owl.get_direct_children(class_uri)
        if class_children:
            if found_used_by or found_generated_by:
                self.text += ". It "
            else:
                self.text += " and "
            self.text += " has the following child"
            if len(class_children) > 1:
                self.text += "ren"
            self.text += ": " + \
                self.linked_listing(class_children)
        self.text += "."
        self.text += "</p>"

        range_classes = list()

        self.text += """
        </div>"""

        # Attribute list (skipped when the only attribute is the
        # crypto:sha512 checksum)
        if attributes and (attributes != set([CRYPTO['sha512']])):
            self.text += """
        <p></p>
        <div class="attributes" id="attributes-""" + class_label + \
                """"> A """ + \
                self.term_link(class_uri) + """ has attributes:
        <ul>
            <li><span class="attribute" id=\"""" + \
                class_label + """.label">rdfs:label</span>: \
(<em class="rfc2119" title="OPTIONAL">OPTIONAL</em>) """ \
                """Human readable description of the """ + \
                self.term_link(class_uri) + """.</li>"""

            for att in sorted(attributes):
                # Do not display prov relations as attributes
                # (except prov:atLocation...)
                if not self.owl.is_prov(att) or \
                        (att == PROV['atLocation']):
                    if att not in self.attributes_done:
                        # First definition of this attribute
                        att_tag = "dfn"
                    else:
                        att_tag = "a"
                    self.attributes_done.add(att)

                    # if att_label.startswith("nidm:"):
                    att_def = self.owl.get_definition(att)
                    self.text += """
            <li>""" + self.term_link(att, att_tag) + \
                        '</span>: (<em class="rfc2119" title="OPTIONAL">' + \
                        'OPTIONAL</em>) ' + \
                        self.format_definition(att_def)

                    # Document the attribute's range (and the range's
                    # direct children as examples)
                    if att in self.owl.parent_ranges:
                        child_ranges = list()
                        for parent_range in self.owl.parent_ranges[att]:
                            child_ranges += \
                                self.owl.get_direct_children(
                                    parent_range)
                            if self.owl.get_label(parent_range).\
                                    startswith('nidm'):
                                range_classes.append(parent_range)
                        child_ranges = sorted(child_ranges)

                        # if nidm_namespace:
                        child_range_txt = ""
                        if child_ranges:
                            # Get all child ranges
                            child_range_txt = self.linked_listing(
                                child_ranges, " such as ")

                        self.text += self.linked_listing(
                            self.owl.parent_ranges[att], " (range ",
                            child_range_txt + ")")
                    self.text += "."
                    self.text += "</li>"

            self.text += """
        </ul>
        </div>"""

        # Turtle examples fetched from the repository
        BASE_REPOSITORY = "https://raw.githubusercontent.com/" + \
            "incf-nidash/nidm/master/"
        for title, example in self.owl.get_example(class_uri,
                                                   BASE_REPOSITORY):
            self.text += """
        </ul>
        </div>
        <pre class='example highlight' title=\"""" + title + """\">""" + \
                cgi.escape(example) + """</pre>"""

        # For object property list also children (in sub-sections)
        if children:
            direct_children = self.owl.sorted_by_labels(
                self.owl.get_direct_children(class_uri))
            for child in direct_children:
                if not child in self.already_defined_classes:
                    self.create_class_section(
                        child,
                        self.owl.get_definition(child),
                        self.owl.attributes.setdefault(child, None),
                        children=True)
                    self.already_defined_classes.append(child)

        # Display individuals
        individuals = self.owl.sorted_by_labels(
            self.owl.get_individuals(class_uri))
        if individuals:
            self.text += \
                " Examples of " + self.term_link(class_uri) + \
                " includes " + "<ul>"
            for indiv in individuals:
                self.text += "<li>" + self.term_link(indiv, "dfn") + \
                    ": " + \
                    self.format_definition(
                        self.owl.get_definition(indiv)) + \
                    "</li>"
            self.text += "</ul>"

        # When documenting a range class, also document the nidm range
        # classes collected above
        if is_range:
            self.text += """
        </section>"""
            for range_name in self.owl.sorted_by_labels(range_classes):
                if not range_name in self.already_defined_classes:
                    self.already_defined_classes.append(range_name)
                    self.create_class_section(
                        range_name,
                        self.owl.get_definition(range_name),
                        self.owl.attributes.setdefault(range_name,
                                                       None),
                        children=True, is_range=True)
        if not is_range:
            self.text += """
        </section>"""

    def close_sections(self):
        """Close every <section> opened by create_title."""
        for x in range(0, self.section_open):
            self.text += "\t" * x + "</section>\n"

    # Write out specification
    def write_specification(self, spec_file=None, component=None,
                            version=None):
        """Write self.text (utf-8) to `spec_file`, or to the standard
        DOC_FOLDER location when component/version are given."""
        if component and version:
            spec_file = os.path.join(DOC_FOLDER,
                                     component + "_" + version +
                                     ".html")
        spec_open = codecs.open(spec_file, 'w', "utf-8")
        spec_open.write(self.text)
        spec_open.close()

    def _header_footer(self, prev_file=None, follow_file=None,
                       component=None, version=None):
        """Prepend the header include and append the footer (and
        optional release notes) around self.text."""
        release_notes = None
        if component:
            # Prefer a version-specific header/footer, fall back to the
            # generic one
            prev_file = os.path.join(INCLUDE_FOLDER,
                                     component + "_" + version +
                                     "_head.html")
            if not os.path.isfile(prev_file):
                prev_file = os.path.join(INCLUDE_FOLDER,
                                         component + "_head.html")
            follow_file = os.path.join(
                INCLUDE_FOLDER,
                component + "_" + version + "_foot.html")
            if not os.path.isfile(follow_file):
                follow_file = os.path.join(INCLUDE_FOLDER,
                                           component + "_foot.html")
            if version:
                release_notes = os.path.join(
                    os.path.dirname(self.owl.file),
                    component + "_" + version + "_notes.html")
                if not os.path.isfile(release_notes):
                    release_notes = None

        if prev_file is not None:
            prev_file_open = open(prev_file, 'r')
            self.text = prev_file_open.read().decode('utf-8') + self.text
            prev_file_open.close()

        if release_notes is not None:
            release_note_open = open(release_notes, 'r')
            self.text = self.text + release_note_open.read()
            release_note_open.close()

        if follow_file is not None:
            follow_file_open = open(follow_file, 'r')
            self.text = self.text + follow_file_open.read()
            follow_file_open.close()
class UpdateTermReadme():
    """Regenerates the terms curation README (html tables of classes,
    properties and individuals with their curation status) from an owl
    file."""

    def __init__(self, owl_file):
        self.owl = OwlReader(owl_file)

    # Write out Readme
    def write_readme(self, readme_file, readme_txt):
        readme_file_open = open(readme_file, 'w')
        readme_file_open.write(readme_txt)
        readme_file_open.close()

    def create_term_row(self, term_name, definition, same_as, editor,
                        note, color, range_value=None, domain=None,
                        indiv_type=None):
        """Return one html <tr> for a term: curation-status color
        image, editor note (with issue/PR links), definition, and
        optional domain/range/type cells."""
        img_color = ""
        if color:
            img_color = '<img src="../../../doc/content/specs/img/' + \
                color + '.png?raw=true"/> '
        if same_as:
            same_as = "(same as: <a href=" + same_as + ">" + same_as + \
                "</a>)"
        range_domain_type = ""
        if range_value is not None:
            range_domain_type = """
                <td>""" + domain + """</td>
                <td>""" + range_value + """</td>"""
        if indiv_type is not None:
            range_domain_type += """
                <td>""" + indiv_type + """</td>"""

        # Github mardow-like links
        nidm_repo = "https://github.com/incf-nidash/nidm/"
        stato_repo = "https://github.com/ISA-tools/stato/"
        nidm_pr_issue = re.compile(nidm_repo + r'[a-zA-Z]*/(\d+)')
        note = nidm_pr_issue.sub(
            r'<a href="' + nidm_repo + r'pull/\1">' + r'#\1</a>', note)
        stato_pr_issue = re.compile(stato_repo + r'[a-zA-Z]*/(\d+)')
        note = stato_pr_issue.sub(
            r'<a href="' + stato_repo + r'pull/\1">' +
            r'ISA-tools/stato#\1</a>', note)

        if note:
            note = note + "<br/>"

        # Add a search link (to check current state of the repo)
        if "Under discussion" in note:
            search_text = "more"
        else:
            search_text = "find issues/PR"
        note = note + "<a href=\"" + nidm_repo + "/issues?&q=" + \
            term_name.split(":")[1] + "\"> [" + search_text + "] </a>"

        term_row = """
        <tr>
            <td>""" + img_color + """</td>
            <td>""" + note + """</td>
            <td><b>""" + term_name + """: </b>""" + definition + \
            same_as + editor + """</td>""" + range_domain_type + """
        </tr>"""
        return term_row

    def create_curation_legend(self, order):
        """Return an html legend mapping each curation color (in the
        given order, deduplicated) to its meaning."""
        curation_legend = "<b>Curation status</b>: \n"
        curation_colors_sorted = [(key, CURATION_COLORS.get(key))
                                  for key in order]
        covered_colors = list()
        for curation_color in curation_colors_sorted:
            # curation_status = str(self.owl.qname(curation_color[0]))
            # curation_status_labels = self.owl.objects(
            #     curation_color[0], RDFS['label'])
            # curation_status = ", ".join(list(curation_status_labels))
            color = curation_color[1]
            if not color in covered_colors:
                curation_legend = curation_legend + \
                    '<img src="../../../doc/content/specs/img/' + \
                    color + '.png?raw=true"/> ' + \
                    CURATION_LEGEND[color] + ";\n"
                covered_colors.append(color)
        return curation_legend

    # Get README text according to owl file information
    def update_readme(self, readme_file):
        """Collect curation metadata for every class/property/individual
        in the owl file and rewrite the README with three html tables."""
        class_terms = dict()
        prpty_terms = dict()
        indiv_terms = dict()
        definitions = dict()
        editors = dict()
        notes = dict()
        ranges = dict()
        domains = dict()
        sameas = dict()
        types = dict()
        for owl_term in self.owl.classes.union(
                self.owl.properties).union(self.owl.individuals):
            curation_status = self.owl.get_curation_status(owl_term)
            definition = self.owl.get_definition(owl_term)
            if definition == "":
                definition = "<undefined>"
            editor = self.owl.get_editor(owl_term)
            note = self.owl.get_editor_note(owl_term)
            range_value = self.owl.get_range(owl_term)
            domain = self.owl.get_domain(owl_term)
            same = self.owl.get_same_as(owl_term)
            indiv_type = self.owl.get_individual_type(owl_term)
            if curation_status:
                curation_key = curation_status
                term_key = self.owl.get_label(owl_term)
                # Only list terms from the nidm/spm/fsl/afni namespaces
                if term_key.startswith("nidm") or \
                        term_key.startswith("spm") or \
                        term_key.startswith("fsl") or \
                        term_key.startswith("afni"):
                    if owl_term in self.owl.classes:
                        class_terms.setdefault(
                            curation_key, list()).append(term_key)
                    else:
                        if owl_term in self.owl.properties:
                            prpty_terms.setdefault(
                                curation_key, list()).append(term_key)
                        else:
                            if owl_term in self.owl.individuals:
                                indiv_terms.setdefault(
                                    curation_key,
                                    list()).append(term_key)
                    definitions[term_key] = definition
                    editors[term_key] = editor
                    notes[term_key] = note
                    ranges[term_key] = range_value
                    domains[term_key] = domain
                    sameas[term_key] = same
                    types[term_key] = indiv_type

        # Include missing keys and do not display ready for release terms
        order = CURATION_ORDER + (list(
            set(class_terms.keys()).union(set(prpty_terms.keys())) -
            set(CURATION_ORDER + list([OBO_READY]))))

        class_terms_sorted = [(key, class_terms.get(key))
                              for key in order]
        prpty_terms_sorted = [(key, prpty_terms.get(key))
                              for key in order]
        indiv_terms_sorted = [(key, indiv_terms.get(key))
                              for key in order]

        class_table_txt = "<h2>Classes</h2>\n<table>\n<tr><th>Curation Status</th><th>Issue/PR</th><th>Term</th></tr>"
        for tuple_status_term in class_terms_sorted:
            curation_status = tuple_status_term[0]
            class_names = tuple_status_term[1]
            if class_names:
                for class_name in sorted(class_names):
                    class_table_txt += self.create_term_row(
                        class_name,
                        definitions[class_name],
                        sameas[class_name],
                        editors[class_name],
                        notes[class_name],
                        CURATION_COLORS.setdefault(curation_status, ""))
        class_table_txt = class_table_txt + "\n</table>"

        prpty_table_txt = "<h2>Properties</h2>\n<table>\n<tr><th>Curation Status</th><th>Issue/PR</th><th>Term</th><th>Domain</th><th>Range</th></tr>"
        for tuple_status_term in prpty_terms_sorted:
            curation_status = tuple_status_term[0]
            term_names = tuple_status_term[1]
            if term_names:
                for term_name in sorted(term_names):
                    prpty_table_txt += self.create_term_row(
                        term_name,
                        definitions[term_name],
                        sameas[term_name],
                        editors[term_name],
                        notes[term_name],
                        CURATION_COLORS.setdefault(curation_status, ""),
                        ranges[term_name],
                        domains[term_name])
        prpty_table_txt = prpty_table_txt + "\n</table>"

        indiv_table_txt = "<h2>Individuals</h2>\n<table>\n<tr><th>Curation Status</th><th>Issue/PR</th><th>Term</th><th>Type</th></tr>"
        for tuple_status_term in indiv_terms_sorted:
            curation_status = tuple_status_term[0]
            term_names = tuple_status_term[1]
            if term_names:
                for term_name in sorted(term_names):
                    indiv_table_txt += self.create_term_row(
                        term_name,
                        definitions[term_name],
                        sameas[term_name],
                        editors[term_name],
                        notes[term_name],
                        CURATION_COLORS.setdefault(curation_status, ""),
                        None, None, types[term_name])
        indiv_table_txt = indiv_table_txt + "\n</table>"

        curation_legend = self.create_curation_legend(order)

        title = "<h1>NIDM-Results Terms curation status</h1>"
        intro = """You will find below a listing of the NIDM-Results terms that \
need to be curated. If you would like **to help with the curation of a term, \
please follow those steps**:
 1. Check if the terms is already under discussion in an issue.
 2. If not, create a new issue including the current definition (available in\
 the table below) and your proposed update.
If possible, priority should be given to uncurated terms (in red).
Thank you in advance for taking part in NIDM-Results term curation!\n\n"""

        self.write_readme(readme_file, title + intro +
                          curation_legend + class_table_txt +
                          prpty_table_txt + indiv_table_txt)
def __init__(self):
    """Instantiate the owl and designator readers used for parsing."""
    # Parser for owl log files
    self.rdrOwl = OwlReader()
    # Parser for designator dumps
    self.rdrDesig = DesignatorReader()
def __init__(self, owl_file):
    """Parse `owl_file` and keep the reader on the instance."""
    reader = OwlReader(owl_file)
    self.owl = reader
class MemoryCondenser:
    """Condenses several recorded task-tree 'experiences' (OWL logs) into a
    single generalized structure and renders the results as text, JSON or
    Graphviz DOT.

    NOTE(review): this module is written for Python 2 (print statements,
    ``dict.items() +`` concatenation, ``list.sort(cmp)``).
    """

    def __init__(self, parent=None):
        # parent is accepted but unused here — presumably a Qt-style
        # convention; confirm before removing.
        self.rdrOwl = OwlReader()
        self.rdrDesig = DesignatorReader()
        # All loaded Log instances, in load order.
        self.arrExperiences = []

    def countExperiences(self):
        """Return the number of experiences loaded so far."""
        return len(self.arrExperiences)

    def addExperience(self, expAdd):
        """Append an already-built Log instance to the experience list."""
        self.arrExperiences.append(expAdd)

    def loadExperience(self, strOwlFile, strDesignatorFile):
        """Parse one OWL log (plus optional designator file) into a Log and
        register it."""
        logReturn = Log()
        logReturn.setOwlData(self.rdrOwl.loadOwl(strOwlFile))
        if strDesignatorFile != "":
            logReturn.setDesignatorData(
                self.rdrDesig.loadDesignators(strDesignatorFile))
        self.addExperience(logReturn)

    def condenseData(self, dataOwl):
        """Condense a single parsed OWL data dict into a nested
        subTypes/individuals structure; returns None when no metadata exists."""
        result = None
        self.tti = dataOwl["task-tree-individuals"]
        owlMeta = dataOwl["metadata"]
        owlAnnot = dataOwl["annotation"]
        if owlMeta:
            result = {
                "Toplevel": self.condenseNodes("", owlMeta.subActions())
            }
        else:
            print "No meta data in file!"
        return result

    def condenseNodes(self, strParentNode, arrNodes, nLevel=0):
        """Recursively group sibling nodes by taskContext.

        Nodes with the same context are unified via unifyResults; each
        concrete node is recorded under its type's "individuals" map with
        its parameters, parent and (first) failure type.
        """
        arrTypes = {}
        arrIndividuals = {}
        for strNode in arrNodes:
            owlNode = self.tti[strNode]
            ident = owlNode.taskContext()
            failures = owlNode.failures()
            failure = ""
            if len(failures) > 0:
                # Only the first recorded failure is kept.
                failure = self.tti[failures[0]].type()
            result = self.condenseNodes(strNode, owlNode.subActions(),
                                        nLevel + 1)
            if not ident in arrTypes:
                arrTypes[ident] = result
            else:
                arrTypes[ident] = self.unifyResults(arrTypes[ident], result)
            arrTypes[ident]["individuals"][strNode] = {
                "parameters": owlNode.annotatedParameters(True),
                "parent": strParentNode,
                "failure": failure
            }
        return {"subTypes": arrTypes, "individuals": {}}

    def unifyResults(self, res1, res2):
        """Merge two condenseNodes results: union of individuals, recursive
        merge of subTypes present in both."""
        resparams = {}
        if len(res1["individuals"]) > 0:
            resparams = res1["individuals"]
        if len(res2["individuals"]) > 0:
            # Python-2-only idiom: dict built from concatenated item lists.
            resparams = dict(resparams.items() + res2["individuals"].items())
        unified = {"subTypes": {}, "individuals": resparams}
        for ressub1 in res1["subTypes"]:
            if ressub1 in res2["subTypes"]:
                unified["subTypes"][ressub1] = self.unifyResults(
                    res1["subTypes"][ressub1], res2["subTypes"][ressub1])
            else:
                unified["subTypes"][ressub1] = res1["subTypes"][ressub1]
        for ressub2 in res2["subTypes"]:
            if not ressub2 in res1["subTypes"]:
                unified["subTypes"][ressub2] = res2["subTypes"][ressub2]
        return unified

    def condense(self):
        """Condense all loaded experiences by context and print the resulting
        {tree, parameters} dict."""
        arrStartNodes = []
        self.tti = {}
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            arrStartNodes += metaData.subActions()
            self.tti.update(owlData["task-tree-individuals"])
        self.processed_nodes = []
        tree = self.condenseNodesByContext(arrStartNodes)
        parameters = {}
        for node in self.processed_nodes:
            params = self.tti[node].annotatedParameters(
                bSingularParameters=True)
            if len(params) > 0:
                parameters[node] = {}
                for p in params:
                    if not p == "_time_created":
                        # NOTE(review): lstrip("parameter-") strips any of the
                        # characters p,a,r,m,e,t,- from the left, not the
                        # prefix string; other methods here use slicing
                        # (p[10:]) instead — likely a latent bug.
                        parameters[node][p.lstrip("parameter-")] = params[p]
        result = {"tree": tree, "parameters": parameters}
        print result

    def sortComparatorActionTime(self, action1, action2):
        """cmp-style comparator ordering actions by start time."""
        if action1.timeSpan[0] > action2.timeSpan[0]:
            return 1
        elif action1.timeSpan[0] == action2.timeSpan[0]:
            return 0
        else:
            return -1

    def sortActionsByTime(self, actions):
        """Sort *actions* in place by start time and return the same list."""
        actions.sort(self.sortComparatorActionTime)
        return actions

    def condenseNodesByContext(self, nodes):
        """Group *nodes* by taskContext, recording terminal (childless) nodes
        per context and recursing into the pooled children of each context."""
        # Separate nodes by their taskContext
        dicContexts = {}
        for node in nodes:
            self.processed_nodes.append(node)
            owlNode = self.tti[node]
            if not owlNode.taskContext() in dicContexts:
                dicContexts[owlNode.taskContext()] = {
                    "nodes": [],
                    "terminal-state": []
                }
            dicContexts[owlNode.taskContext()]["nodes"].append(node)
        for context in dicContexts:
            all_children = []
            for node in dicContexts[context]["nodes"]:
                sub_actions = self.sortActionsByTime(
                    self.tti[node].subActions())
                if len(sub_actions) > 0:
                    all_children += sub_actions
                else:
                    dicContexts[context]["terminal-state"].append(node)
            dicContexts[context]["children"] = self.condenseNodesByContext(
                all_children)
        return dicContexts

    def generalizeExperiences(self):
        """Inject every top-level action of every experience into a shared
        generalized structure and print it."""
        self.generalizedExperience = {}
        self.tti = {}
        arrStartNodes = []
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            arrStartNodes += metaData.subActions()
            self.tti.update(owlData["task-tree-individuals"])
        for node in arrStartNodes:
            self.injectActionIntoGeneralizedExperience(
                node, self.generalizedExperience)
        print self.generalizedExperience

    def injectActionIntoGeneralizedExperience(self, action, target_branch):
        # NOTE(review): stub — only writes a placeholder value; the real
        # injection logic appears to be unimplemented.
        target_branch["a"] = 5

    def dotNode(self, node, first):
        """Recursively emit DOT fragments for *node*'s sub-actions: same-rank
        grouping, boxes labelled by taskContext, parent and sibling edges."""
        dot = ""
        tti = self.t_tti[node]
        former_subnode = ""
        dot += "  {rank=same;"
        for subnode in tti.subActions():
            dot += " " + subnode
        dot += "}\n"
        for subnode in tti.subActions():
            dot += "  " + subnode + " [shape=box, label=\"" + self.t_tti[
                subnode].taskContext() + "\"]\n"
            #dot += "  edge ";
            if first == True:
                first = False
                dot += "edge [dir=both, arrowhead=normal, arrowtail=none]"
                dot += "\n  " + node + " -> " + subnode + "\n"
            else:
                pass
                #dot += "[dir=both, arrowhead=diamond, arrowtail=ediamond]"
            if not former_subnode == "":
                # Chain consecutive siblings in time order.
                dot += "  edge [arrowhead=empty, arrowtail=none]\n"
                dot += "  " + former_subnode + " -> " + subnode + "\n"
            dot += self.dotNode(subnode, True)
            former_subnode = subnode
        if len(tti.subActions()) == 0 and tti.nextAction() == None:
            #dot += "  terminal_state_" + node + " [shape=doublecircle, label=\"\"]\n"
            #dot += "  edge [arrowhead=empty, arrowtail=none]\n"
            #dot += "  " + node + " -> terminal_state_" + node + "\n"
            pass
        return dot

    def printExperiences(self, dot):
        """Print every experience, as DOT when *dot* is truthy, else as
        indented text."""
        for experience in self.arrExperiences:
            if dot:
                self.printDotExperience(experience)
            else:
                self.printRawExperience(experience)

    def printRawExperience(self, experience):
        """Print one experience as an indented tree of contexts/parameters."""
        owlData = experience.getOwlData()
        metaData = owlData["metadata"]
        start_nodes = metaData.subActions()
        self.t_tti = owlData["task-tree-individuals"]
        for node in start_nodes:
            self.printRawExperienceNode(node)

    def printRawExperienceNode(self, node, level=0):
        """Print one node as 'Context (k=v, ...)' indented by *level*, then
        recurse into its sub-actions."""
        indent = "  " * level
        owl = self.t_tti[node]
        parameters = owl.annotatedParameters()
        param_str = "("
        first = True
        for parameter in parameters:
            if not parameter == "_time_created":
                if first == True:
                    first = False
                else:
                    param_str = param_str + ", "
                # Strip the literal "parameter-" prefix via slicing.
                key_str = parameter[
                    10:] if parameter[:10] == "parameter-" else parameter
                param_str = param_str + key_str + "=" + parameters[parameter][0]
        param_str = param_str + ")"
        print indent + owl.taskContext() + " " + param_str
        if len(owl.subActions()) > 0:
            for node in owl.subActions():
                self.printRawExperienceNode(node, level + 1)

    def printDotExperience(self, experience):
        """Print one experience as a standalone DOT digraph."""
        owlData = experience.getOwlData()
        metaData = owlData["metadata"]
        start_nodes = metaData.subActions()
        self.t_tti = owlData["task-tree-individuals"]
        dot = "digraph plangraph {\n"
        dot += "  label=\"Original Experiences\"\n"
        dot += "  labeljust=center\n"
        dot += "  labelloc=top\n"
        for node in start_nodes:
            dot += "  " + node + " [shape=box, label=\"" + self.t_tti[
                node].taskContext() + "\"]\n\n"
            dot += self.dotNode(node, True)
        dot += "}\n"
        print dot

    def compareSubActions(self, subaction1, subaction2):
        """cmp-style comparator: 1 if subaction2 follows subaction1 in the
        next-action chain, 0 if equal, -1 otherwise.

        NOTE(review): the loop re-reads self.tti[subaction1].nextAction()
        every iteration instead of following next_action, so it never
        advances — if that value is non-None and != subaction2 this loops
        forever. Looks like it should be self.tti[next_action].nextAction().
        """
        if subaction1 == subaction2:
            return 0
        next_action = subaction1
        while next_action != None:
            next_action = self.tti[subaction1].nextAction()
            if next_action == subaction2:
                return 1
        return -1

    def sortSubActions(self, subactions):
        """Sort *subactions* in place using compareSubActions; returns the
        same list."""
        subactions.sort(self.compareSubActions)
        return subactions

    def sortSubActionsList(self, subactions_list):
        """Apply sortSubActions to each list in *subactions_list*."""
        sorted_list = []
        for subactions in subactions_list:
            sorted_list.append(self.sortSubActions(subactions))
        return sorted_list

    def generalizeNodes(self, nodes):
        """Return the list of sub-action lists for each node in *nodes*."""
        sequences = []
        for node in nodes:
            sequences.append(self.tti[node].subActions())
        return sequences

    def workOnExperiences(self):
        """Collect all start nodes across experiences and print their
        sub-action sequences."""
        start_nodes = []
        self.tti = {}
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            start_nodes += metaData.subActions()
            self.tti.update(owlData["task-tree-individuals"])
        print self.generalizeNodes(start_nodes)

    def injectExperiences(self, deduced=False, data=False):
        """Build the merged 'injected' graph from all experiences, annotate
        terminal/optional states, then print either the deduced pathways
        (deduced=True) or the injected graph — as DOT unless data=True."""
        self.arrInjected = {}
        self.tti = {}
        self.uid_counter = 0
        root_action_count = 0
        # First pass: inject every root action of every experience.
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            self.tti.update(owlData["task-tree-individuals"])
            for node in metaData.subActions():
                self.injectExperienceNode(node, self.arrInjected)
                root_action_count = root_action_count + 1
        # Second pass: mark terminal-state occurrences and optional nodes.
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            self.tti.update(owlData["task-tree-individuals"])
            for node in metaData.subActions():
                self.checkForTerminalStateOccurrences(
                    self.tti[node].taskContext(), self.arrInjected)
            for node in metaData.subActions():
                self.checkForOptionalInjectedNodes(
                    self.tti[node].taskContext(), self.arrInjected)
        if deduced:
            self.printDeduced(dot=not data,
                              root_action_count=root_action_count)
        else:
            self.printInjected(dot=not data)

    def injectExperienceNode(self, node, frame, rootlevel=False,
                             invocation_path={}):
        """Merge one concrete node (and, recursively, its sub-actions and
        next-action chain) into *frame*, keyed by taskContext.

        NOTE(review): the mutable default for invocation_path is shared
        across calls; the code copies it before mutating, so this is
        currently harmless, but a None default would be safer.
        """
        ctx = self.tti[node].taskContext()
        params = self.tti[node].annotatedParameters()
        params_fixed = {}
        call_pattern = ""
        for param in params:
            if not param == "_time_created" and not param == "CALLPATTERN":
                key_str = param[10:] if param[:10] == "parameter-" else param
                params_fixed[key_str] = params[param][0]
            elif param == "CALLPATTERN":
                call_pattern = params[param][0]
        new_invocation_path = invocation_path.copy()
        if not ctx in frame:
            new_invocation_path.update({self.uid_counter: params_fixed})
            frame[ctx] = {
                "children": {},
                "next-actions": {},
                "uid": self.uid_counter,
                "terminal-state": "false",
                "start-state": "false",
                "optional": "false",
                "instances": 0,
                "invocations": [new_invocation_path],
                "call-pattern": call_pattern
            }
            self.uid_counter = self.uid_counter + 1
        else:
            new_invocation_path.update({frame[ctx]["uid"]: params_fixed})
            frame[ctx]["invocations"].append(new_invocation_path)
        frame[ctx]["instances"] = frame[ctx]["instances"] + 1
        sub_nodes = self.tti[node].subActions()
        for sub in sub_nodes:
            self.injectExperienceNode(sub, frame[ctx]["children"], False,
                                      new_invocation_path)
        next_node = self.tti[node].nextAction()
        if next_node:
            # Walk the next-action chain, recording context transitions.
            current_ctx = ctx
            while next_node:
                nextCtx = self.tti[next_node].taskContext()
                call_pattern = self.tti[next_node].annotatedParameterValue(
                    "CALLPATTERN")
                if not call_pattern:
                    call_pattern = ""
                if not current_ctx in frame:
                    frame[current_ctx] = {
                        "children": {},
                        "next-actions": {},
                        "uid": self.uid_counter,
                        "terminal-state": "false",
                        "start-state": "false",
                        "optional": "false",
                        "instances": 0,
                        "invocations": [],
                        "call-pattern": call_pattern
                    }
                    self.uid_counter = self.uid_counter + 1
                if not nextCtx in frame[current_ctx][
                        "next-actions"] and not rootlevel:
                    if not nextCtx == current_ctx:
                        if not nextCtx in frame[current_ctx]["next-actions"]:
                            frame[current_ctx]["next-actions"][nextCtx] = []
                        params = self.tti[next_node].annotatedParameters()
                        params_fixed = {}
                        for param in params:
                            if not param == "_time_created":
                                key_str = param[
                                    10:] if param[:10] == "parameter-" else param
                                params_fixed[key_str] = params[param][0]
                        frame[current_ctx]["next-actions"][nextCtx].append(
                            params_fixed)
                next_node = self.tti[next_node].nextAction()
                current_ctx = nextCtx
        else:
            # No successor and no children: this node ends its branch.
            if len(self.tti[node].subActions()) == 0:
                frame[ctx]["terminal-state"] = "true"
        if frame[ctx]["start-state"] == "false":
            if self.tti[node].previousAction() == None:
                frame[ctx]["start-state"] = "true"

    def checkForOptionalInjectedNodes(self, ctx, frame, parent_instances=-1,
                                      came_from=None):
        """Mark nodes 'optional' when they occur less often than their parent
        or are reached from a terminal predecessor; visits each node once
        via the 'check-optional' marker."""
        if not "check-optional" in frame[ctx]:
            frame[ctx]["check-optional"] = "done"
            came_from_terminates = False
            came_from_valid = True
            if came_from:
                if came_from == ctx:
                    came_from_valid = False
                if frame[came_from]["terminal-state"] == "true":
                    came_from_terminates = True
            if came_from_valid == True:
                if frame[ctx][
                        "instances"] < parent_instances or came_from_terminates:
                    frame[ctx]["optional"] = "true"
            for child in frame[ctx]["children"]:
                if frame[ctx]["children"][child]["start-state"] == "true":
                    self.checkForOptionalInjectedNodes(child,
                                                       frame[ctx]["children"],
                                                       frame[ctx]["instances"])
            for next_action in frame[ctx]["next-actions"]:
                if next_action in frame and not next_action == ctx:
                    self.checkForOptionalInjectedNodes(next_action, frame,
                                                       frame[ctx]["instances"],
                                                       ctx)

    def checkForTerminalStateOccurrences(self, ctx, frame):
        """Compute per-node 'terminal-instances': instances not accounted for
        by starting children or next-actions; visits each node once via the
        'check-terminal' marker."""
        if not "check-terminal" in frame[ctx]:
            frame[ctx]["check-terminal"] = "done"
            child_instances = 0
            next_instances = 0
            if frame[ctx]["terminal-state"] == "true":
                for child in frame[ctx]["children"]:
                    if frame[ctx]["children"][child]["start-state"] == "true":
                        child_instances = child_instances + frame[ctx][
                            "children"][child]["instances"]
                for next_action in frame[ctx]["next-actions"]:
                    if next_action in frame and not next_action == ctx:
                        next_instances = next_instances + frame[next_action][
                            "instances"]
                terminal_instances = frame[ctx]["instances"] - (
                    child_instances + next_instances)
                if terminal_instances > 0:
                    frame[ctx]["terminal-instances"] = terminal_instances
                else:
                    frame[ctx]["terminal-instances"] = 0
            else:
                frame[ctx]["terminal-state"] = "false"
                frame[ctx]["terminal-instances"] = 0
            for child in frame[ctx]["children"]:
                self.checkForTerminalStateOccurrences(child,
                                                      frame[ctx]["children"])
            for next_action in frame[ctx]["next-actions"]:
                if next_action in frame and not next_action == ctx:
                    self.checkForTerminalStateOccurrences(next_action, frame)

    def printDeduced(self, dot=False, root_action_count=1):
        """Expand all pathways from the first injected root, dump a cleaned
        copy to deduced_experiences.json, and print (DOT or raw)."""
        # TODO: Extend this to use all top-level nodes in case they
        # are different
        self.global_ctx_counter = 0
        # NOTE(review): .keys()[0] is Python-2-only and order-dependent.
        deduced = self.expandPathways(self.arrInjected.keys()[0],
                                      self.arrInjected, root_action_count)
        fixed_deduced = []
        for d in deduced:
            # Drop the first two steps and the uid-0/1 invocation entries.
            fixed_singular = d[2:]
            for step in fixed_singular:
                for invocation in step["invocations"]:
                    invocation.pop(0, 0)
                    invocation.pop(1, 0)
            fixed_deduced.append(fixed_singular)
        with open("deduced_experiences.json", "w") as f:
            json.dump(fixed_deduced, f)
        if dot:
            self.printDotDeduced(deduced)
        else:
            print deduced

    def expandPathways(self, ctx, nodes, root_action_count, trace=[]):
        """Enumerate all acyclic action pathways starting at *ctx*, carrying
        relative occurrence and termination frequencies per step.

        NOTE(review): mutable default for trace — never mutated in place
        (always 'trace + [...]'), so currently safe.
        """
        expanded_pathways = []
        if not nodes[ctx]["uid"] in trace:
            current_node = [{
                "node": ctx,
                "instances": nodes[ctx]["instances"],
                "uid": nodes[ctx]["uid"],
                "rel-occ": (float(nodes[ctx]["instances"]) /
                            float(root_action_count)),
                "rel-term": (float(nodes[ctx]["terminal-instances"]) /
                             float(nodes[ctx]["instances"])),
                "invocations": nodes[ctx]["invocations"],
                "call-pattern": nodes[ctx]["call-pattern"]
            }]
            children = self.getStartNodes(nodes[ctx]["children"])
            had_non_optional_children = False
            for child in children:
                if not children[child]["optional"] == "true":
                    had_non_optional_children = True
                child_pathways = self.expandPathways(
                    child, nodes[ctx]["children"], nodes[ctx]["instances"],
                    trace + [nodes[ctx]["uid"]])
                for child_pathway in child_pathways:
                    expanded_pathways.append(current_node + child_pathway)
            if not had_non_optional_children:
                # All children optional (or none): the node alone is a path.
                expanded_pathways.append(current_node)
            next_actions = nodes[ctx]["next-actions"]
            final_pathways = []
            had_non_optional_next_actions = False
            for next_action in next_actions:
                if next_action != ctx:
                    if not nodes[next_action]["optional"] == "true":
                        had_non_optional_next_actions = True
                    expanded_next_pathways = self.expandPathways(
                        next_action, nodes, nodes[ctx]["instances"],
                        trace + [nodes[ctx]["uid"]])
                    for expanded_next_pathway in expanded_next_pathways:
                        for expanded_pathway in expanded_pathways:
                            # Cross product: every prefix with every suffix.
                            final_pathways = final_pathways + [
                                expanded_pathway + expanded_next_pathway
                            ]
            if not had_non_optional_next_actions:
                final_pathways = final_pathways + expanded_pathways
            return final_pathways
        else:
            # Cycle detected — do not revisit.
            return []

    def getStartNodes(self, nodes):
        """Return the subset of *nodes* flagged as start states."""
        start_nodes = {}
        for node in nodes:
            if nodes[node]["start-state"] == "true":
                start_nodes[node] = nodes[node]
        return start_nodes

    def printInjected(self, dot=False):
        """Print the injected graph, as DOT when *dot* is truthy."""
        if dot:
            self.printInjectedDot()
        else:
            print self.arrInjected

    def printInjectedChildren(self, children, parent=None):
        """Recursively emit DOT for *children*: labelled boxes, terminal-state
        markers, parent edges (solid/dashed, optional labels) and same-rank
        next-action edges."""
        dot = ""
        edge_pointers = {}
        next_action_parameters = {}
        ids = {}
        optionals = {}
        parent_id = parent
        if not parent:
            parent_id = "root"
        for child in children:
            # Unique DOT identifier per node occurrence.
            child_id = "node_" + child.replace("-", "_") + "_" + str(
                self.counterdot)
            ids[child] = child_id
            self.counterdot = self.counterdot + 1
            label = child
            if label[:21] == "REPLACEABLE-FUNCTION-":
                label = label[21:]
            dot += "  " + child_id + " [shape=box, label=\"" + label + " (" + str(
                children[child]["uid"]) + " / " + str(
                    children[child]["instances"]) + ")\"]\n"
            if children[child]["terminal-state"] == "true":
                if children[child]["terminal-instances"] > 0:
                    dot += "  ts_" + str(
                        self.counterdot
                    ) + " [shape=doublecircle, label=\"" + str(
                        children[child]["terminal-instances"]) + "\"]\n"
                    dot += "  edge [style=dashed, arrowhead=normal, arrowtail=none, label=\"terminal\"]\n"
                    dot += "  " + child_id + " -> " + "ts_" + str(
                        self.counterdot) + "\n"
            dot += self.printInjectedChildren(children[child]["children"],
                                              child_id)
            if parent:
                if children[child]["start-state"] == "true":
                    if children[child]["optional"] == "true":
                        dot += "  edge [style=solid, arrowhead=normal, arrowtail=none, label=\"optional\"]\n"
                    else:
                        dot += "  edge [style=solid, arrowhead=normal, arrowtail=none, label=\"\"]\n"
                else:
                    # NOTE(review): both branches below are identical; the
                    # optional flag makes no difference for non-start nodes.
                    if children[child]["optional"] == "true":
                        dot += "  edge [style=dashed, arrowhead=none, arrowtail=none, label=\"\"]\n"
                    else:
                        dot += "  edge [style=dashed, arrowhead=none, arrowtail=none, label=\"\"]\n"
                dot += "  " + parent + " -> " + child_id + "\n"
            for na in children[child]["next-actions"]:
                if parent:
                    if not na in edge_pointers:
                        edge_pointers[na] = []
                        next_action_parameters[na] = {}
                    if not child_id in next_action_parameters[na]:
                        next_action_parameters[na][child_id] = []
                    edge_pointers[na].append(child_id)
                    #next_action_parameters[na][child_id].append(children[child]["next-actions"][na])
        #print "!"
        #print next_action_parameters
        for child in children:
            child_id = ids[child]
            if child in edge_pointers:
                for target in edge_pointers[child]:
                    param_str = ""
                    # for param_sets in next_action_parameters[child][target]:
                    #     for param_set in param_sets:
                    #         first_p = True
                    #         for p in param_set:
                    #             if first_p:
                    #                 first_p = False
                    #             else:
                    #                 param_str = param_str + ", "
                    #             param_str = param_str + p + " = " + param_set[p]
                    #         param_str = param_str + "\\n"
                    #if next_action_parameters[
                    if children[child]["optional"] == "true":
                        param_str = "optional"
                    else:
                        param_str = ""
                    dot += "  {rank=same; " + child_id + " " + target + "}\n"
                    dot += "  edge [style=solid, arrowhead=empty, arrowtail=none, label=\"" + param_str + "\"]\n"
                    dot += "  " + target + " -> " + child_id + "\n"
        return dot

    def printInjectedDot(self):
        """Print the whole injected graph as a DOT digraph."""
        self.counterdot = 0
        self.edge_pointers = {}
        dot = "digraph condensed {\n"
        dot += "  graph []\n"
        #ranksep=0.5
        #nodesep=0.5
        #pad=0.5
        dot += "  label=\"Condensed Experience Graph\"\n"
        dot += "  labeljust=center\n"
        dot += "  labelloc=top\n"
        dot += self.printInjectedChildren(self.arrInjected)
        dot += "}\n"
        print dot

    def expScore(self, exp):
        """Score a pathway: product of each step's rel-occ times the last
        step's rel-term."""
        acc_score = 1.0
        for item in exp:
            instances = item["instances"]
            rel_occ = item["rel-occ"]
            acc_score = acc_score * rel_occ
        last_item = exp[len(exp) - 1]
        acc_score = acc_score * last_item["rel-term"]
        return acc_score

    def expScoreCmp(self, exp1, exp2):
        """cmp-style comparator sorting pathways by descending score."""
        score1 = self.expScore(exp1)
        score2 = self.expScore(exp2)
        if score1 < score2:
            return 1
        elif score1 > score2:
            return -1
        else:
            return 0

    def printDotDeduced(self, deduced):
        """Print the deduced pathways as DOT, one cluster per pathway, each
        labelled with its score, best-scoring first."""
        counter = 0
        subgraphcounter = 0
        dot = "digraph deduced {\n"
        dot += "  label=\"Deduced Possible Action Paths\"\n"
        dot += "  labeljust=center\n"
        dot += "  labelloc=top\n"
        highest_score = 0
        # NOTE(review): highest_score is computed but never used afterwards.
        for line in deduced:
            acc_score = self.expScore(line)
            if acc_score > highest_score:
                highest_score = acc_score
        deduced.sort(self.expScoreCmp)
        for line in deduced:
            dot += "  \n"
            dot += "  subgraph cluster_" + str(subgraphcounter) + " {\n"
            dot += "    pencolor=transparent;\n"
            dot += "    \n"
            subgraphcounter = subgraphcounter + 1
            first = True
            acc_score = 1.0
            for item in line:
                instances = item["instances"]
                node = item["node"]
                rel_occ = item["rel-occ"]
                # Correct node label
                if node[:21] == "REPLACEABLE-FUNCTION-":
                    node = node[21:]
                acc_score = acc_score * rel_occ
                if not first:
                    dot += "    node_" + str(counter - 1) + " -> node_" + str(
                        counter) + "\n"
                else:
                    first = False
                dot += "    node_" + str(
                    counter) + " [shape=box, label=\"" + node + " (" + str(
                        round(rel_occ, 2)) + ")\"]\n"
                counter = counter + 1
            last_item = line[len(line) - 1]
            dot += "    ts_" + str(
                counter - 1) + " [shape=doublecircle, label=\"" + str(
                    round(last_item["rel-term"], 2)) + "\"]\n"
            dot += "    edge [style=dashed, arrowhead=normal, arrowtail=none, label=\"\"]\n"
            dot += "    node_" + str(counter - 1) + " -> " + "ts_" + str(
                counter - 1) + "\n"
            acc_score = acc_score * last_item["rel-term"]
            dot += "    \n"
            dot += "    label=\"Score: " + str(round(acc_score, 2)) + "\";\n"
            dot += "    labeljust=center;\n"
            dot += "    labelloc=top;\n"
            dot += "  }\n"
        dot += "}\n"
        print dot
class UpdateTermReadme():
    """Regenerates the curation-status README for NIDM-Results terms from an
    OWL file: one HTML table each for classes, properties and individuals,
    preceded by a color legend."""

    def __init__(self, owl_file):
        self.owl = OwlReader(owl_file)

    # Write out Readme
    def write_readme(self, readme_file, readme_txt):
        # Overwrites readme_file with readme_txt.
        readme_file_open = open(readme_file, 'w')
        readme_file_open.write(readme_txt)
        readme_file_open.close()

    def create_term_row(self, term_name, definition, same_as, editor, note,
                        color, range_value=None, domain=None, indiv_type=None):
        """Return one HTML <tr> for a term: curation-color image, issue/PR
        notes (with GitHub links substituted), definition, and optional
        domain/range or individual-type columns."""
        img_color = ""
        if color:
            img_color = '<img src="../../../doc/content/specs/img/'+color+'.png?raw=true"/> '
        if same_as:
            same_as = "(same as: <a href="+same_as+">"+same_as+"</a>)"
        range_domain_type = ""
        if range_value is not None:
            # Property rows get domain and range columns.
            range_domain_type = """
            <td>"""+domain+"""</td>
            <td>"""+range_value+"""</td>"""
        if indiv_type is not None:
            # Individual rows get a type column.
            range_domain_type += """
            <td>"""+indiv_type+"""</td>"""
        # GitHub markdown-like links: turn raw issue/PR URLs into short
        # "#123"-style anchors.
        nidm_repo = "https://github.com/incf-nidash/nidm/"
        stato_repo = "https://github.com/ISA-tools/stato/"
        nidm_pr_issue = re.compile(nidm_repo+r'[a-zA-Z]*/(\d+)')
        note = nidm_pr_issue.sub(
            r'<a href="'+nidm_repo+r'pull/\1">'+r'#\1</a>', note)
        stato_pr_issue = re.compile(stato_repo+r'[a-zA-Z]*/(\d+)')
        note = stato_pr_issue.sub(
            r'<a href="'+stato_repo+r'pull/\1">'+r'ISA-tools/stato#\1</a>',
            note)
        if note:
            note = note+"<br/>"
        # Add a search link (to check current state of the repo)
        if "Under discussion" in note:
            search_text = "more"
        else:
            search_text = "find issues/PR"
        note = note+"<a href=\""+nidm_repo+"/issues?&q="+term_name.split(":")[1]+"\"> ["+search_text+"] </a>"
        term_row = """
        <tr>
            <td>"""+img_color+"""</td>
            <td>"""+note+"""</td>
            <td><b>"""+term_name+""": </b>"""+definition+same_as+editor+"""</td>"""+range_domain_type+"""
        </tr>"""
        return term_row

    def create_curation_legend(self, order):
        """Return an HTML legend mapping each curation color (first occurrence
        in *order*) to its CURATION_LEGEND text."""
        curation_legend = "<b>Curation status</b>: \n"
        curation_colors_sorted = [(key, CURATION_COLORS.get(key))
                                  for key in order]
        covered_colors = list()
        for curation_color in curation_colors_sorted:
            # curation_status = str(self.owl.qname(curation_color[0]))
            # curation_status_labels = self.owl.objects(curation_color[0], RDFS['label'])
            # curation_status = ", ".join(list(curation_status_labels))
            color = curation_color[1]
            if not color in covered_colors:
                curation_legend = curation_legend+'<img src="../../../doc/content/specs/img/'+color+'.png?raw=true"/> '+\
                    CURATION_LEGEND[color]+";\n"
                covered_colors.append(color)
        return curation_legend

    # Get README text according to owl file information
    def update_readme(self, readme_file):
        """Collect curation metadata for every class/property/individual in
        the OWL file and rewrite *readme_file* with the generated tables."""
        class_terms = dict()
        prpty_terms = dict()
        indiv_terms = dict()
        definitions = dict()
        editors = dict()
        notes = dict()
        ranges = dict()
        domains = dict()
        sameas = dict()
        types = dict()
        for owl_term in self.owl.classes.union(
                self.owl.properties).union(self.owl.individuals):
            curation_status = self.owl.get_curation_status(owl_term)
            definition = self.owl.get_definition(owl_term)
            if definition == "":
                definition = "<undefined>"
            editor = self.owl.get_editor(owl_term)
            note = self.owl.get_editor_note(owl_term)
            range_value = self.owl.get_range(owl_term)
            domain = self.owl.get_domain(owl_term)
            same = self.owl.get_same_as(owl_term)
            indiv_type = self.owl.get_individual_type(owl_term)
            if curation_status:
                curation_key = curation_status
                term_key = self.owl.get_label(owl_term)
                # Only NIDM/SPM/FSL/AFNI-prefixed terms are listed.
                if term_key.startswith("nidm") or term_key.startswith("spm") or\
                   term_key.startswith("fsl") or term_key.startswith("afni"):
                    if owl_term in self.owl.classes:
                        class_terms.setdefault(curation_key,
                                               list()).append(term_key)
                    else:
                        if owl_term in self.owl.properties:
                            prpty_terms.setdefault(curation_key,
                                                   list()).append(term_key)
                        else:
                            if owl_term in self.owl.individuals:
                                indiv_terms.setdefault(curation_key,
                                                       list()).append(term_key)
                    definitions[term_key] = definition
                    editors[term_key] = editor
                    notes[term_key] = note
                    ranges[term_key] = range_value
                    domains[term_key] = domain
                    sameas[term_key] = same
                    types[term_key] = indiv_type
        # Include missing keys and do not display ready for release terms
        order = CURATION_ORDER+(list(
            set(class_terms.keys()).union(set(prpty_terms.keys())) -
            set(CURATION_ORDER+list([OBO_READY]))))
        class_terms_sorted = [(key, class_terms.get(key)) for key in order]
        prpty_terms_sorted = [(key, prpty_terms.get(key)) for key in order]
        indiv_terms_sorted = [(key, indiv_terms.get(key)) for key in order]
        # One row per term, grouped by curation status in 'order'.
        # NOTE(review): CURATION_COLORS.setdefault(...) inserts "" into the
        # shared map for unknown statuses — a .get() would avoid mutation.
        class_table_txt = "<h2>Classes</h2>\n<table>\n<tr><th>Curation Status</th><th>Issue/PR</th><th>Term</th></tr>"
        for tuple_status_term in class_terms_sorted:
            curation_status = tuple_status_term[0]
            class_names = tuple_status_term[1]
            if class_names:
                for class_name in sorted(class_names):
                    class_table_txt += self.create_term_row(class_name, \
                        definitions[class_name], \
                        sameas[class_name], \
                        editors[class_name], \
                        notes[class_name], \
                        CURATION_COLORS.setdefault(curation_status, ""))
        class_table_txt = class_table_txt+"\n</table>"
        prpty_table_txt = "<h2>Properties</h2>\n<table>\n<tr><th>Curation Status</th><th>Issue/PR</th><th>Term</th><th>Domain</th><th>Range</th></tr>"
        for tuple_status_term in prpty_terms_sorted:
            curation_status = tuple_status_term[0]
            term_names = tuple_status_term[1]
            if term_names:
                for term_name in sorted(term_names):
                    prpty_table_txt += self.create_term_row(term_name, \
                        definitions[term_name], \
                        sameas[term_name], \
                        editors[term_name], \
                        notes[term_name], \
                        CURATION_COLORS.setdefault(curation_status, ""), \
                        ranges[term_name], \
                        domains[term_name])
        prpty_table_txt = prpty_table_txt+"\n</table>"
        indiv_table_txt = "<h2>Individuals</h2>\n<table>\n<tr><th>Curation Status</th><th>Issue/PR</th><th>Term</th><th>Type</th></tr>"
        for tuple_status_term in indiv_terms_sorted:
            curation_status = tuple_status_term[0]
            term_names = tuple_status_term[1]
            if term_names:
                for term_name in sorted(term_names):
                    indiv_table_txt += self.create_term_row(term_name, \
                        definitions[term_name], \
                        sameas[term_name], \
                        editors[term_name], \
                        notes[term_name], \
                        CURATION_COLORS.setdefault(curation_status, ""), \
                        None, None, types[term_name])
        indiv_table_txt = indiv_table_txt+"\n</table>"
        curation_legend = self.create_curation_legend(order)
        title = "<h1>NIDM-Results Terms curation status</h1>"
        intro = """You will find below a listing of the NIDM-Results terms that \
need to be curated. If you would like **to help with the curation of a term, \
please follow those steps**:

 1. Check if the terms is already under discussion in an issue.
 2. If not, create a new issue including the current definition (available in\
 the table below) and your proposed update.

If possible, priority should be given to uncurated terms (in red).

Thank you in advance for taking part in NIDM-Results term curation!\n\n"""
        self.write_readme(readme_file, title+intro+\
            curation_legend+class_table_txt+prpty_table_txt+indiv_table_txt)
class MemoryCondenser:
    """Condense several logged robot-task experiences into one summary structure.

    Experiences are loaded from OWL logs (plus optional designator files) and
    merged into nested dictionaries keyed by task context.  The class can also
    emit Graphviz ``dot`` renderings of the raw and condensed experience graphs.

    NOTE(review): this is Python 2 code (``print`` statements, ``dict(a.items()
    + b.items())``, ``list.sort(cmp)``); it will not run under Python 3.
    """

    def __init__(self, parent=None):
        # Readers for the OWL logs and the (optional) designator files.
        self.rdrOwl = OwlReader()
        self.rdrDesig = DesignatorReader()
        # All loaded experiences (Log instances).
        self.arrExperiences = []

    def countExperiences(self):
        """Return the number of experiences loaded so far."""
        return len(self.arrExperiences)

    def addExperience(self, expAdd):
        """Append an already-constructed experience (Log) to the collection."""
        self.arrExperiences.append(expAdd)

    def loadExperience(self, strOwlFile, strDesignatorFile):
        """Load one experience from an OWL file and optional designator file."""
        logReturn = Log()
        logReturn.setOwlData(self.rdrOwl.loadOwl(strOwlFile))
        if strDesignatorFile != "":
            logReturn.setDesignatorData(self.rdrDesig.loadDesignators(strDesignatorFile))
        self.addExperience(logReturn)

    def condenseData(self, dataOwl):
        """Condense one parsed OWL data set into a nested type/individual dict.

        Returns ``{"Toplevel": ...}`` or ``None`` when the data set has no
        metadata section.
        """
        result = None
        self.tti = dataOwl["task-tree-individuals"]
        owlMeta = dataOwl["metadata"]
        # NOTE(review): owlAnnot is read but never used below.
        owlAnnot = dataOwl["annotation"]
        if owlMeta:
            result = {"Toplevel" : self.condenseNodes("", owlMeta.subActions())};
        else:
            print "No meta data in file!"
        return result

    def condenseNodes(self, strParentNode, arrNodes, nLevel = 0):
        """Recursively group task-tree nodes by task context.

        Returns ``{"subTypes": {...}, "individuals": {}}`` where each subType
        entry carries the individuals (with parameters/parent/failure) that
        share the same task context.
        """
        arrTypes = {}
        arrIndividuals = {}
        for strNode in arrNodes:
            owlNode = self.tti[strNode]
            ident = owlNode.taskContext()
            failures = owlNode.failures()
            failure = ""
            if len(failures) > 0:
                # Only the first recorded failure type is kept.
                failure = self.tti[failures[0]].type()
            result = self.condenseNodes(strNode, owlNode.subActions(), nLevel + 1)
            if not ident in arrTypes:
                arrTypes[ident] = result
            else:
                # Same context seen before: merge the two condensed subtrees.
                arrTypes[ident] = self.unifyResults(arrTypes[ident], result)
            arrTypes[ident]["individuals"][strNode] = {"parameters" : owlNode.annotatedParameters(True), "parent" : strParentNode, "failure" : failure}
        return {"subTypes" : arrTypes, "individuals" : {}}

    def unifyResults(self, res1, res2):
        """Merge two condensed subtrees, unioning individuals and recursing
        into subTypes present in both."""
        resparams = {}
        if len(res1["individuals"]) > 0:
            resparams = res1["individuals"]
        if len(res2["individuals"]) > 0:
            # Python 2 idiom: list concatenation of .items() (entries from
            # res2 win on key clashes).
            resparams = dict(resparams.items() + res2["individuals"].items())
        unified = {"subTypes" : {}, "individuals" : resparams}
        for ressub1 in res1["subTypes"]:
            if ressub1 in res2["subTypes"]:
                unified["subTypes"][ressub1] = self.unifyResults(res1["subTypes"][ressub1], res2["subTypes"][ressub1])
            else:
                unified["subTypes"][ressub1] = res1["subTypes"][ressub1]
        for ressub2 in res2["subTypes"]:
            if not ressub2 in res1["subTypes"]:
                unified["subTypes"][ressub2] = res2["subTypes"][ressub2]
        return unified

    def condense(self):
        """Condense all loaded experiences by context and print the result
        (tree plus per-node parameters) to stdout."""
        arrStartNodes = []
        self.tti = {}
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            arrStartNodes += metaData.subActions()
            self.tti.update(owlData["task-tree-individuals"])
        self.processed_nodes = []
        tree = self.condenseNodesByContext(arrStartNodes)
        parameters = {}
        for node in self.processed_nodes:
            params = self.tti[node].annotatedParameters(bSingularParameters = True)
            if len(params) > 0:
                parameters[node] = {}
                for p in params:
                    if not p == "_time_created":
                        # NOTE(review): lstrip strips *characters* from the set
                        # "parameter-", not the literal prefix — keys such as
                        # "ratio" would lose leading letters; confirm intended.
                        parameters[node][p.lstrip("parameter-")] = params[p]
        result = {"tree": tree, "parameters": parameters}
        print result

    def sortComparatorActionTime(self, action1, action2):
        """cmp-style comparator ordering actions by their start timestamp."""
        if action1.timeSpan[0] > action2.timeSpan[0]:
            return 1
        elif action1.timeSpan[0] == action2.timeSpan[0]:
            return 0
        else:
            return -1

    def sortActionsByTime(self, actions):
        """Sort ``actions`` in place by start time and return the same list."""
        actions.sort(self.sortComparatorActionTime)
        return actions

    def condenseNodesByContext(self, nodes):
        """Group ``nodes`` by task context, recursing into their (time-sorted)
        sub-actions; leaf nodes are recorded as terminal states."""
        # Separate nodes by their taskContext
        dicContexts = {}
        for node in nodes:
            self.processed_nodes.append(node)
            owlNode = self.tti[node]
            if not owlNode.taskContext() in dicContexts:
                dicContexts[owlNode.taskContext()] = {"nodes": [], "terminal-state": []}
            dicContexts[owlNode.taskContext()]["nodes"].append(node)
        for context in dicContexts:
            all_children = []
            for node in dicContexts[context]["nodes"]:
                sub_actions = self.sortActionsByTime(self.tti[node].subActions())
                if len(sub_actions) > 0:
                    all_children += sub_actions
                else:
                    dicContexts[context]["terminal-state"].append(node)
            dicContexts[context]["children"] = self.condenseNodesByContext(all_children)
        return dicContexts

    def generalizeExperiences(self):
        """Fold every start node of every experience into one generalized
        structure and print it (injection is currently a stub)."""
        self.generalizedExperience = {}
        self.tti = {}
        arrStartNodes = []
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            arrStartNodes += metaData.subActions()
            self.tti.update(owlData["task-tree-individuals"])
        for node in arrStartNodes:
            self.injectActionIntoGeneralizedExperience(node, self.generalizedExperience)
        print self.generalizedExperience

    def injectActionIntoGeneralizedExperience(self, action, target_branch):
        # NOTE(review): placeholder implementation — writes a dummy value only.
        target_branch["a"] = 5

    def dotNode(self, node, first):
        """Return Graphviz dot text for ``node`` and (recursively) its
        sub-actions, chaining siblings left-to-right."""
        dot = ""
        tti = self.t_tti[node]
        former_subnode = ""
        dot += " {rank=same;"
        for subnode in tti.subActions():
            dot += " " +subnode
        dot += "}\n"
        for subnode in tti.subActions():
            dot += " " + subnode + " [shape=box, label=\"" + self.t_tti[subnode].taskContext() + "\"]\n"
            #dot += " edge ";
            if first == True:
                first = False
                dot += "edge [dir=both, arrowhead=normal, arrowtail=none]"
                dot += "\n " + node + " -> " + subnode + "\n"
            else:
                pass#dot += "[dir=both, arrowhead=diamond, arrowtail=ediamond]"
            if not former_subnode == "":
                # Link consecutive siblings with an ordering edge.
                dot += " edge [arrowhead=empty, arrowtail=none]\n"
                dot += " " + former_subnode + " -> " + subnode + "\n"
            dot += self.dotNode(subnode, True)
            former_subnode = subnode
        if len(tti.subActions()) == 0 and tti.nextAction() == None:
            #dot += " terminal_state_" + node + " [shape=doublecircle, label=\"\"]\n"
            #dot += " edge [arrowhead=empty, arrowtail=none]\n"
            #dot += " " + node + " -> terminal_state_" + node + "\n"
            pass
        return dot

    def printExperiences(self, dot):
        """Print each experience, as dot graphs when ``dot`` is truthy,
        otherwise as an indented text tree."""
        for experience in self.arrExperiences:
            if dot:
                self.printDotExperience(experience)
            else:
                self.printRawExperience(experience)

    def printRawExperience(self, experience):
        """Print one experience as an indented text tree."""
        owlData = experience.getOwlData()
        metaData = owlData["metadata"]
        start_nodes = metaData.subActions()
        self.t_tti = owlData["task-tree-individuals"]
        for node in start_nodes:
            self.printRawExperienceNode(node)

    def printRawExperienceNode(self, node, level = 0):
        """Print ``node`` (context plus parameters) indented by ``level``,
        then recurse into its sub-actions."""
        indent = " " * level
        owl = self.t_tti[node]
        parameters = owl.annotatedParameters()
        param_str = "("
        first = True
        for parameter in parameters:
            if not parameter == "_time_created":
                if first == True:
                    first = False
                else:
                    param_str = param_str + ", "
                # Drop the "parameter-" prefix when present.
                key_str = parameter[10:] if parameter[:10] == "parameter-" else parameter
                param_str = param_str + key_str + "=" + parameters[parameter][0]
        param_str = param_str + ")"
        print indent + owl.taskContext() + " " + param_str
        if len(owl.subActions()) > 0:
            for node in owl.subActions():
                self.printRawExperienceNode(node, level + 1)

    def printDotExperience(self, experience):
        """Print one experience as a complete Graphviz digraph on stdout."""
        owlData = experience.getOwlData()
        metaData = owlData["metadata"]
        start_nodes = metaData.subActions()
        self.t_tti = owlData["task-tree-individuals"]
        dot = "digraph plangraph {\n"
        dot += " label=\"Original Experiences\"\n"
        dot += " labeljust=center\n"
        dot += " labelloc=top\n"
        for node in start_nodes:
            dot += " " + node + " [shape=box, label=\"" + self.t_tti[node].taskContext() + "\"]\n\n"
            dot += self.dotNode(node, True)
        dot += "}\n"
        print dot

    def compareSubActions(self, subaction1, subaction2):
        """cmp-style comparator: 1 when subaction2 follows subaction1 in the
        next-action chain, 0 when equal, -1 otherwise.

        NOTE(review): the loop re-evaluates ``self.tti[subaction1].nextAction()``
        each iteration instead of following ``next_action`` — it never advances,
        so it either matches on the first successor or loops until that constant
        value is None.  Verify against the intended chain walk.
        """
        if subaction1 == subaction2:
            return 0
        next_action = subaction1
        while next_action != None:
            next_action = self.tti[subaction1].nextAction()
            if next_action == subaction2:
                return 1
        return -1

    def sortSubActions(self, subactions):
        """Sort sub-actions in place using the next-action chain order."""
        subactions.sort(self.compareSubActions)
        return subactions

    def sortSubActionsList(self, subactions_list):
        """Sort every sub-action list in ``subactions_list``."""
        sorted_list = []
        for subactions in subactions_list:
            sorted_list.append(self.sortSubActions(subactions))
        return sorted_list

    def generalizeNodes(self, nodes):
        """Return the list of sub-action sequences, one per node."""
        sequences = []
        for node in nodes:
            sequences.append(self.tti[node].subActions())
        return sequences

    def workOnExperiences(self):
        """Collect start nodes over all experiences and print their
        sub-action sequences (exploratory helper)."""
        start_nodes = []
        self.tti = {}
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            start_nodes += metaData.subActions()
            self.tti.update(owlData["task-tree-individuals"])
        print self.generalizeNodes(start_nodes)

    def injectExperiences(self, deduced = False, data = False):
        """Inject all experiences into one condensed frame (self.arrInjected),
        annotate terminal/optional states, then print either the deduced
        pathways (``deduced``) or the injected frame; dot output unless
        ``data`` is set."""
        self.arrInjected = {}
        self.tti = {}
        self.uid_counter = 0;
        root_action_count = 0
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            self.tti.update(owlData["task-tree-individuals"])
            for node in metaData.subActions():
                self.injectExperienceNode(node, self.arrInjected)
                root_action_count = root_action_count + 1
        # Second pass: mark terminal and optional nodes once all experiences
        # have been injected.
        for experience in self.arrExperiences:
            owlData = experience.getOwlData()
            metaData = owlData["metadata"]
            self.tti.update(owlData["task-tree-individuals"])
            for node in metaData.subActions():
                self.checkForTerminalStateOccurrences(self.tti[node].taskContext(), self.arrInjected)
            for node in metaData.subActions():
                self.checkForOptionalInjectedNodes(self.tti[node].taskContext(), self.arrInjected)
        if deduced:
            self.printDeduced(dot = not data, root_action_count = root_action_count)
        else:
            self.printInjected(dot = not data)

    def injectExperienceNode(self, node, frame, rootlevel = False, invocation_path = {}):
        """Merge one task-tree node (and, recursively, its sub-actions and
        next-action chain) into the condensed ``frame``.

        NOTE(review): mutable default argument ``invocation_path={}`` — safe
        only because it is copied before mutation below; confirm no caller
        relies on it accumulating state.
        """
        ctx = self.tti[node].taskContext()
        params = self.tti[node].annotatedParameters()
        params_fixed = {}
        call_pattern = ""
        for param in params:
            if not param == "_time_created" and not param == "CALLPATTERN":
                key_str = param[10:] if param[:10] == "parameter-" else param
                params_fixed[key_str] = params[param][0]
            elif param == "CALLPATTERN":
                call_pattern = params[param][0]
        new_invocation_path = invocation_path.copy()
        if not ctx in frame:
            # First time this context is seen at this level: create its entry.
            new_invocation_path.update({self.uid_counter: params_fixed})
            frame[ctx] = {"children": {}, "next-actions" : {}, "uid" : self.uid_counter, "terminal-state": "false", "start-state": "false", "optional": "false", "instances": 0, "invocations": [new_invocation_path], "call-pattern": call_pattern}
            self.uid_counter = self.uid_counter + 1
        else:
            new_invocation_path.update({frame[ctx]["uid"]: params_fixed})
            frame[ctx]["invocations"].append(new_invocation_path)
        frame[ctx]["instances"] = frame[ctx]["instances"] + 1
        sub_nodes = self.tti[node].subActions()
        for sub in sub_nodes:
            self.injectExperienceNode(sub, frame[ctx]["children"], False, new_invocation_path)
        next_node = self.tti[node].nextAction()
        if next_node:
            # Walk the next-action chain, recording context transitions.
            current_ctx = ctx
            while next_node:
                nextCtx = self.tti[next_node].taskContext()
                call_pattern = self.tti[next_node].annotatedParameterValue("CALLPATTERN")
                if not call_pattern:
                    call_pattern = ""
                if not current_ctx in frame:
                    frame[current_ctx] = {"children": {}, "next-actions" : {}, "uid" : self.uid_counter, "terminal-state": "false", "start-state": "false", "optional": "false", "instances": 0, "invocations": [], "call-pattern": call_pattern}
                    self.uid_counter = self.uid_counter + 1
                if not nextCtx in frame[current_ctx]["next-actions"] and not rootlevel:
                    if not nextCtx == current_ctx:
                        if not nextCtx in frame[current_ctx]["next-actions"]:
                            frame[current_ctx]["next-actions"][nextCtx] = []
                        params = self.tti[next_node].annotatedParameters()
                        params_fixed = {}
                        for param in params:
                            if not param == "_time_created":
                                key_str = param[10:] if param[:10] == "parameter-" else param
                                params_fixed[key_str] = params[param][0]
                        frame[current_ctx]["next-actions"][nextCtx].append(params_fixed)
                next_node = self.tti[next_node].nextAction()
                current_ctx = nextCtx
        else:
            # No successor and no children: this node ends a pathway.
            if len(self.tti[node].subActions()) == 0:
                frame[ctx]["terminal-state"] = "true"
        if frame[ctx]["start-state"] == "false":
            if self.tti[node].previousAction() == None:
                frame[ctx]["start-state"] = "true"

    def checkForOptionalInjectedNodes(self, ctx, frame, parent_instances = -1, came_from = None):
        """Mark nodes as optional when they occur fewer times than their
        parent, or when the predecessor can terminate; recurses over children
        and next-actions (guarded by the "check-optional" marker)."""
        if not "check-optional" in frame[ctx]:
            frame[ctx]["check-optional"] = "done"
            came_from_terminates = False
            came_from_valid = True
            if came_from:
                if came_from == ctx:
                    came_from_valid = False
                if frame[came_from]["terminal-state"] == "true":
                    came_from_terminates = True
            if came_from_valid == True:
                if frame[ctx]["instances"] < parent_instances or came_from_terminates:
                    frame[ctx]["optional"] = "true"
            for child in frame[ctx]["children"]:
                if frame[ctx]["children"][child]["start-state"] == "true":
                    self.checkForOptionalInjectedNodes(child, frame[ctx]["children"], frame[ctx]["instances"])
            for next_action in frame[ctx]["next-actions"]:
                if next_action in frame and not next_action == ctx:
                    self.checkForOptionalInjectedNodes(next_action, frame, frame[ctx]["instances"], ctx)

    def checkForTerminalStateOccurrences(self, ctx, frame):
        """Compute "terminal-instances" per node: instances not accounted for
        by starting children or next-actions; recurses (guarded by the
        "check-terminal" marker)."""
        if not "check-terminal" in frame[ctx]:
            frame[ctx]["check-terminal"] = "done"
            child_instances = 0
            next_instances = 0
            if frame[ctx]["terminal-state"] == "true":
                for child in frame[ctx]["children"]:
                    if frame[ctx]["children"][child]["start-state"] == "true":
                        child_instances = child_instances + frame[ctx]["children"][child]["instances"]
                for next_action in frame[ctx]["next-actions"]:
                    if next_action in frame and not next_action == ctx:
                        next_instances = next_instances + frame[next_action]["instances"]
                terminal_instances = frame[ctx]["instances"] - (child_instances + next_instances)
                if terminal_instances > 0:
                    frame[ctx]["terminal-instances"] = terminal_instances
                else:
                    frame[ctx]["terminal-instances"] = 0
            else:
                frame[ctx]["terminal-state"] = "false"
                frame[ctx]["terminal-instances"] = 0
            for child in frame[ctx]["children"]:
                self.checkForTerminalStateOccurrences(child, frame[ctx]["children"])
            for next_action in frame[ctx]["next-actions"]:
                if next_action in frame and not next_action == ctx:
                    self.checkForTerminalStateOccurrences(next_action, frame)

    def printDeduced(self, dot = False, root_action_count = 1):
        """Expand the injected frame into concrete pathways, dump them to
        deduced_experiences.json, and print them (dot or raw).

        NOTE(review): ``self.arrInjected.keys()[0]`` is Python-2-only and
        picks an arbitrary top-level node.
        """
        # TODO: Extend this to use all top-level nodes in case they
        # are different
        self.global_ctx_counter = 0
        deduced = self.expandPathways(self.arrInjected.keys()[0], self.arrInjected, root_action_count)
        fixed_deduced = []
        for d in deduced:
            # Drop the first two steps and the two bookkeeping uids from each
            # invocation before serialising.
            fixed_singular = d[2:]
            for step in fixed_singular:
                for invocation in step["invocations"]:
                    invocation.pop(0, 0)
                    invocation.pop(1, 0)
            fixed_deduced.append(fixed_singular)
        with open("deduced_experiences.json", "w") as f:
            json.dump(fixed_deduced, f)
        if dot:
            self.printDotDeduced(deduced)
        else:
            print deduced

    def expandPathways(self, ctx, nodes, root_action_count, trace = []):
        """Enumerate all acyclic pathways from ``ctx`` through children and
        next-actions, annotating each step with relative occurrence and
        termination ratios.  ``trace`` (uids already visited) breaks cycles.

        NOTE(review): mutable default ``trace=[]`` — only read and re-passed
        as ``trace + [...]``, so it is not mutated; confirm before refactoring.
        """
        expanded_pathways = []
        if not nodes[ctx]["uid"] in trace:
            current_node = [{"node": ctx, "instances": nodes[ctx]["instances"], "uid": nodes[ctx]["uid"], "rel-occ": (float(nodes[ctx]["instances"]) / float(root_action_count)), "rel-term": (float(nodes[ctx]["terminal-instances"]) / float(nodes[ctx]["instances"])), "invocations": nodes[ctx]["invocations"], "call-pattern": nodes[ctx]["call-pattern"]}]
            children = self.getStartNodes(nodes[ctx]["children"])
            had_non_optional_children = False
            for child in children:
                if not children[child]["optional"] == "true":
                    had_non_optional_children = True
                child_pathways = self.expandPathways(child, nodes[ctx]["children"], nodes[ctx]["instances"], trace + [nodes[ctx]["uid"]])
                for child_pathway in child_pathways:
                    expanded_pathways.append(current_node + child_pathway)
            if not had_non_optional_children:
                # All children optional (or none): the node alone is a pathway.
                expanded_pathways.append(current_node)
            next_actions = nodes[ctx]["next-actions"]
            final_pathways = []
            had_non_optional_next_actions = False
            for next_action in next_actions:
                if next_action != ctx:
                    if not nodes[next_action]["optional"] == "true":
                        had_non_optional_next_actions = True
                    expanded_next_pathways = self.expandPathways(next_action, nodes, nodes[ctx]["instances"], trace + [nodes[ctx]["uid"]])
                    for expanded_next_pathway in expanded_next_pathways:
                        for expanded_pathway in expanded_pathways:
                            # Cross-product: every prefix with every suffix.
                            final_pathways = final_pathways + [expanded_pathway + expanded_next_pathway]
            if not had_non_optional_next_actions:
                final_pathways = final_pathways + expanded_pathways
            return final_pathways
        else:
            return []

    def getStartNodes(self, nodes):
        """Return the subset of ``nodes`` flagged as start states."""
        start_nodes = {}
        for node in nodes:
            if nodes[node]["start-state"] == "true":
                start_nodes[node] = nodes[node]
        return start_nodes

    def printInjected(self, dot = False):
        """Print the injected frame, as dot when requested, else raw."""
        if dot:
            self.printInjectedDot()
        else:
            print self.arrInjected

    def printInjectedChildren(self, children, parent = None):
        """Return dot text for one level of the injected frame, recursing into
        children and drawing next-action edges between siblings."""
        dot = ""
        edge_pointers = {}
        next_action_parameters = {}
        ids = {}
        optionals = {}
        parent_id = parent
        if not parent:
            parent_id = "root"
        for child in children:
            # Unique dot node id per child (counterdot disambiguates repeats).
            child_id = "node_" + child.replace("-", "_") + "_" + str(self.counterdot)
            ids[child] = child_id
            self.counterdot = self.counterdot + 1
            label = child
            if label[:21] == "REPLACEABLE-FUNCTION-":
                label = label[21:]
            dot += " " + child_id + " [shape=box, label=\"" + label + " (" + str(children[child]["uid"]) + " / " + str(children[child]["instances"]) + ")\"]\n"
            if children[child]["terminal-state"] == "true":
                if children[child]["terminal-instances"] > 0:
                    dot += " ts_" + str(self.counterdot) + " [shape=doublecircle, label=\"" + str(children[child]["terminal-instances"]) + "\"]\n"
                    dot += " edge [style=dashed, arrowhead=normal, arrowtail=none, label=\"terminal\"]\n"
                    dot += " " + child_id + " -> " + "ts_" + str(self.counterdot) + "\n"
            dot += self.printInjectedChildren(children[child]["children"], child_id)
            if parent:
                if children[child]["start-state"] == "true":
                    if children[child]["optional"] == "true":
                        dot += " edge [style=solid, arrowhead=normal, arrowtail=none, label=\"optional\"]\n"
                    else:
                        dot += " edge [style=solid, arrowhead=normal, arrowtail=none, label=\"\"]\n"
                else:
                    if children[child]["optional"] == "true":
                        dot += " edge [style=dashed, arrowhead=none, arrowtail=none, label=\"\"]\n"
                    else:
                        dot += " edge [style=dashed, arrowhead=none, arrowtail=none, label=\"\"]\n"
                dot += " " + parent + " -> " + child_id + "\n"
            for na in children[child]["next-actions"]:
                if parent:
                    if not na in edge_pointers:
                        edge_pointers[na] = []
                        next_action_parameters[na] = {}
                    if not child_id in next_action_parameters[na]:
                        next_action_parameters[na][child_id] = []
                    edge_pointers[na].append(child_id)
                    #next_action_parameters[na][child_id].append(children[child]["next-actions"][na])
        #print "!"
        #print next_action_parameters
        for child in children:
            child_id = ids[child]
            if child in edge_pointers:
                for target in edge_pointers[child]:
                    param_str = ""
                    # for param_sets in next_action_parameters[child][target]:
                    # for param_set in param_sets:
                    # first_p = True
                    # for p in param_set:
                    # if first_p:
                    # first_p = False
                    # else:
                    # param_str = param_str + ", "
                    # param_str = param_str + p + " = " + param_set[p]
                    # param_str = param_str + "\\n"
                    #if next_action_parameters[
                    if children[child]["optional"] == "true":
                        param_str = "optional"
                    else:
                        param_str = ""
                    dot += " {rank=same; " + child_id + " " + target + "}\n"
                    dot += " edge [style=solid, arrowhead=empty, arrowtail=none, label=\"" + param_str + "\"]\n"
                    dot += " " + target + " -> " + child_id + "\n"
        return dot

    def printInjectedDot(self):
        """Print the whole injected frame as one Graphviz digraph."""
        self.counterdot = 0
        self.edge_pointers = {}
        dot = "digraph condensed {\n"
        dot += " graph []\n"#ranksep=0.5#nodesep=0.5#pad=0.5
        dot += " label=\"Condensed Experience Graph\"\n"
        dot += " labeljust=center\n"
        dot += " labelloc=top\n"
        dot += self.printInjectedChildren(self.arrInjected)
        dot += "}\n"
        print dot

    def expScore(self, exp):
        """Score a pathway: product of per-step relative occurrences times the
        last step's termination ratio."""
        acc_score = 1.0
        for item in exp:
            instances = item["instances"]
            rel_occ = item["rel-occ"]
            acc_score = acc_score * rel_occ
        last_item = exp[len(exp) - 1]
        acc_score = acc_score * last_item["rel-term"]
        return acc_score

    def expScoreCmp(self, exp1, exp2):
        """cmp-style comparator sorting pathways by descending score."""
        score1 = self.expScore(exp1)
        score2 = self.expScore(exp2)
        if score1 < score2:
            return 1
        elif score1 > score2:
            return -1
        else:
            return 0

    def printDotDeduced(self, deduced):
        """Print all deduced pathways as score-labelled dot subgraphs, best
        score first."""
        counter = 0
        subgraphcounter = 0
        dot = "digraph deduced {\n"
        dot += " label=\"Deduced Possible Action Paths\"\n"
        dot += " labeljust=center\n"
        dot += " labelloc=top\n"
        highest_score = 0
        # NOTE(review): highest_score is computed but never used afterwards.
        for line in deduced:
            acc_score = self.expScore(line)
            if acc_score > highest_score:
                highest_score = acc_score
        deduced.sort(self.expScoreCmp)
        for line in deduced:
            dot += " \n"
            dot += " subgraph cluster_" + str(subgraphcounter) + " {\n"
            dot += " pencolor=transparent;\n"
            dot += " \n"
            subgraphcounter = subgraphcounter + 1
            first = True
            acc_score = 1.0
            for item in line:
                instances = item["instances"]
                node = item["node"]
                rel_occ = item["rel-occ"]
                # Correct node label
                if node[:21] == "REPLACEABLE-FUNCTION-":
                    node = node[21:]
                acc_score = acc_score * rel_occ
                if not first:
                    dot += " node_" + str(counter - 1) + " -> node_" + str(counter) + "\n"
                else:
                    first = False
                dot += " node_" + str(counter) + " [shape=box, label=\"" + node + " (" + str(round(rel_occ, 2)) + ")\"]\n"
                counter = counter + 1
            last_item = line[len(line) - 1]
            dot += " ts_" + str(counter - 1) + " [shape=doublecircle, label=\"" + str(round(last_item["rel-term"], 2)) + "\"]\n"
            dot += " edge [style=dashed, arrowhead=normal, arrowtail=none, label=\"\"]\n"
            dot += " node_" + str(counter - 1) + " -> " + "ts_" + str(counter - 1) + "\n"
            acc_score = acc_score * last_item["rel-term"]
            dot += " \n"
            dot += " label=\"Score: " + str(round(acc_score, 2)) + "\";\n"
            dot += " labeljust=center;\n"
            dot += " labelloc=top;\n"
            dot += " }\n"
        dot += "}\n"
        print dot
class TestExamples(unittest.TestCase):
    """Validate every NIDM example file against its ontology.

    Each example's turtle file is parsed into an rdflib Graph, paired with the
    closest ``terms/*.owl`` ontology, and checked for undefined classes and
    attributes.
    """

    def __init__(self, *args, **kwargs):
        super(TestExamples, self).__init__(*args, **kwargs)
        namespaces_def = os.path.join(RELPATH, "terms", "templates",
                                      'Namespaces.txt')
        # 'with' guarantees the file is closed even if read() raises
        # (the original open/read/close pair leaked the handle on error).
        with open(namespaces_def, "r") as fid:
            namespaces = fid.read()
        # Stand-alone term examples need the namespace preamble prepended.
        self.term_examples = glob.glob(os.path.join(RELPATH, "terms",
                                                    "examples", '*.txt'))
        self.example_files = example_filenames.union(self.term_examples)
        self.examples = dict()
        self.owl_files = dict()
        for example_file in self.example_files:
            provn_file = os.path.join(os.path.dirname(os.path.dirname(
                os.path.abspath(__file__))), example_file)
            # ttl_file_url = get_turtle(provn_file)
            ttl_file = provn_file.replace(".provn", ".ttl")
            # Read turtle
            self.examples[example_file] = Graph()
            if example_file in self.term_examples:
                with open(ttl_file, "r") as fid:
                    ttl_txt = fid.read()
                self.examples[example_file].parse(data=namespaces+ttl_txt,
                                                  format='turtle')
            else:
                self.examples[example_file].parse(ttl_file, format='turtle')
            # Locate the ontology: one level up for core examples, two levels
            # up for extension examples.
            term_dir = os.path.join(os.path.dirname(ttl_file),
                                    os.pardir, 'terms')
            if not os.path.isdir(term_dir):
                term_dir = os.path.join(os.path.dirname(ttl_file),
                                        os.pardir, os.pardir, 'terms')
            owl_files = glob.glob(os.path.join(term_dir, '*.owl'))
            self.owl_files[example_file] = owl_files[0]
        # Cache of OwlReader instances keyed by owl file path.
        self.owl_readers = dict()

    def _load_owl(self, owl_file):
        """Set self.owl to an OwlReader for owl_file, caching per path."""
        if owl_file in self.owl_readers:
            self.owl = self.owl_readers[owl_file]
            return
        # check the file exists
        assert os.path.exists(owl_file), "owl file not found: %s" % owl_file
        # Read owl (turtle) file
        owl_path = os.path.dirname(owl_file)
        # Extension ontologies live two directories deeper than the core one,
        # so their imports directory is further up the tree.
        if "extension" not in owl_path:
            import_files = glob.glob(os.path.join(
                owl_path, os.pardir, os.pardir, "imports", '*.ttl'))
        else:
            import_files = glob.glob(os.path.join(
                owl_path, os.pardir, os.pardir, os.pardir, os.pardir,
                "imports", '*.ttl'))
        # Main ontology file
        import_files += glob.glob(os.path.join(
            owl_path, os.pardir, os.pardir, os.pardir, "terms", '*.owl'))
        self.owl = OwlReader(owl_file, import_files)
        self.owl_readers[owl_file] = self.owl

    def test_check_classes(self):
        """Every entity/activity/agent used in an example must be defined in
        the corresponding ontology."""
        logger.info("TestExamples: test_check_classes")
        my_exception = dict()
        for example_file in self.example_files:
            example_name = example_file
            example_graph = self.examples[example_file]
            self._load_owl(self.owl_files[example_file])
            # Check that all entity, activity, agent are defined in the
            # data model
            exception_msg = self.owl.check_class_names(example_graph,
                                                       example_name)
            my_exception = merge_exception_dict(my_exception, exception_msg)
        # Aggregate errors over examples for conciseness
        if my_exception:
            error_msg = ""
            for unrecognised_class_name, examples in my_exception.items():
                # One entry per line (entries previously ran together).
                error_msg += unrecognised_class_name + \
                    " (from " + ', '.join(examples) + ")\n"
            raise Exception(error_msg)

    def test_check_attributes(self):
        """Every attribute used in an example must be defined in the ontology
        with a compatible range and restrictions."""
        logger.info("TestExamples: test_check_attributes")
        my_exception = dict()
        my_range_exception = dict()
        my_restriction_exception = dict()
        for example_file in self.example_files:
            example_name = example_file
            example_graph = self.examples[example_file]
            self._load_owl(self.owl_files[example_file])
            exception_msg = self.owl.check_attributes(example_graph,
                                                      example_name)
            my_exception = merge_exception_dict(my_exception,
                                                exception_msg[0])
            if example_file not in self.term_examples:
                my_range_exception = merge_exception_dict(
                    my_range_exception, exception_msg[1])
            else:
                # Ignore range exceptions for test examples (as for object
                # properties the linked object will be missing)
                my_range_exception = dict()
            my_restriction_exception = merge_exception_dict(
                my_restriction_exception, exception_msg[2])
        # Aggregate errors over examples for conciseness
        error_msg = ""
        for found_exception in (my_exception, my_range_exception,
                                my_restriction_exception):
            if found_exception:
                for unrecognised_attribute, example_names \
                        in found_exception.items():
                    # One entry per line (entries previously ran together).
                    error_msg += unrecognised_attribute + \
                        " (from " + ', '.join(example_names) + ")\n"
        if error_msg:
            raise Exception(error_msg)
def main(sid, aid, owl_file, template_files, script_files, constants_file): owl_txt = get_file_text(owl_file) templates_txt = dict() for template_file in template_files: templates_txt[template_file] = get_file_text(template_file) scripts_txt = dict() for script_file in script_files: scripts_txt[script_file] = get_file_text(script_file) cst_txt = get_file_text(constants_file) # If alphanumeric identifier was not defined, find the next available if aid is None: before_alnum = "nidm:NIDM_" # Find all alphanumeric identifiers in the owl file alphanum_ids = set(re.findall("("+before_alnum+'\d+)\s+', owl_txt)) # Get identifier number for next alphanumeric identifier last_id = sorted(list(alphanum_ids))[-1] new_id_num = int(last_id.replace(before_alnum, ""))+1 aid = before_alnum+"{0:0>7}".format(new_id_num) owl = OwlReader sid_name = sid.split(":")[1] sid_namespace = sid.split(":")[0] if sid_namespace == "nidm": uri = NIDM[sid_name] elif sid_namespace == "fsl": uri = FSL[sid_name] elif sid_namespace == "spm": uri = SPM[sid_name] owl = OwlReader(owl_file) label = owl.get_label(uri).split(":")[1].replace("'", "") # Replace all occurences of semantic id owl_txt = owl_txt.replace(sid+" ", aid+" ") # Replace ids in templates for tpl, tpl_txt in templates_txt.items(): templates_txt[tpl] = tpl_txt.replace(sid+" ", aid+" ") for scr, scr_txt in scripts_txt.items(): scripts_txt[scr] = scr_txt.replace('"'+sid+'"', '"'+aid+'"') new_constant = "NIDM_" + \ label.upper().replace(" ", "_").replace("-", "_") + \ " = NIDM['"+aid.replace("nidm:", "")+"']" cst_txt = cst_txt.replace("# NIDM constants", "# NIDM constants\n"+new_constant) replace_file_txt(owl_file, owl_txt) replace_file_txt(constants_file, cst_txt) for tpl, tpl_txt in templates_txt.items(): replace_file_txt(tpl, tpl_txt) for scr, scr_txt in scripts_txt.items(): replace_file_txt(scr, scr_txt)
class OwlSpecification(object):
    """Generate an HTML (Respec-style) specification page from an OWL file.

    The page is accumulated as a string in ``self.text``; the whole
    document is generated eagerly in the constructor via
    :meth:`create_specification` and can then be written out with
    :meth:`write_specification`.
    """

    def __init__(self, owl_file, import_files, spec_name, subcomponents=None,
                 used_by=None, generated_by=None, derived_from=None,
                 prefix=None):
        self.owl = OwlReader(owl_file, import_files)
        self.owl.graph.bind('nidm', 'http://purl.org/nidash/nidm#')
        self.name = spec_name
        # Component name used to look up include files, e.g.
        # "NIDM-Results" -> "nidm_results"
        self.component = self.name.lower().replace("-", "_")
        # Number of <section> tags opened and not yet closed
        self.section_open = 0
        # Classes already documented (avoid duplicate sections)
        self.already_defined_classes = list()
        # Attributes already given a <dfn> definition
        self.attributes_done = set()
        self.text = ""
        # The whole document is generated at construction time
        self.create_specification(subcomponents, used_by, generated_by,
                                  derived_from, prefix)

    def create_specification(self, subcomponents, used_by, generated_by,
                             derived_from, prefix):
        """Emit the title, one mapping table per subcomponent and one
        section per class."""
        self.create_title(self.name+": Types and relations")

        # If no subcomponents are defined display all classes
        if not subcomponents:
            subcomponents = dict([(None, self.owl.classes)])

        # Tables 1 and 2 are assumed to exist in the hand-written intro;
        # generated tables therefore start at 3.
        table_num = 3
        for subcomponent_name, classes in subcomponents.items():
            classes_by_types = self.owl.get_class_names_by_prov_type(
                classes, prefix=prefix, but=self.already_defined_classes)
            self.already_defined_classes += classes

            self.create_subcomponent_table(classes_by_types, table_num,
                                           subcomponent_name)
            table_num = table_num + 1

            # Agents first, then activities, then entities, then untyped
            all_classes = sorted(classes_by_types[PROV['Agent']]) + \
                sorted(classes_by_types[PROV['Activity']]) + \
                sorted(classes_by_types[PROV['Entity']]) + \
                sorted(classes_by_types[None])

            for class_name in all_classes:
                self.create_class_section(
                    class_name,
                    self.owl.get_definition(class_name),
                    self.owl.attributes.setdefault(class_name, None),
                    used_by, generated_by, derived_from)

            if subcomponent_name:
                self.text += """
        </section>"""

        self.close_sections()

    def create_subcomponent_table(self, classes, table_num,
                                  subcomponent_name=None):
        """Emit the PROV-mapping table for one subcomponent.

        `classes` maps a PROV type (or None) to the class URIs of that
        type, as returned by get_class_names_by_prov_type.
        """
        if subcomponent_name:
            self.text += """
        <section><h1>"""+subcomponent_name+"""</h1>"""
            # Check if there is a header file to include here
            fname = os.path.join(
                INCLUDE_FOLDER, self.component+"_" +
                subcomponent_name.split(" ")[0].lower()+".html")
            if os.path.isfile(fname):
                fid = open(fname, "r")
                self.text += fid.read()
                fid.close()
        else:
            subcomponent_name = ""

        # Did not find how to handle table numbering and ids with Respec as we
        # did for figures?
        table_id = "prov-mapping-"""+subcomponent_name.lower()
        self.text += """
        <div style="text-align: left;">
            <table class="thinborder" \
style="margin-left: auto; margin-right: auto;">
            <caption id=\""""+table_id+"""\">\
<a class="internalDFN" href=\"#"""+table_id+"""\">\
Table """+str(table_num)+"""</a>: Mapping of """+self.name+""" """+subcomponent_name + \
            """ Core Concepts to types and relations \
and PROV core concepts</caption> \
            <tbody>
            <tr>
                <td><b>"""+self.name+""" Concepts</b></td>
                <td><b>Types or Relation (PROV concepts)</b></td>
                <td><b>Name</b></td>
            </tr>
        """

        self.text += """
        <!-- HERE ------------- Beginning of PROV Entities ------------- -->
        """

        # One group of rows per PROV type; the type cell spans the group
        for prov_class in list([
                PROV['Agent'], PROV['Activity'], PROV['Entity']]):
            sorted_classes = sorted(classes[prov_class])
            for class_uri in sorted_classes:
                self.text += """
            <tr>
                <td>"""+self.term_link(class_uri)+"""
                </td>
                """
                # First iteration
                # NOTE(review): identity (`is`) comparison with the first
                # list element — works here because both names reference
                # the same object, but `==` would be the safer idiom.
                if class_uri is sorted_classes[0]:
                    self.text += """
                <td rowspan=\""""+str(len(sorted_classes)) + \
                        """\" style="text-align: center;">""" + \
                        self.name+""" Types<br/> \
(PROV """ + \
                        self.owl.get_label(prov_class).replace('prov:', '') + \
                        """)</td>
                """
                self.text += """
                <td>"""+self.term_link(class_uri)+"""</td>
            </tr>
            """

        self.text += """
            </tbody>
            </table>
        </div>"""

    def create_title(self, title):
        """Open a new <section> with the given <h1> title."""
        self.text += """
        <section>
            <h1>"""+title+"""</h1>
        """
        # Remember to close this section in close_sections()
        self.section_open += 1

    def _format_markdown(self, text):
        """Convert a single markdown-style link in `text` to HTML."""
        # Replace links specified in markdown by html
        match = re.search(r'\[(?P<name>.*)\]\((?P<link>.*)\)', text)
        if match:
            text = text.replace(
                match.group(),
                '<a href="'+match.group('link')+'">' +
                match.group('name')+'</a>')
        return text

    def format_definition(self, definition):
        """Capitalise the definition and render markdown links as HTML."""
        # Capitalize first letter of definition
        if definition:
            definition = definition[0].upper() + definition[1:]
        definition = self._format_markdown(definition)
        return definition

    def linked_listing(self, uri_list, prefix="", suffix=""):
        """Return prefix + a comma-separated listing of term links +
        suffix, ordered by label."""
        linked_listing = prefix
        for i, uri in enumerate(self.owl.sorted_by_labels(uri_list)):
            if i == 0:
                sep = ""
            elif i == len(uri_list):
                # NOTE(review): i ranges over 0..len(uri_list)-1, so this
                # branch is unreachable — presumably `len(uri_list) - 1`
                # was intended so the last item is joined with " and ".
                sep = " and "
            else:
                sep = ", "
            linked_listing += sep+self.term_link(uri)
        return linked_listing+suffix

    def term_link(self, term_uri, tag="a", text=None):
        """Return an HTML link (or <dfn>) for a term.

        External-namespace terms get an href to their URI; the tag's
        title attribute carries the term's name.
        """
        href = ""
        if self.owl.is_external_namespace(term_uri):
            href = " href =\""+str(term_uri)+"\""
        if text is None:
            text = self.owl.get_label(term_uri)
        term_link = "<" + tag + " title=\"" + self.owl.get_name(term_uri) + \
            "\"" + href + ">" + text+"</"+tag+">"

        # # This could be handled by Respec, here we overwrite the id and href
        # # fields in order to be able to have an id that is not generated from
        # # the title field. e.g. title = nidm_0000001 (nidm:Map) and
        # # id = nidm_0000001
        # name_lw = self.owl.get_name(term_uri).lower()
        # if tag is "dfn":
        #     link_info = " id=\"dfn-" + name_lw + "\""
        # elif tag is "a":
        #     link_info = " href=\"#dfn-" + name_lw + "\""
        # term_link = "<" + tag + link_info + \
        #     " class=\"internalDFN\"" + \
        #     " title=\"" + self.owl.get_name(term_uri) + \
        #     " (" + self.owl.get_label(term_uri) + ")" + \
        #     "\"" + href + ">" + text + "</" + tag + ">"

        # NOTE(review): `is` comparison with a string literal relies on
        # CPython interning; `tag == "dfn"` would be correct in general.
        if tag is "dfn":
            # Add link to current definition
            term_link = self.term_link(term_uri, text=term_link)
        return term_link

    def create_class_section(self, class_uri, definition, attributes,
                             used_by=None, generated_by=None,
                             derived_from=None, children=False):
        """Emit the full documentation section for one class: heading,
        definition, PROV relations, attribute list, examples, ranges,
        children and individuals (the latter recursively)."""
        class_label = self.owl.get_label(class_uri)
        class_name = self.owl.get_name(class_uri)

        definition = self.format_definition(definition)

        self.text += """
        <!-- """+class_label+""" ("""+class_name+""")"""+""" -->
        <section id="section-"""+class_label+"""">
            <h1 label=\""""+class_name+"""\">"""+class_label+"""</h1>
            <div class="glossary-ref">
                """+self.term_link(class_uri, "dfn") + ": " + definition

        self.text += " "+self.term_link(class_uri)+" is"

        prov_class = self.owl.get_prov_class(class_uri)
        if prov_class:
            self.text += " a "+self.owl.get_label(prov_class)

        # "used by" sentence (entities used by activities)
        found_used_by = False
        if used_by:
            if class_uri in used_by:
                self.text += self.linked_listing(
                    used_by[class_uri], " used by ")
                found_used_by = True

            # Activities that use this entity
            used_entities = list()
            for used_entity, used_activities in used_by.items():
                for used_act in used_activities:
                    if used_act == class_uri:
                        used_entities.append(used_entity)
            if used_entities:
                self.text += self.linked_listing(
                    used_entities, " that uses ", " entities")

        # "generated by" sentence
        found_generated_by = False
        if generated_by:
            if class_uri in generated_by:
                if found_used_by:
                    self.text += " and "
                self.text += self.linked_listing(
                    list([generated_by[class_uri]]), " generated by ")
                found_generated_by = True

            if class_uri in generated_by.values():
                generated_entities = list()
                for generated_entity, generated_act in generated_by.items():
                    if generated_act == class_uri:
                        generated_entities.append(generated_entity)
                if generated_entities:
                    self.text += self.linked_listing(
                        generated_entities,
                        ". This activity generates ", " entities")

        # "derived from" sentence
        if derived_from:
            if class_uri in derived_from:
                if found_used_by or found_generated_by:
                    self.text += " and "
                self.text += self.linked_listing(
                    list([derived_from[class_uri]]), " derived from ")

        self.text += "."

        # nidm-namespace ranges collected while walking the attributes;
        # documented recursively at the end of this section
        range_classes = list()

        # Attribute list (a lone crypto:sha512 attribute is not displayed)
        if attributes and (attributes != set([CRYPTO['sha512']])):
            self.text += """
            </div>
            <p></p>
            <div class="attributes" id="attributes-"""+class_label + \
                """"> A """ + \
                self.term_link(class_uri)+""" has attributes:
            <ul>
                <li><span class="attribute" id=\"""" + \
                class_label+""".label">rdfs:label</span>: \
(<em class="rfc2119" title="OPTIONAL">OPTIONAL</em>) """\
                """Human readable description of the """ + \
                self.term_link(class_uri)+""".</li>"""

            for att in sorted(attributes):
                # Do not display prov relations (used, wasGeneratedBy...) as
                # attributes
                if not self.owl.get_label(att).startswith("prov"):
                    if att not in self.attributes_done:
                        # First definition of this attribute
                        att_tag = "dfn"
                    else:
                        att_tag = "a"
                    self.attributes_done.add(att)

                    # if att_label.startswith("nidm:"):
                    att_def = self.owl.get_definition(att)
                    self.text += """
                <li>"""+self.term_link(att, att_tag) + \
                        '</span>: (<em class="rfc2119" title="OPTIONAL">' + \
                        'OPTIONAL</em>) ' + self.format_definition(att_def)

                    # Document the attribute's range and its direct children
                    if att in self.owl.parent_ranges:
                        child_ranges = list()
                        for parent_range in self.owl.parent_ranges[att]:
                            child_ranges += self.owl.get_direct_children(
                                parent_range)
                        child_ranges = sorted(child_ranges)
                        # if nidm_namespace:
                        child_range_txt = ""
                        if child_ranges:
                            # Get all child ranges
                            child_range_txt = self.linked_listing(
                                child_ranges, " such as ")

                        self.text += self.linked_listing(
                            self.owl.parent_ranges[att],
                            " (range ", child_range_txt+")")

                    self.text += "."

                    # Collect nidm ranges for later recursive documentation
                    for range_class in sorted(self.owl.ranges[att]):
                        if self.owl.get_label(range_class).\
                                startswith('nidm'):
                            range_classes.append(range_class)

                    self.text += "</li>"

        BASE_REPOSITORY = "https://raw.githubusercontent.com/" + \
            "incf-nidash/nidm/master/"
        examples = self.owl.get_example(class_uri, BASE_REPOSITORY)
        # NOTE(review): the closing </ul></div> is emitted once per
        # example — with zero examples the attribute list is left open and
        # with several it is closed repeatedly; confirm intended.
        for example in sorted(examples):
            self.text += """
            </ul>
        </div>
        <pre class='example highlight'>"""+cgi.escape(example) + \
                """</pre>"""

        # Document nidm-namespace ranges not yet documented (recursive)
        for range_name in range_classes:
            if not range_name in self.already_defined_classes:
                self.already_defined_classes.append(range_name)
                self.create_class_section(
                    range_name,
                    self.owl.get_definition(range_name),
                    self.owl.attributes.setdefault(range_name, None),
                    children=True)

        # For object property list also children (in sub-sections)
        if children:
            direct_children = self.owl.sorted_by_labels(
                self.owl.get_direct_children(class_uri))
            for child in direct_children:
                if not child in self.already_defined_classes:
                    self.create_class_section(
                        child,
                        self.owl.get_definition(child),
                        self.owl.attributes.setdefault(child, None),
                        children=True)
                    self.already_defined_classes.append(child)

        # Display individuals
        individuals = self.owl.sorted_by_labels(
            self.owl.get_individuals(class_uri))
        if individuals:
            self.text += \
                " Examples of "+self.term_link(class_uri)+" includes " + \
                "<ul>"
            for indiv in individuals:
                self.text += "<li>" + self.term_link(indiv, "dfn") + ": " + \
                    self.format_definition(
                        self.owl.get_definition(indiv)) + \
                    "</li>"
            self.text += "</ul>"

        self.text += """
        </section>"""

    def close_sections(self):
        """Close every <section> opened by create_title."""
        for x in range(0, self.section_open):
            self.text += "\t"*x+"</section>\n"

    # Write out specification
    def write_specification(self, spec_file=None, component=None,
                            version=None):
        """Write self.text to spec_file (or to DOC_FOLDER/component_version
        .html when component and version are given), UTF-8 encoded."""
        if component and version:
            spec_file = os.path.join(DOC_FOLDER,
                                     component+"_"+version+".html")
        spec_open = codecs.open(spec_file, 'w', "utf-8")
        spec_open.write(self.text)
        spec_open.close()

    def _header_footer(self, prev_file=None, follow_file=None,
                       component=None, version=None):
        """Wrap self.text with header/footer include files and optional
        release notes (component/version override the explicit paths)."""
        release_notes = None
        if component:
            prev_file = os.path.join(INCLUDE_FOLDER, component+"_head.html")
            follow_file = os.path.join(INCLUDE_FOLDER, component+"_foot.html")
            if version:
                release_notes = os.path.join(
                    os.path.dirname(self.owl.file),
                    component+"_"+version+"_notes.html")
                # Release notes are optional: silently skip when absent
                if not os.path.isfile(release_notes):
                    release_notes = None

        if prev_file is not None:
            prev_file_open = open(prev_file, 'r')
            # NOTE(review): str.decode — this code path is Python 2 only
            self.text = prev_file_open.read().decode('utf-8')+self.text
            prev_file_open.close()
        if release_notes is not None:
            release_note_open = open(release_notes, 'r')
            self.text = self.text+release_note_open.read()
            release_note_open.close()
        if follow_file is not None:
            follow_file_open = open(follow_file, 'r')
            self.text = self.text+follow_file_open.read()
            follow_file_open.close()
class NIDMObjectsUnitTesting(unittest.TestCase):
    """
    Unit testing of NIDM objects (compared to examples provided in
    nidm-results.owl)
    """

    def setUp(self):
        # Directory receiving the files written by each test
        self.export_dir = os.path.join(TEST_FOLDER, 'nidm')
        if not os.path.isdir(self.export_dir):
            os.mkdir(self.export_dir)

        # Retreive owl file for NIDM-Results
        owl_file = os.path.join(TERM_RESULTS_DIR, 'nidm-results.owl')
        # The previous `assert owl_file` was always true (non-empty path
        # string); check that the file actually exists instead.
        assert os.path.isfile(owl_file)
        self.owl = OwlReader(owl_file)

        self.doc = ProvDocument()
        # self.bundle = ProvBundle(identifier=NIIRI[software_lc+'_results_id'])

        self.provn_file = os.path.join(self.export_dir, 'unit_test.provn')

        # Namespace prefixes prepended to every ground-truth turtle document
        namespaces_file = os.path.join(TERM_RESULTS_DIR, "templates",
                                       "Namespaces.txt")
        with open(namespaces_file) as namespaces_fid:
            self.prefixes = namespaces_fid.read()

        self.to_delete_files = [self.provn_file]
        self.gt_ttl_files = list()

    def test_design_matrix(self):
        """Compare an exported DesignMatrix with the owl example."""
        mat = np.matrix('1 2; 3 4')
        mat_image = os.path.join(os.path.dirname(TEST_FOLDER), "data",
                                 "fmri.feat", "design.png")
        design_matrix = DesignMatrix(mat, mat_image, self.export_dir)
        self.doc.update(design_matrix.export())

        # In the FSL export the design matrix contains both the Design Matrix
        # entity and the Image entity representing the design matrix
        # visualisation.
        self.to_delete_files.append(
            os.path.join(self.export_dir, "DesignMatrix.csv"))
        self.to_delete_files.append(
            os.path.join(self.export_dir, "DesignMatrix.png"))

        gt_file = self.owl.get_example(NIDM['DesignMatrix'])
        self.gt_ttl_files = [
            os.path.join(TERM_RESULTS_DIR, gt_file.replace("file://./", "")),
            os.path.join(TERM_RESULTS_DIR, "examples",
                         "Image-DesignMatrix.txt")]
        self._create_gt_and_compare("Design Matrix")

    def test_data(self):
        """Compare an exported Data entity with the owl example."""
        data = Data(grand_mean_scaling=True, target=100.0)
        self.doc.update(data.export())

        gt_file = self.owl.get_example(NIDM['Data'])
        self.gt_ttl_files.append(
            os.path.join(TERM_RESULTS_DIR, gt_file.replace("file://./", "")))
        self._create_gt_and_compare("Data")

    def _create_gt_and_compare(self, class_name):
        """Serialise self.doc to provn, convert it to turtle and compare
        the resulting graph with the concatenated ground-truth files
        listed in self.gt_ttl_files.

        Raises
        ------
        Exception
            When the example graph differs from the ground truth.
        """
        # Write-out current example in a provn file and convert to turtle
        with open(self.provn_file, 'w') as provn_fid:
            provn_fid.write(self.doc.get_provn())

        ttl_file = self.provn_file.replace(".provn", ".ttl")
        # Argument list (no shell=True) so that paths containing spaces or
        # shell metacharacters are passed through unmangled
        call(["provconvert", "-infile", self.provn_file,
              "-outfile", ttl_file])
        self.to_delete_files.append(ttl_file)

        # Load current example graph
        ex_graph = Graph()
        ex_graph.parse(source=ttl_file, format='turtle')

        # Read and concatenate ground truth files
        gt = ""
        for gt_ttl_file in self.gt_ttl_files:
            with open(gt_ttl_file) as gt_fid:
                # What is described in the examples to be at any path is
                # relative in export.  Apply the rewrite to the text as it
                # is read: previously it ran on the accumulator *before*
                # appending, so the last file's paths were never rewritten.
                gt = gt + gt_fid.read().replace("/path/to/", "./")

        gt_graph = Graph()
        gt = self.prefixes+gt
        gt_graph.parse(data=gt, format='turtle')

        # Compare graphs
        found_diff = compare_graphs(ex_graph, gt_graph)
        if found_diff:
            raise Exception("Difference in "+class_name+".")

    def tearDown(self):
        """Delete files created for testing and the export directory."""
        for to_delete_file in self.to_delete_files:
            if os.path.isfile(to_delete_file):
                os.remove(to_delete_file)
        os.rmdir(self.export_dir)
class TestExamples(unittest.TestCase):
    """Validate every example document against its ontology.

    All example turtle files are parsed once in the constructor; the
    tests then check class names and attributes against the owl file
    associated with each example.
    """

    def __init__(self, *args, **kwargs):
        super(TestExamples, self).__init__(*args, **kwargs)

        # Namespace prefix preamble prepended to term examples (stored
        # without prefix declarations)
        namespaces_def = os.path.join(RELPATH, "terms", "templates",
                                      'Namespaces.txt')
        fid = open(namespaces_def, "r")
        namespaces = fid.read()
        fid.close()

        self.term_examples = glob.glob(
            os.path.join(RELPATH, "terms", "examples", '*.txt'))
        self.example_files = example_filenames.union(self.term_examples)

        self.examples = dict()   # example file -> parsed rdflib Graph
        self.owl_files = dict()  # example file -> governing owl file
        for example_file in self.example_files:
            provn_file = os.path.join(
                os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                example_file)
            # ttl_file_url = get_turtle(provn_file)
            ttl_file = provn_file.replace(".provn", ".ttl")

            # Read turtle
            self.examples[example_file] = Graph()
            if example_file in self.term_examples:
                # Term examples need the shared namespace preamble
                fid = open(ttl_file, "r")
                ttl_txt = fid.read()
                fid.close()
                self.examples[example_file].parse(data=namespaces + ttl_txt,
                                                  format='turtle')
            else:
                self.examples[example_file].parse(ttl_file, format='turtle')

            # Walk up from the example to find the 'terms' directory:
            # examples live at varying depths in the repository
            term_dir = os.path.join(os.path.dirname(ttl_file), os.pardir,
                                    'terms')
            if not os.path.isdir(term_dir):
                term_dir = os.path.join(os.path.dirname(ttl_file),
                                        os.pardir, os.pardir, 'terms')
            # Retreive owl file for minimal examples
            if not os.path.isdir(term_dir):
                term_dir = os.path.join(os.path.dirname(ttl_file),
                                        os.pardir, os.pardir, os.pardir,
                                        'terms')
            owl_files = glob.glob(os.path.join(term_dir, '*.owl'))
            # NOTE(review): assumes at least one .owl file per terms
            # directory; only the first match is used — confirm.
            self.owl_files[example_file] = owl_files[0]

        self.owl_readers = dict()  # cache: owl file -> OwlReader

    def _load_owl(self, owl_file):
        """Load (or retrieve from cache) the OwlReader for `owl_file`,
        together with its import files, into self.owl."""
        if owl_file in self.owl_readers:
            self.owl = self.owl_readers[owl_file]
        else:
            # Retreive owl file for NIDM-Results
            # owl_file = os.path.join(RELPATH, 'terms', 'nidm-results.owl')

            # check the file exists
            assert os.path.exists(owl_file)

            # Read owl (turtle) file
            owl_path = os.path.dirname(owl_file)
            # Imports live at a different depth for extensions than for
            # the core ontology
            if not "extension" in os.path.dirname(owl_file):
                import_files = glob.glob(os.path.join(owl_path, \
                    os.pardir, os.pardir, "imports", '*.ttl'))
            else:
                import_files = glob.glob(os.path.join(owl_path, \
                    os.pardir, os.pardir, os.pardir, os.pardir,
                    "imports", '*.ttl'))
                # Main ontology file
                import_files += glob.glob(os.path.join(owl_path, \
                    os.pardir, os.pardir, os.pardir, "terms", '*.owl'))

            self.owl = OwlReader(owl_file, import_files)
            self.owl_readers[owl_file] = self.owl

    def test_check_classes(self):
        """Every entity/activity/agent used in each example must be a
        class defined in the corresponding data model."""
        logger.info("TestExamples: test_check_classes")
        my_exception = dict()
        for example_file in self.example_files:
            example_name = example_file
            example_graph = self.examples[example_file]
            owl = self.owl_files[example_file]
            self._load_owl(owl)

            # Check that all entity, activity, agent are defined in the
            # data model
            exception_msg = self.owl.check_class_names(example_graph,
                                                       example_name)
            my_exception = merge_exception_dict(my_exception, exception_msg)

        # Aggregate errors over examples for conciseness
        if my_exception:
            error_msg = ""
            for unrecognised_class_name, examples in my_exception.items():
                error_msg += unrecognised_class_name + " (from " + ', '.join(
                    examples) + ")"
            raise Exception(error_msg)

    def test_check_attributes(self):
        """Every attribute used in each example must be defined in the
        data model, with values compatible with its declared range and
        restrictions."""
        logger.info("TestExamples: test_check_attributes")
        my_exception = dict()
        my_range_exception = dict()
        my_restriction_exception = dict()
        for example_file in self.example_files:
            example_name = example_file
            example_graph = self.examples[example_file]
            owl = self.owl_files[example_file]
            self._load_owl(owl)

            # exception_msg is a 3-tuple of dicts: (unknown attributes,
            # range violations, restriction violations)
            exception_msg = self.owl.check_attributes(example_graph,
                                                      example_name)

            my_exception = merge_exception_dict(my_exception,
                                                exception_msg[0])
            if not example_file in self.term_examples:
                my_range_exception = merge_exception_dict(
                    my_range_exception, exception_msg[1])
            else:
                # Ignore range exceptions for test examples (as for object
                # properties the linked object will be missing)
                # NOTE(review): this resets the accumulator and therefore
                # also discards range exceptions gathered from previous
                # (non-term) examples — confirm this is intended.
                my_range_exception = dict()
            my_restriction_exception = merge_exception_dict(
                my_restriction_exception, exception_msg[2])

        # Aggregate errors over examples for conciseness
        error_msg = ""
        for found_exception in list(
                [my_exception, my_range_exception,
                 my_restriction_exception]):
            if found_exception:
                for unrecognised_attribute, example_names in \
                        found_exception.items():
                    error_msg += unrecognised_attribute + " (from " + \
                        ', '.join(example_names) + ")"

        # if my_range_exception:
        #     for unrecognised_range, example_names in \
        #             my_range_exception.items():
        #         error_msg += unrecognised_range+" (from "+\
        #             ', '.join(example_names)+")"

        if error_msg:
            raise Exception(error_msg)
# NOTE(review): this constructor sits at module level, outside any visible
# class — it appears to be an orphaned leftover (the MemoryCondenser class
# below defines its own __init__ with this same assignment plus more
# fields).  Confirm whether it can be removed.
def __init__(self):
    self.rdrOwl = OwlReader()
class MemoryCondenser: def __init__(self, parent=None): self.rdrOwl = OwlReader() self.rdrDesig = DesignatorReader() self.arrExperiences = [] self.knownTaskTypes = [] def countExperiences(self): return len(self.arrExperiences) def addExperience(self, expAdd): self.arrExperiences.append(expAdd) def loadExperience(self, strOwlFile, strDesignatorFile): logReturn = Log() logReturn.setOwlData(self.rdrOwl.loadOwl(strOwlFile)) if strDesignatorFile != "": logReturn.setDesignatorData(self.rdrDesig.loadDesignators(strDesignatorFile)) self.addExperience(logReturn) def durations_for_task_type(self, data, task_type): collected_durations = [] for key in data: if key == task_type: collected_durations += data[key]["durations"] collected_durations += self.durations_for_task_type(data[key]["children"], task_type) return collected_durations def condenseData(self, dataOwl): result = None self.tti = dataOwl["task-tree-individuals"] owlMeta = dataOwl["metadata"] owlAnnot = dataOwl["annotation"] if owlMeta: result = {"Toplevel" : self.condenseNodes("", owlMeta.subActions())}; else: print "No meta data in file!" 
return result def condenseNodes(self, strParentNode, arrNodes, nLevel = 0): arrTypes = {} arrIndividuals = {} for strNode in arrNodes: owlNode = self.tti[strNode] ident = owlNode.taskContext() failures = owlNode.failures() failure = "" if len(failures) > 0: failure = self.tti[failures[0]].type() result = self.condenseNodes(strNode, owlNode.subActions(), nLevel + 1) if not ident in arrTypes: arrTypes[ident] = result else: arrTypes[ident] = self.unifyResults(arrTypes[ident], result) arrTypes[ident]["individuals"][strNode] = {"parameters" : owlNode.annotatedParameters(True), "parent" : strParentNode, "failure" : failure} return {"subTypes" : arrTypes, "individuals" : {}} def unifyResults(self, res1, res2): resparams = {} if len(res1["individuals"]) > 0: resparams = res1["individuals"] if len(res2["individuals"]) > 0: resparams = dict(resparams.items() + res2["individuals"].items()) unified = {"subTypes" : {}, "individuals" : resparams} for ressub1 in res1["subTypes"]: if ressub1 in res2["subTypes"]: unified["subTypes"][ressub1] = self.unifyResults(res1["subTypes"][ressub1], res2["subTypes"][ressub1]) else: unified["subTypes"][ressub1] = res1["subTypes"][ressub1] for ressub2 in res2["subTypes"]: if not ressub2 in res1["subTypes"]: unified["subTypes"][ressub2] = res2["subTypes"][ressub2] return unified def condense(self): arrStartNodes = [] self.tti = {} for experience in self.arrExperiences: owlData = experience.getOwlData() metaData = owlData["metadata"] arrStartNodes += metaData.subActions() self.tti.update(owlData["task-tree-individuals"]) self.processed_nodes = [] tree = self.condenseNodesByContext(arrStartNodes) parameters = {} for node in self.processed_nodes: params = self.tti[node].annotatedParameters(bSingularParameters = True) if len(params) > 0: parameters[node] = {} for p in params: if not p == "_time_created": parameters[node][p.lstrip("parameter-")] = params[p] result = {"tree": tree, "parameters": parameters} print result def 
sortComparatorActionTime(self, action1, action2): if action1.timeSpan[0] > action2.timeSpan[0]: return 1 elif action1.timeSpan[0] == action2.timeSpan[0]: return 0 else: return -1 def sortActionsByTime(self, actions): actions.sort(self.sortComparatorActionTime) return actions def condenseNodesByContext(self, nodes): # Separate nodes by their taskContext dicContexts = {} for node in nodes: self.processed_nodes.append(node) owlNode = self.tti[node] if not owlNode.taskContext() in dicContexts: dicContexts[owlNode.taskContext()] = {"nodes": [], "terminal-state": []} dicContexts[owlNode.taskContext()]["nodes"].append(node) for context in dicContexts: all_children = [] for node in dicContexts[context]["nodes"]: sub_actions = self.sortActionsByTime(self.tti[node].subActions()) if len(sub_actions) > 0: all_children += sub_actions else: dicContexts[context]["terminal-state"].append(node) dicContexts[context]["children"] = self.condenseNodesByContext(all_children) return dicContexts def generalizeExperiences(self): self.generalizedExperience = {} self.tti = {} arrStartNodes = [] for experience in self.arrExperiences: owlData = experience.getOwlData() metaData = owlData["metadata"] arrStartNodes += metaData.subActions() self.tti.update(owlData["task-tree-individuals"]) for node in arrStartNodes: self.injectActionIntoGeneralizedExperience(node, self.generalizedExperience) print self.generalizedExperience def injectActionIntoGeneralizedExperience(self, action, target_branch): target_branch["a"] = 5 def dotNode(self, node, first): dot = "" tti = self.t_tti[node] former_subnode = "" dot += " {rank=same;" for subnode in tti.subActions(): dot += " " +subnode dot += "}\n" for subnode in tti.subActions(): param_line = "" for param in self.t_tti[subnode].annotatedParameters(): if param != "_time_created" and param != "CALLPATTERN": value = self.t_tti[subnode].tagNodeValues("knowrob:" + param)[0] param_line += param + " = " + value + "\n" dot += " " + subnode + " [shape=box, label=\"" + 
self.t_tti[subnode].taskContext() + " (" + str(round(self.t_tti[subnode].time(), 3)) + "s)\n" + param_line + "\"]\n" if first == True: first = False dot += "edge [dir=both, arrowhead=normal, arrowtail=none]" dot += "\n " + node + " -> " + subnode + "\n" if not former_subnode == "": dot += " edge [arrowhead=empty, arrowtail=none]\n" dot += " " + former_subnode + " -> " + subnode + "\n" dot += self.dotNode(subnode, True) former_subnode = subnode return dot def printExperiences(self, dot): for experience in self.arrExperiences: if dot: self.printDotExperience(experience) else: self.printRawExperience(experience) def printRawExperience(self, experience): owlData = experience.getOwlData() metaData = owlData["metadata"] start_nodes = metaData.subActions() self.t_tti = owlData["task-tree-individuals"] for node in start_nodes: self.printRawExperienceNode(node) def printRawExperienceNode(self, node, level = 0): indent = " " * level owl = self.t_tti[node] parameters = owl.annotatedParameters() param_str = "(" first = True for parameter in parameters: if not parameter == "_time_created": if first == True: first = False else: param_str = param_str + ", " key_str = parameter[10:] if parameter[:10] == "parameter-" else parameter param_str = param_str + key_str + "=" + parameters[parameter][0] param_str = param_str + ")" print indent + owl.taskContext() + " " + param_str if len(owl.subActions()) > 0: for node in owl.subActions(): self.printRawExperienceNode(node, level + 1) def printDotExperience(self, experience): owlData = experience.getOwlData() metaData = owlData["metadata"] start_nodes = metaData.subActions() self.t_tti = owlData["task-tree-individuals"] dot = "digraph plangraph {\n" dot += " label=\"Original Experiences\"\n" dot += " labeljust=center\n" dot += " labelloc=top\n" for node in start_nodes: dot += " " + node + " [shape=box, label=\"" + self.t_tti[node].taskContext() + "\"]\n\n" dot += self.dotNode(node, True) dot += "}\n" print dot def compareSubActions(self, 
subaction1, subaction2): if subaction1 == subaction2: return 0 next_action = subaction1 while next_action != None: next_action = self.tti[subaction1].nextAction() if next_action == subaction2: return 1 return -1 def sortSubActions(self, subactions): subactions.sort(self.compareSubActions) return subactions def sortSubActionsList(self, subactions_list): sorted_list = [] for subactions in subactions_list: sorted_list.append(self.sortSubActions(subactions)) return sorted_list def generalizeNodes(self, nodes): sequences = [] for node in nodes: sequences.append(self.tti[node].subActions()) return sequences def workOnExperiences(self): start_nodes = [] self.tti = {} for experience in self.arrExperiences: owlData = experience.getOwlData() metaData = owlData["metadata"] start_nodes += metaData.subActions() self.tti.update(owlData["task-tree-individuals"]) print self.generalizeNodes(start_nodes) def injectExperiences(self, deduced = False, data = False): self.arrInjected = {} self.tti = {} self.uid_counter = 0; root_action_count = 0 for experience in self.arrExperiences: owlData = experience.getOwlData() metaData = owlData["metadata"] self.tti.update(owlData["task-tree-individuals"]) for node in metaData.subActions(): self.injectExperienceNode(node, self.arrInjected, True) root_action_count = root_action_count + 1 for experience in self.arrExperiences: owlData = experience.getOwlData() metaData = owlData["metadata"] self.tti.update(owlData["task-tree-individuals"]) for node in metaData.subActions(): self.checkForTerminalStateOccurrences(self.tti[node].taskContext(), self.arrInjected) for node in metaData.subActions(): self.checkForOptionalInjectedNodes(self.tti[node].taskContext(), self.arrInjected) self.taskDurations = {} for task_type in self.knownTaskTypes: self.taskDurations[task_type] = self.durations_for_task_type(self.arrInjected, task_type) if deduced: self.printDeduced(dot = not data, root_action_count = root_action_count) else: self.printInjected(dot = not data) 
def getParameters(self, node):
    """Return (call_pattern, params) for `node`, where params maps each
    annotated parameter (minus the "parameter-" prefix) to its first value
    and call_pattern is the value of the special CALLPATTERN key."""
    params_fixed = {}
    call_pattern = ""
    params = self.tti[node].annotatedParameters()
    for param in params:
        if not param == "_time_created" and not param == "CALLPATTERN":
            key_str = param[10:] if param[:10] == "parameter-" else param
            params_fixed[key_str] = params[param][0]
        elif param == "CALLPATTERN":
            call_pattern = params[param][0]
    return (call_pattern, params_fixed)

def emptyContext(self, invocation_path, call_pattern):
    """Create a fresh frame-context dict with a unique uid."""
    ctx = {"children": {},
           "next-actions": {},
           "uid": self.uid_counter,
           "terminal-state": "false",
           "start-state": "false",
           "optional": "false",
           "instances": 0,
           "durations": [],
           "outcomes": {},
           "invocations": [invocation_path],
           "call-pattern": call_pattern}
    self.uid_counter = self.uid_counter + 1
    return ctx

def updateFrameContext(self, node, frame, invocation_path, previous_ctx):
    """Record one occurrence of `node`'s task context in `frame`:
    create/extend the context entry, track start-state, duration, outcome
    (first failure label or SUCCESS), and link it as a next-action of
    `previous_ctx` when given."""
    ctx = self.tti[node].taskContext()
    if ctx != previous_ctx:
        (call_pattern, params) = self.getParameters(node)
        if not ctx in frame:
            invocation_path.update({self.uid_counter: params})
            frame[ctx] = self.emptyContext(invocation_path, call_pattern)
        else:
            invocation_path.update({frame[ctx]["uid"]: params})
            frame[ctx]["invocations"].append(invocation_path)
        if frame[ctx]["start-state"] == "false":
            if not self.tti[node].previousAction():
                frame[ctx]["start-state"] = "true"
        frame[ctx]["instances"] += 1
        frame[ctx]["durations"].append(self.tti[node].time())
        failures = self.tti[node].failures()
        if len(failures) > 0:
            # Outcome is the part of the failure label after the colon.
            outcome = self.tti[failures[0]].tagNodeValues(
                "rdfs:label")[0].split(":")[1]
        else:
            outcome = "SUCCESS"
        if not outcome in frame[ctx]["outcomes"]:
            frame[ctx]["outcomes"][outcome] = 0
        frame[ctx]["outcomes"][outcome] += 1
        if previous_ctx:
            if not ctx in frame[previous_ctx]["next-actions"]:
                frame[previous_ctx]["next-actions"][ctx] = []
            frame[previous_ctx]["next-actions"][ctx].append(params)

def injectExperienceNode(self, node, frame, rootlevel = False,
                         invocation_path = None, previous_node = None):
    """Recursively merge `node` (children first, then next-actions) into
    `frame`, threading the accumulated invocation path along."""
    # FIX: mutable default argument ({}) replaced by the None sentinel;
    # behavior is unchanged for all existing call sites.
    if invocation_path is None:
        invocation_path = {}
    ctx = self.tti[node].taskContext()
    new_invocation_path = invocation_path.copy()
    if not ctx in self.knownTaskTypes:
        self.knownTaskTypes.append(ctx)
    self.updateFrameContext(
        node, frame, new_invocation_path,
        self.tti[previous_node].taskContext() if previous_node else None)
    # NOTE: dead locals num_children/num_siblings removed (never read).
    for sub in self.tti[node].subActions():
        if not self.tti[sub].previousAction():
            self.injectExperienceNode(sub, frame[ctx]["children"], False,
                                      new_invocation_path)
    next_node = self.tti[node].nextAction()
    if next_node and not rootlevel and \
            not ctx == self.tti[next_node].taskContext():
        self.injectExperienceNode(next_node, frame, False,
                                  new_invocation_path, node)

def checkForOptionalInjectedNodes(self, ctx, frame, parent_instances = -1,
                                  came_from = None):
    """Mark `ctx` optional when it occurred less often than its parent,
    then recurse into children and next-actions (once per context)."""
    if not "check-optional" in frame[ctx]:
        frame[ctx]["check-optional"] = "done"
        frame[ctx]["optional"] = "false"
        # NOTE: dead local came_from_terminates removed (computed, never read).
        came_from_valid = True
        if came_from:
            if came_from == ctx:
                came_from_valid = False
        if came_from_valid:
            if frame[ctx]["instances"] < parent_instances:
                frame[ctx]["optional"] = "true"
        for child in frame[ctx]["children"]:
            if frame[ctx]["children"][child]["start-state"] == "true":
                self.checkForOptionalInjectedNodes(
                    child, frame[ctx]["children"], frame[ctx]["instances"])
        for next_action in frame[ctx]["next-actions"]:
            if next_action in frame and not next_action == ctx:
                self.checkForOptionalInjectedNodes(
                    next_action, frame, frame[ctx]["instances"], ctx)

def checkForTerminalStateOccurrences(self, ctx, frame):
    """Compute how many instances of `ctx` ended there (were not continued
    by a starting child or a known next-action), then recurse."""
    if not "check-terminal" in frame[ctx]:
        frame[ctx]["check-terminal"] = "done"
        frame[ctx]["terminal-state"] = "true"
        child_instances = 0
        next_instances = 0
        for child in frame[ctx]["children"]:
            if frame[ctx]["children"][child]["start-state"] == "true":
                child_instances = child_instances + \
                    frame[ctx]["children"][child]["instances"]
        for next_action in frame[ctx]["next-actions"]:
            if next_action in frame and not next_action == ctx:
                next_instances = next_instances + \
                    frame[next_action]["instances"]
        terminal_instances = frame[ctx]["instances"] - \
            (child_instances + next_instances)
        if terminal_instances > 0:
            frame[ctx]["terminal-instances"] = terminal_instances
        else:
            frame[ctx]["terminal-instances"] = 0
        for child in frame[ctx]["children"]:
            self.checkForTerminalStateOccurrences(child, frame[ctx]["children"])
        for next_action in frame[ctx]["next-actions"]:
            if next_action in frame and not next_action == ctx:
                self.checkForTerminalStateOccurrences(next_action, frame)

def printDeduced(self, dot = False, root_action_count = 1):
    """Expand the injected structure into concrete pathways, dump the
    two-levels-down plans to deduced_experiences.json, then print."""
    # TODO: Extend this to use all top-level nodes in case they
    # are different
    self.global_ctx_counter = 0
    # FIX: dict.keys() is not subscriptable on Python 3; wrap in list().
    # Identical behavior on Python 2.
    deduced = self.expandPathways(list(self.arrInjected.keys())[0],
                                  self.arrInjected, root_action_count)
    fixed_deduced = []
    for d in deduced:
        # Go down two levels
        fixed_singular = d["child"]["child"]
        for invocation in fixed_singular["invocations"]:
            # Drop the two synthetic top-level uids from each invocation.
            invocation.pop(0, 0)
            invocation.pop(1, 0)
        fixed_deduced.append(fixed_singular)
    with open("deduced_experiences.json", "w") as f:
        json.dump(fixed_deduced, f)
    if dot:
        self.printDotDeduced(deduced)
    else:
        print(deduced)

def allPossibleCombinations(self, n, static_indices):
    """Enumerate all 0/1 vectors of length `n`, with positions listed in
    `static_indices` pinned to 1."""
    solutions = []
    prototype = []
    for i in range(n):
        prototype.append(1 if i in static_indices else 0)
    run = True
    while run:
        prototype[0] = prototype[0] + 1
        for i in range(n):
            if prototype[i] > 1:  # carry into the next position
                prototype[i] = 1 if i in static_indices else 0
                if i < n - 1:
                    prototype[i + 1] = prototype[i + 1] + 1
                else:
                    run = False
        solutions.append(copy.deepcopy(prototype))
    return solutions

def allPossiblePermutationIndices(self, lengths):
    """Enumerate index vectors counting through the given per-slot lengths
    (odometer style)."""
    indices = []
    for i in range(len(lengths)):
        indices.append(0)
    solutions = []
    if len(indices) > 0:
        run = True
        while run:
            indices[0] = indices[0] + 1
            for i in range(len(lengths)):
                if indices[i] >= lengths[i]:
                    indices[i] = 0
                    if i == len(lengths) - 1:
                        run = False
                    else:
                        indices[i + 1] = indices[i + 1] + 1
            solutions.append(copy.deepcopy(indices))
    return solutions

def allPossiblePermutations(self, objects):
    """Cartesian-product style selection: one element from each list in
    `objects` per solution."""
    lengths = []
    # NOTE: loop variable renamed from `object` -- it shadowed the builtin.
    for obj in objects:
        lengths.append(len(obj))
    indices = self.allPossiblePermutationIndices(lengths)
    solutions = []
    for index in indices:
        solution = []
        for i in range(len(objects)):
            solution.append(objects[i][index[i]])
        solutions.append(copy.deepcopy(solution))
    return solutions

def allPossibleSingularPermutations(self, objects, static_indices):
    """Flatten all selections where non-static slots may also be absent
    (None); static slots always contribute their object."""
    objects_extended = []
    for i in range(len(objects)):
        # FIX: was `[object, None]` / `[object]` -- the *builtin* `object`
        # type, not the i-th element. Clearly intended: objects[i].
        add = [objects[i], None] if not i in static_indices else [objects[i]]
        objects_extended.append(add)
    solutions_pre = self.allPossiblePermutations(objects_extended)
    solutions = []
    for solution_pre in solutions_pre:
        for bit in solution_pre:
            if bit != None:
                solutions.append(bit)
    return solutions

def expandPaths(self, ctx, nodes, trace):
    """Recursively expand every possible concrete path through `nodes`
    starting at `ctx`, skipping uids already on `trace`. Each node gets a
    95% confidence interval for its duration."""
    paths = []
    node = nodes[ctx]
    if not node["uid"] in trace:
        ci_n = len(node["durations"])
        ci_N = len(self.taskDurations[ctx])
        # https://www.stat.tamu.edu/~lzhou/stat302/standardnormaltable.pdf
        ci_alpha = 0.05  # Not really used, more for reference
        # FIX(comment): z = 1.96 corresponds to alpha = 0.05, not 0.5.
        ci_z = 1.96  # From standard normal table (z_{1-alpha/2})
        ci_x_bar = sum(node["durations"]) / float(ci_n)
        ci_s_square = 0
        for i in range(ci_n):
            ci_s_square += (node["durations"][i] - ci_x_bar) * \
                (node["durations"][i] - ci_x_bar)
        if ci_n != 1:
            ci_s_square /= (ci_n - 1)  # sample variance
        else:
            ci_s_square /= ci_n
        # Estimated std. error with finite population correction.
        # FIX: ci_n / ci_N was integer division on Python 2 (always 0 for
        # n < N), silently disabling the correction factor.
        ci_sigma_hat_x_bar_pre = (ci_s_square / ci_n) * \
            (1 - float(ci_n) / ci_N)
        if abs(ci_sigma_hat_x_bar_pre) > 0:
            ci_sigma_hat_x_bar = math.sqrt(ci_sigma_hat_x_bar_pre)
        else:
            ci_sigma_hat_x_bar = 0
        ci = [ci_x_bar - ci_z * ci_sigma_hat_x_bar,
              ci_x_bar + ci_z * ci_sigma_hat_x_bar]
        node_desc = {"node": node["uid"],
                     "name": ctx,
                     "durations": node["durations"],
                     "outcomes": node["outcomes"],
                     "optional": node["optional"],
                     "duration-confidence": ci,
                     "invocations": [],
                     "instances": node["instances"],
                     "call-pattern": node["call-pattern"],
                     "theoretical": "false"}
        # FIX: the `+ [node_desc["node"]]` continuation was orphaned onto
        # its own statement by a broken line break, so the current node was
        # never appended to the trace; rejoined into one expression.
        current_trace = copy.deepcopy(trace) + [node_desc["node"]]
        for invocation in node["invocations"]:
            invocation_fits = True
            for uid in invocation:
                if not uid in current_trace:
                    invocation_fits = False
                    break
            if invocation_fits:
                node_desc["invocations"].append(invocation)
        if len(node_desc["invocations"]) == 0:
            node_desc["theoretical"] = "true"
        child_paths = []
        for child in self.getStartNodes(node["children"]):
            child_paths += self.expandPaths(child, node["children"],
                                            current_trace)
        next_action_paths = []
        for next_action in node["next-actions"]:
            if next_action != ctx:
                next_action_paths += self.expandPaths(next_action, nodes,
                                                      current_trace)
        if len(child_paths) > 0 and len(next_action_paths) > 0:
            children_optional = True
            for child_path in child_paths:
                current_child_copy = copy.deepcopy(node_desc)
                current_child_copy["child"] = child_path
                if child_path["optional"] != "true":
                    children_optional = False
                next_actions_optional = True
                for next_action in next_action_paths:
                    current_child_next_copy = copy.deepcopy(current_child_copy)
                    current_child_next_copy["next-action"] = next_action
                    if next_action["optional"] != "true":
                        next_actions_optional = False
                    paths.append(current_child_next_copy)
                if next_actions_optional:
                    paths.append(current_child_copy)
            if children_optional:
                paths.append(node_desc)
        elif len(child_paths) > 0:
            children_optional = True
            for child_path in child_paths:
                current_child_copy = copy.deepcopy(node_desc)
                current_child_copy["child"] = child_path
                if child_path["optional"] != "true":
                    children_optional = False
                paths.append(current_child_copy)
            if children_optional:
                paths.append(node_desc)
        elif len(next_action_paths) > 0:
            next_actions_optional = True
            for next_action in next_action_paths:
                current_next_copy = copy.deepcopy(node_desc)
                current_next_copy["next-action"] = next_action
                if next_action["optional"] != "true":
                    next_actions_optional = False
                paths.append(current_next_copy)
            if next_actions_optional:
                paths.append(node_desc)
        else:
            paths.append(node_desc)
    return paths

def printPath(self, path, indentation = "", is_next = False):
    """Print a path tree: siblings on one line, children indented."""
    sys.stdout.write(indentation + str(path["node"]))
    if "next-action" in path:
        self.printPath(path["next-action"], " ", True)
    if not is_next:
        sys.stdout.write("\n")
    if "child" in path:
        if is_next:
            sys.stdout.write(" \n")
        self.printPath(path["child"], " " if is_next else "")

def expandPathways(self, ctx, nodes, root_action_count, trace = [],
                   relation = "root", correspondant = ""):
    """Thin wrapper around expandPaths with an empty starting trace.
    NOTE(review): trace/relation/correspondant are currently unused;
    kept for interface compatibility."""
    return self.expandPaths(ctx, nodes, [])

def getStartNodes(self, nodes):
    """Return the sub-dict of `nodes` whose start-state is "true"."""
    start_nodes = {}
    for node in nodes:
        if nodes[node]["start-state"] == "true":
            start_nodes[node] = nodes[node]
    return start_nodes

def printInjected(self, dot = False):
    """Print the injected structure, as a dot graph if `dot` is truthy."""
    if dot:
        self.printInjectedDot()
    else:
        print(self.arrInjected)

def printInjectedChildren(self, children, parent = None):
    """Emit dot statements for every context in `children`: node boxes,
    terminal-state markers, parent edges and next-action edges."""
    dot = ""
    edge_pointers = {}
    next_action_parameters = {}
    ids = {}
    # NOTE: dead locals `optionals` and `parent_id` removed (never read).
    for child in children:
        child_id = "node_" + child.replace("-", "_") + "_" + \
            str(self.counterdot)
        ids[child] = child_id
        self.counterdot = self.counterdot + 1
        label = child
        if label[:21] == "REPLACEABLE-FUNCTION-":
            label = label[21:]
        call_pattern = children[child]["call-pattern"]
        dot += " " + child_id + " [shape=box, label=\"" + label + " (" + \
            str(children[child]["uid"]) + " / " + \
            str(children[child]["instances"]) + ")\n" + call_pattern + "\"]\n"
        if children[child]["terminal-state"] == "true":
            if children[child]["terminal-instances"] > 0:
                dot += " ts_" + str(self.counterdot) + \
                    " [shape=doublecircle, label=\"" + \
                    str(children[child]["terminal-instances"]) + "\"]\n"
                dot += " edge [style=dashed, arrowhead=normal, " + \
                    "arrowtail=none, label=\"terminal\"]\n"
                dot += " " + child_id + " -> " + "ts_" + \
                    str(self.counterdot) + "\n"
        dot += self.printInjectedChildren(children[child]["children"],
                                          child_id)
        if parent:
            if children[child]["start-state"] == "true":
                if children[child]["optional"] == "true":
                    dot += " edge [style=solid, arrowhead=normal, " + \
                        "arrowtail=none, label=\"optional\"]\n"
                else:
                    dot += " edge [style=solid, arrowhead=normal, " + \
                        "arrowtail=none, label=\"\"]\n"
            else:
                # NOTE: both optional branches emitted the identical dashed
                # style in the original; collapsed into one statement.
                dot += " edge [style=dashed, arrowhead=none, " + \
                    "arrowtail=none, label=\"\"]\n"
            dot += " " + parent + " -> " + child_id + "\n"
        for na in children[child]["next-actions"]:
            if parent:
                if not na in edge_pointers:
                    edge_pointers[na] = []
                    next_action_parameters[na] = {}
                if not child_id in next_action_parameters[na]:
                    next_action_parameters[na][child_id] = []
                edge_pointers[na].append(child_id)
    for child in children:
        child_id = ids[child]
        if child in edge_pointers:
            for target in edge_pointers[child]:
                # NOTE: commented-out parameter rendering removed; the
                # label is "optional" or empty.
                if children[child]["optional"] == "true":
                    param_str = "optional"
                else:
                    param_str = ""
                dot += " {rank=same; " + child_id + " " + target + "}\n"
                dot += " edge [style=solid, arrowhead=empty, " + \
                    "arrowtail=none, label=\"" + param_str + "\"]\n"
                dot += " " + target + " -> " + child_id + "\n"
    return dot

def printInjectedDot(self):
    """Print the whole injected structure as one dot digraph."""
    self.counterdot = 0
    self.edge_pointers = {}
    dot = "digraph condensed {\n"
    dot += " graph []\n"  # ranksep=0.5 nodesep=0.5 pad=0.5
    dot += " label=\"Condensed Experience Graph\"\n"
    dot += " labeljust=center\n"
    dot += " labelloc=top\n"
    dot += self.printInjectedChildren(self.arrInjected)
    dot += "}\n"
    print(dot)

def expScore(self, exp):
    """Score an experience path: product of every item's rel-occ, times
    the last item's rel-term."""
    acc_score = 1.0
    for item in exp:
        # NOTE: dead local `instances` removed (never read).
        acc_score = acc_score * item["rel-occ"]
    last_item = exp[len(exp) - 1]
    acc_score = acc_score * last_item["rel-term"]
    return acc_score

def expScoreCmp(self, exp1, exp2):
    """Old-style cmp ordering experiences by descending score."""
    score1 = self.expScore(exp1)
    score2 = self.expScore(exp2)
    if score1 < score2:
        return 1
    elif score1 > score2:
        return -1
    else:
        return 0

def reconstructItem(self, root, sequential):
    """Attach every item in `sequential` whose correspondant is `root`'s
    uid, as a child or as the sibling chain, recursively."""
    if not "children" in root:
        root["children"] = []
    if not "next" in root:
        root["next"] = None
    for item in sequential:
        if item["correspondant"] == root["uid"]:
            enriched_item = self.reconstructItem(item, sequential)
            if item["relation"] == "child":
                root["children"].append(enriched_item)
            elif item["relation"] == "sibling":
                root["next"] = enriched_item
    return root

def reassambleStructure(self, sequential):
    """Rebuild the item tree from a flat list, starting at the root item.
    Returns None when no root item exists."""
    root = None
    for item in sequential:
        if item["relation"] == "root":
            root = item
            break
    if root:
        root = self.reconstructItem(root, sequential)
    return root

def printDeducedPlan(self, plan, indentation):
    """Emit dot statements for one deduced plan node plus its sequence and
    child sub-plans; returns (dot_text, this_node_id)."""
    dot = ""
    this_node = "node_" + str(self.node_counter)
    name = (plan["name"][21:]
            if plan["name"][:21] == "REPLACEABLE-FUNCTION-"
            else plan["name"]).lower()
    line_name = name + " (ID " + str(plan["node"]) + "" + \
        (", theoretical" if plan["theoretical"] == "true" else "") + "), "
    line_call_pattern = plan["call-pattern"]
    line_invocations = str(len(plan["invocations"])) + " invocation" + \
        ("" if len(plan["invocations"]) == 1 else "s")
    line_name = line_name + ("" if line_name == "" else "\n")
    line_call_pattern = line_call_pattern + \
        ("" if line_call_pattern == "" else "\n")
    line_invocations = line_invocations + \
        ("" if line_invocations == "" else "\n")
    dot += indentation + this_node + " [shape=box, label=\"" + line_name + \
        line_call_pattern + line_invocations + "\"];\n"
    self.node_counter = self.node_counter + 1
    if "next-action" in plan:
        (dot_new, that_node) = self.printDeducedPlan(plan["next-action"],
                                                     indentation)
        dot += dot_new
        dot += indentation + "{rank=same; " + this_node + " " + \
            that_node + "};\n"
        dot += indentation + \
            "edge [arrowhead=empty, arrowtail=none, label=\"sequence\"]\n"
        dot += indentation + this_node + " -> " + that_node + ";\n"
    if "child" in plan:
        (dot_new, that_node) = self.printDeducedPlan(plan["child"],
                                                     indentation)
        dot += dot_new
        dot += indentation + \
            "edge [arrowhead=normal, arrowtail=none, label=\"child\"]\n"
        dot += indentation + this_node + " -> " + that_node + ";\n"
    return (dot, this_node)

def printDotDeduced(self, deduced):
    """Print all deduced plans as one dot digraph, one cluster per plan."""
    self.node_counter = 0
    # NOTE: dead locals counter/subgraphcounter and commented-out debug
    # prints removed.
    dot = "digraph deduced {\n"
    dot += " label=\"Deduced Possible Action Paths\"\n"
    dot += " labeljust=center\n"
    dot += " labelloc=top\n"
    cluster_index = 0
    for plan in deduced:
        dot += " \n"
        dot += " subgraph cluster_" + str(cluster_index) + " {\n"
        dot += " label=\"\";\n"
        (cluster, something) = self.printDeducedPlan(plan, " ")
        dot += cluster
        dot += " }\n"
        cluster_index = cluster_index + 1
    dot += "}\n"
    print(dot)