def main():
    """Interactive console menu for the AIRM publication tooling.

    Prints the menu, reads the user's choice, runs the matching tool,
    then shows the menu again.  Runs until the process is interrupted.

    NOTE(review): the original implementation re-entered ``main()``
    recursively after every action, which exhausts Python's recursion
    limit after roughly 1000 menu operations; this version loops
    instead, with identical visible behavior.

    See https://github.com/Mckinsey666/bullet for improvements.
    """
    while True:
        choice = '0'
        # '0' is not a valid choice, so this inner loop re-prints the
        # menu until the user types anything else.
        while choice == '0':
            print('\n')
            print("Menu: ")
            print("1: Generate excel file from AIRM xmi")
            print("2: Find AIRM element by URN")
            print("3: Generate JSON file from excel mapping")
            print("4: Create html pages from FIXM JSON file")
            print("5: Create zip file from html/export...")
            print("6: Compare AMXM other XMs")
            print("7: Compare FIXM other XMs")
            print("8: Create index from AMXM mapping")
            print("9: Create html index from FIXM mapping")
            print("10: Create html pages from FIXM mapping")
            print("11: Create html index for AIRM Advanced Viewer")
            print("12: Create html pages for AIRM Advanced Viewer")
            print("13: Create xml with connected index for Advanced Viewer")
            print("14: Create html pages from AMXM mapping")
            print("15: Create index from AIXM mapping")
            print("16: Create pages from AIXM mapping")
            print("17: VIEWER - CX - ABBREVIATIONS - Create index")
            print("18: VIEWER - CX - ABBREVIATIONS - Create pages")
            print("19: VIEWER - CX - TERMS - Create index")
            print("20: VIEWER - CX - TERMS - Create pages")
            print("21: VIEWER - CP - Create index")
            print("22: VIEWER - CP - Create pages")
            print("23: VIEWER - LOGICAL - Create index")
            print("24: VIEWER - LOGICAL - Create pages")
            print("25: Create index from AIXM ADR mapping")
            print("26: Create pages from AIXM ADR mapping")
            choice = input("Please make a choice: ")
        if choice == "1":
            print('\n')
            print("Generating excel file from AIRM xmi...")
            airm_importer.import_xmi("data/xml/airm")
        elif choice == "2":
            # Interactive URN lookup; 'exit' returns to the menu.
            urn = input("provide a URN (type exit to go back to the menu)>")
            while urn != "exit":
                print(airm.load_and_find_urn(urn))
                urn = input(
                    "provide a URN (type exit to go back to the menu)>")
        elif choice == "3":
            print('\n')
            print("Generating JSON file from excel mapping...")
            xlsx2json.transform()
        elif choice == "4":
            print('\n')
            print("Creating html pages from JSON file...")
            json2html.create_html()
        elif choice == "5":
            print('\n')
            name = input(
                "How would you like to name the .zip file? (Do not use .zip as part of the name): "
            )
            print("Creating zip file from html/export...")
            my_zip.compress("data/html/export", name)
        elif choice == "6":
            print('\n')
            compare.report()
        elif choice == "7":
            print('\n')
            compare_fixm.report()
        elif choice == "8":
            print('\n')
            print("Creating html index from xls file...")
            amxm2html.create_html()
        elif choice == "9":
            print('\n')
            print("Creating html pages from xls file...")
            fixm2html.create_html()
        elif choice == "10":
            print('\n')
            print("Creating html pages from xls file...")
            fixm2html.create_html_pages()
        elif choice == "11":
            print('\n')
            print("Creating html pages from xls file...")
            airm2html.create_html()
        elif choice == "12":
            print('\n')
            print("Creating html pages from xls file...")
            airm2html.create_html_pages()
        elif choice == "13":
            print('\n')
            print("Creating conncected index...")  # message text kept as-is ("conncected" sic)
            airm.create_connected_index()
        elif choice == "14":
            print('\n')
            print("Creating html pages...")
            amxm2html.create_html_pages()
        elif choice == "15":
            print('\n')
            print("Creating html pages from xls file...")
            import aixm2html
            aixm2html.create_html()
            print("Done")
        elif choice == "16":
            print('\n')
            print("Creating html pages from xls file...")
            import aixm2html
            aixm2html.create_html_pages()
            print("Done")
        elif choice == "17":
            print('\n')
            print(
                "VIEWER - CX - ABBREVIATIONS (Global) Creating index from xls file..."
            )
            import airm2html
            airm2html.create_index_cx_abbs_global()
            print("Done")
            print(
                "VIEWER - CX - ABBREVIATIONS (Supps) Creating index from xls file..."
            )
            airm2html.create_index_cx_abbs_supp()
            print("Done")
        elif choice == "18":
            print('\n')
            print(
                "VIEWER - CX - ABBREVIATIONS Creating pages from xls file...")
            import airm2html
            airm2html.create_pages_cx_abbs()
            print("Done")
        elif choice == "19":
            print('\n')
            print("VIEWER - CX - TERMS Creating index from xls file...")
            import airm2html
            airm2html.create_index_cx_terms_global()
            print("Done")
            print(
                "VIEWER - CX - TERMS (Supps) Creating index from xls file...")
            airm2html.create_index_cx_terms_supp()
            print("Done")
        elif choice == "20":
            print('\n')
            print("VIEWER - CX - TERMS Creating pages from xls file...")
            import airm2html
            airm2html.create_pages_cx_terms()
            print("Done")
        elif choice == "21":
            print('\n')
            print("VIEWER - CP Creating index from xls file...")
            import airm2html
            airm2html.create_index_cp_global()
            print("Done")
            print("VIEWER - CP (Supps) Creating index from xls file...")
            airm2html.create_index_cp_supp()
            print("Done")
        elif choice == "22":
            print('\n')
            print("VIEWER - CP Creating pages from xls file...")
            import airm2html
            airm2html.create_pages_cp_concepts()
            print("Done")
        elif choice == "23":
            print('\n')
            print("VIEWER - LOGICAL Creating index from xls file...")
            import airm2html
            airm2html.create_index_logical_global()
            print("Done")
            print("VIEWER - LOGICAL (Supps) Creating index from xls file...")
            airm2html.create_index_logical_supp()
            print("Done")
        elif choice == "24":
            print('\n')
            print("VIEWER - LOGICAL Creating pages from xls file...")
            import airm2html
            airm2html.create_pages_logical_concepts()
            print("Done")
        elif choice == "25":
            print('\n')
            print("Creating html pages from xls file...")
            import aixm_adr2html
            aixm_adr2html.create_html()
            print("Done")
        elif choice == "26":
            print('\n')
            print("Creating html pages from xls file...")
            import aixm_adr2html
            aixm_adr2html.create_html_pages()
            print("Done")
        else:
            print("I don't understand your choice.")
def old_create_html():
    """(Legacy) Build the FIXM 4.2.0-to-AIRM 1.0.0 developer pages.

    Reads the mapping from ``data/json/fixm-mapping.json`` and:
      1. writes ``developers/fixm-4.2.0-to-airm-1.0.0.html`` from the
         page template,
      2. generates ``assets/js/mapping-table.js`` by injecting the JSON
         mapping into ``mapping-table-template.js``,
      3. writes one HTML page per mapped concept into the module-level
         ``mapping_pages_directory``.

    Fix vs. original: every ``open()`` is now wrapped in a ``with``
    block, so file handles are no longer leaked.
    """
    import airm
    airm = airm.Airm()

    # Configuration: assets/js/fixm-mapping-test.json OR assets/js/fixm-mapping.json
    json_mapping_file = "data/json/fixm-mapping.json"

    # Create developers/fixm-4.2.0-to-airm-1.0.0.html using developers/template.html.
    with open("data/html/templates/template.html") as template_file:
        html = template_file.read()
    soup = BeautifulSoup(html, "lxml")
    with open("data/html/export/developers/fixm-4.2.0-to-airm-1.0.0.html",
              "w+") as f:
        f.write(soup.prettify())

    # Create assets/js/mapping-table.js by injecting the JSON mapping as the
    # `values` array of mapping-table-template.js.
    with open(json_mapping_file) as json_file:
        with open(
                'data/html/templates/mapping-table-template.js') as fin, open(
                    'data/html/export/assets/js/mapping-table.js',
                    'w') as fout:
            for line in fin:
                if ' var values = [] ;' in line:
                    line = ' var values = ' + json_file.read() + ' ;\n'
                fout.write(line)

    # Load the mapping as a list of trace dictionaries.
    with open(json_mapping_file) as json_file:
        mapping_list = json.loads(json_file.read())

    # Create one page per concept in the mapping, from concept-template.html.
    with open("data/html/templates/concept-template.html") as template_file:
        html = template_file.read()
    for trace in mapping_list:
        # Id is "<ns>:<container>:..." — the second component is the
        # container concept used in the breadcrumb.
        components = str(trace['Id']).split(":")
        container_concept = components[1]
        # TO-DO replace shortnames in id with full namespaces
        full_id = str(trace['Id'])

        soup = BeautifulSoup(html, "lxml")
        # Container concept for breadcrumb.
        soup.find(
            text="CONTAINER CONCEPT HERE").replace_with(container_concept)
        # Name for heading.
        soup.find(text="CONTAINER@CONCEPT NAME HERE").replace_with(
            trace['Concept'])
        # ID for sub heading.
        soup.find(text="FULL ID HERE").replace_with(full_id)
        soup.find(text="DEFINITION HERE").replace_with(str(
            trace['Definition']))
        soup.find(text="TYPE HERE").replace_with(str(trace['Type']))
        soup.find(text="RATIONALE HERE").replace_with(str(trace['Rationale']))
        soup.find(text="NOTES HERE").replace_with(str(trace['Notes']))

        # Semantic correspondence name and url.
        new_link = BeautifulSoup(
            '<a href="' + trace['url'] + '">' + trace['Correspondence'] +
            '</a>', "lxml")
        row = soup.find(text="AIRM CONCEPT HERE").parent
        row.clear()
        row.append(new_link.a)

        # AIRM concept definition looked up by URN.
        entry = airm.load_and_find_urn(trace['urn'])
        soup.find(text="AIRM CONCEPT DEFINITION HERE").replace_with(
            entry["definition"])

        # Additional semantic correspondence url.
        new_link = BeautifulSoup(
            '<a href="' + trace['addurl'] + '">' + trace['Additional'] +
            '</a>', "lxml")
        addrow = soup.find(text="AIRM ADDITIONAL CONCEPT HERE").parent
        addrow.clear()
        addrow.append(new_link.a)

        # '@' is not filesystem-friendly; the pages use '.' instead.
        filename = trace['Concept'].replace("@", ".")
        with open(mapping_pages_directory + "/" + filename + ".html",
                  "w+") as f:
            f.write(soup.prettify())

    # DONE
    print(str(airm.not_found_counter) + " AIRM URNs were not found")
    print("Done")
def create_html_pages():
    """Generate one AMXM 2.0.0-to-AIRM 1.0.0 HTML page per information concept.

    For every information concept in the AMXM mapping, fills
    ``AMXM-concept-template.html`` with the concept's data concepts and
    their semantic correspondences, then writes the page to
    ``docs/developers/amxm-2.0.0-to-airm-1.0.0/<concept>.html``.
    Relies on the module-level ``create_url``/``create_name`` helpers
    and on BeautifulSoup.

    Fix vs. original: template reads and page writes now use ``with``
    blocks so file handles cannot leak (the template handle was never
    closed, and the output handle leaked on any write error).
    """
    import amxm
    import airm
    amxm = amxm.Amxm()
    airm = airm.Airm()
    amxm_info_concepts_dict = amxm.get_information_concepts()
    for info_concept in amxm_info_concepts_dict:
        if info_concept['Information Concept'] != "missing data":
            print(info_concept['Information Concept'])
            # Fresh soup for this concept's page, from the template.
            with open("data/html/templates/AMXM-concept-template.html"
                      ) as template_file:
                html = template_file.read()
            soup = BeautifulSoup(html, "lxml")
            soup.title.string = str(
                info_concept['Information Concept']
            ) + " - AMXM 2.0.0 to AIRM 1.0.0 | AIRM.aero"
            soup.find(text="FIXM_CLASS_NAME_BC").replace_with(
                str(info_concept['Information Concept']))
            definition = str(info_concept["Concept Definition"])
            definition = definition.replace("Definition: ", "")
            soup.find(text="FIXM_CLASS_DEFINITION").replace_with(definition)
            # Heading: concept name followed by its identifier.
            h2 = soup.new_tag("h2")
            h2.string = str(info_concept['Information Concept'])
            soup.find(id="INFO_CONCEPT_NAME").insert(0, h2)
            code = soup.new_tag("code")
            datac_identifier = info_concept['Concept Identifier']
            code.string = datac_identifier
            code["class"] = "text-secondary"
            soup.find(id="INFO_CONCEPT_NAME").insert(1, code)

            traces = amxm.get_traces_by_info_concept(
                info_concept['Information Concept'])

            # First pass over traces: summary table of data concepts.
            for trace in traces:
                if trace['Data Concept'] != "missing data":
                    print('\t' + trace['Data Concept'])
                    tr = soup.new_tag("tr")
                    if trace["Data Concept"] != "":
                        td_dc_name = soup.new_tag("td")
                        url = "#" + trace["Data Concept"]
                        text = trace["Data Concept"]
                        new_link = soup.new_tag("a")
                        new_link['href'] = url
                        new_link.string = text
                        td_dc_name.insert(1, new_link)
                        tr.insert(1, td_dc_name)
                    if trace["Concept Definition"] != "":
                        td_def = soup.new_tag("td")
                        definition = str(trace["Concept Definition"])
                        definition = definition.replace("Definition: ", "")
                        td_def.string = definition
                        tr.insert(2, td_def)
                    soup.find(id="DATA_CONCEPTS_LIST").insert(1, tr)

            # Second pass over traces: one detail panel per data concept.
            for trace in traces:
                if trace['Data Concept'] != "missing data":
                    property_div = soup.new_tag("div")
                    property_div["style"] = "border: 0.5px solid #b2b2b2;border-radius: 4px;box-shadow: 2px 2px #b2b2b2;padding: 15px;padding-bottom: 0px; margin-bottom: 30px"
                    # Anchor target for the summary-table links.
                    h3 = soup.new_tag("h3")
                    h3.string = str(trace["Data Concept"])
                    h3["id"] = str(trace["Data Concept"])
                    h3["style"] = "padding-top: 120px; margin-top: -120px;"
                    property_div.insert(0, h3)
                    code = soup.new_tag("code")
                    identifier = trace['Concept Identifier']
                    code.string = identifier
                    code["class"] = "text-secondary"
                    property_div.insert(1, code)
                    p = soup.new_tag("p")
                    definition = str(trace["Concept Definition"])
                    definition = definition.replace("Definition: ", "")
                    p.string = definition
                    br = soup.new_tag("br")
                    p.insert(2, br)
                    property_div.insert(2, p)
                    # Semantic-correspondence table header.
                    sc_h5 = soup.new_tag("h5")
                    sc_h5.string = "Semantic Correspondence"
                    sc_h5['style'] = "margin-top: 40px;"
                    property_div.insert(3, sc_h5)
                    sc_div = soup.new_tag("div")
                    sc_div["class"] = "table-responsive"
                    sc_table = soup.new_tag("table")
                    sc_table["class"] = "table"
                    sc_thead = soup.new_tag("thead")
                    tr = soup.new_tag("tr")
                    th = soup.new_tag("th")
                    th.string = "AIRM Concept"
                    tr.insert(1, th)
                    th = soup.new_tag("th")
                    th.string = "Definition"
                    tr.insert(2, th)
                    sc_thead.insert(1, tr)
                    sc_table.insert(1, sc_thead)
                    tbody = soup.new_tag("tbody")
                    # One row per correspondence.  Message text kept
                    # as-is ("Corresponce" sic).
                    print('\t\tSemantic Corresponce:')
                    if str(trace['AIRM Concept Identifier']) == "missing data":
                        # No AIRM identifier: fall back to the
                        # 'Special Case' entry for the single row.
                        tr = soup.new_tag("tr")
                        td = soup.new_tag("td")
                        line = str(trace['Special Case'])
                        url = create_url(line)
                        text = create_name(line)
                        a = soup.new_tag("a")
                        a['href'] = url
                        a['target'] = "_blank"
                        a.string = text
                        a["data-toggle"] = "tooltip"
                        a["data-placement"] = "right"
                        a["title"] = line
                        td.insert(1, a)
                        tr.insert(1, td)
                        td = soup.new_tag("td")
                        airm_entry = airm.load_and_find_urn(line)
                        td.string = airm_entry["definition"]
                        tr.insert(2, td)
                        tbody.insert(1, tr)
                    else:
                        # One table row per newline-separated AIRM URN.
                        sem_correspondences = str(
                            trace['AIRM Concept Identifier']).split('\n')
                        for line in sem_correspondences:
                            print('\t\t\t' + line)
                            tr = soup.new_tag("tr")
                            td = soup.new_tag("td")
                            url = create_url(line)
                            text = create_name(line)
                            a = soup.new_tag("a")
                            a['href'] = url
                            a['target'] = "_blank"
                            a.string = text
                            a["data-toggle"] = "tooltip"
                            a["data-placement"] = "right"
                            a["title"] = line
                            td.insert(1, a)
                            tr.insert(1, td)
                            td = soup.new_tag("td")
                            airm_entry = airm.load_and_find_urn(line)
                            td.string = airm_entry["definition"]
                            tr.insert(2, td)
                            tbody.insert(1, tr)
                    sc_table.insert(2, tbody)
                    sc_div.insert(1, sc_table)
                    property_div.insert(4, sc_div)
                    # Optional Rationale / Remarks sections.
                    if str(trace["Rationale"]) != "missing data":
                        h5 = soup.new_tag("h5")
                        h5.string = "Rationale"
                        property_div.insert(5, h5)
                        p = soup.new_tag("p")
                        p.string = str(trace["Rationale"])
                        print('Rationale:' + str(trace["Rationale"]))
                        property_div.insert(6, p)
                    if str(trace["Remarks"]) != "missing data":
                        notes_h5 = soup.new_tag("h5")
                        notes_h5.string = "Remarks"
                        property_div.insert(7, notes_h5)
                        p = soup.new_tag("p")
                        p.string = str(trace["Remarks"])
                        print('Remarks:' + str(trace["Remarks"]))
                        property_div.insert(8, p)
                    # "Back to top" icon link.
                    top_link_p = soup.new_tag("p")
                    new_link = soup.new_tag("a")
                    new_link['href'] = "#top"
                    new_icon = soup.new_tag("i")
                    new_icon['class'] = "fa fa-arrow-circle-up"
                    new_icon["data-toggle"] = "tooltip"
                    new_icon["data-placement"] = "left"
                    new_icon["title"] = "Top of page"
                    new_link.insert(1, new_icon)
                    top_link_p.insert(1, new_link)
                    top_link_p['class'] = "text-right"
                    property_div.insert(9, top_link_p)
                    soup.find(id="DATA_CONCEPTS_DETAIL").insert(
                        1, property_div)

            # Write the finished page for this information concept.
            with open(
                    "docs/developers/amxm-2.0.0-to-airm-1.0.0/" +
                    str(info_concept['Information Concept']) + ".html",
                    "w+") as f:
                f.write(soup.prettify())