def main():
    """
    Main program: parse a single Topology ("assembly"/"deployment") Ai XML file
    and generate the topology XML dictionary for it.

    Exits with status 0 on success, -1 on an unsupported XML type or a missing
    BUILD_ROOT; raises OSError when the filename lacks the "Ai" suffix marker.
    """
    global VERBOSE  # prevent local creation of variable
    global DEPLOYMENT  # deployment set in topology xml only and used to install new instance dicts
    # Sets up the (singleton) configuration instance
    ConfigManager.ConfigManager.getInstance()
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print("Usage: %s [options] xml_filename" % sys.argv[0])
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    #
    # Check for BUILD_ROOT variable for XML port searches.  A command-line
    # override takes precedence over the environment variable.
    #
    if opt.build_root_overwrite is not None:
        set_build_roots(opt.build_root_overwrite)
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    # Autocoded input files are conventionally named *Ai.xml
    if "Ai" not in xml_filename:
        print("ERROR: Missing Ai at end of file name...")
        raise OSError("Missing Ai at end of file name: %s" % xml_filename)
    xml_type = XmlParser.XmlParser(xml_filename)()
    if xml_type in ("assembly", "deployment"):
        if VERBOSE:
            print("Detected Topology XML so Generating Topology C++ Files...")
        the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(xml_filename)
        DEPLOYMENT = the_parsed_topology_xml.get_deployment()
        print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
        generate_xml_dict(the_parsed_topology_xml, xml_filename, opt)
    else:
        PRINT.info("Invalid XML found...this format not supported")
        sys.exit(-1)
    sys.exit(0)
def load_patched_topology(topology_xml: Path, settings_dir: Path = None) -> Topology:
    """
    Loads a patched model of the topology

    Loads a topology and attempts to correlate it across the various specification files. It is
    returned as a "Topology" object, which can be validated.

    Args:
        topology_xml: topology XML path to load. XML validation should occur first.
        settings_dir: (optional) directory with settings.ini. Default: location of settings.ini
            w.r.t. topology_xml

    Returns:
        topology module

    Raises:
        InconsistencyException: when the model fails to load (any underlying error is remapped)
    """
    settings_dir = topology_xml.parent.parent if settings_dir is None else settings_dir
    settings = IniSettings.load(None, cwd=settings_dir)
    # Any patching for the AC models will be undone afterwards
    build_roots_old = get_build_roots()
    ac_constants_old = os.environ.get("FPRIME_AC_CONSTANTS_FILE", None)
    stdout = sys.stdout
    try:
        # Base locations as dictated by the settings file
        base_locations = [
            settings.get("framework_path", settings_dir.parent),
            settings.get("project_root", None),
        ]
        base_locations.extend(settings.get("library_locations", []))
        # Setup build roots for using the autocoder modules
        set_build_roots(
            ":".join(
                str(location) for location in base_locations if location is not None
            )
        )
        ac_consts = Path(
            settings.get(
                "ac_constants",
                Path(settings.get("framework_path")) / "config" / "AcConstants.ini",
            )
        )
        if ac_consts and ac_consts.exists():
            os.environ["FPRIME_AC_CONSTANTS_FILE"] = str(ac_consts)
        # Now that all the environment patching is finished, loads of the topology model
        # should run smoothly
        try:
            sys.stdout = None  # Prevent all the stdout output
            return __topology_loader(topology_xml)
        except InconsistencyException:
            raise  # Pass through if already inconsistency exception
        except Exception as exc:
            # Remap non-InconsistencyException exceptions
            raise InconsistencyException(
                "Error when loading model: {}".format(exc)
            ) from exc
    # Clean-up the system state after our loading
    finally:
        sys.stdout = stdout
        set_build_roots(":".join(build_roots_old))
        if ac_constants_old is not None:
            os.environ["FPRIME_AC_CONSTANTS_FILE"] = ac_constants_old
        else:
            # Previously, a constants file set above leaked into the environment
            # when no prior value existed; remove it to fully restore state.
            os.environ.pop("FPRIME_AC_CONSTANTS_FILE", None)
def main():
    """
    Main program: parse a single Component Ai XML file and generate the
    component implementation/test C++ files for it.

    Exits with status 0 on success, -1 on a non-component XML type or missing
    BUILD_ROOT; raises OSError when the filename lacks the "Ai" suffix marker.
    """
    global VERBOSE
    global BUILD_ROOT
    global DEPLOYMENT
    # Enable printing model error output to STDOUT
    Logger.connectOutputLogger(None)
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    ConfigManager.ConfigManager.getInstance()
    #
    # Handle command line arguments
    #
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print("ERROR: Usage: %s [options] xml_filename" % sys.argv[0])
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    #
    # Check for BUILD_ROOT variable for XML port searches.  A command-line
    # override takes precedence over the environment variable.
    #
    if opt.build_root_overwrite is not None:
        set_build_roots(opt.build_root_overwrite)
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    #
    # Write test component
    #
    if "Ai" not in xml_filename:
        print("ERROR: Missing Ai at end of file name...")
        raise OSError("Missing Ai at end of file name: %s" % xml_filename)
    #
    # Create python dictionaries
    #
    print(xml_filename)
    xml_type = XmlParser.XmlParser(xml_filename)()
    # Only Components can be inputted
    if xml_type == "component":
        if VERBOSE:
            print("Detected Component XML so GeneratingComponent C++ Files...")
        the_parsed_component_xml = XmlComponentParser.XmlComponentParser(xml_filename)
        component_model = parse_component(the_parsed_component_xml, xml_filename, opt)
        if VERBOSE:
            print("\nGenerating tests...")
        generate_impl_files(opt, component_model)
    else:
        print(
            "ERROR: {} is used for component XML files, not {} XML files".format(
                sys.argv[0], xml_type
            )
        )
        sys.exit(-1)
    sys.exit(0)
def generate_topology(the_parsed_topology_xml, xml_filename, opt):
    """
    Generate topology C++ products (and optionally the instanced dictionaries /
    XML dictionary) from a parsed topology XML model.

    Args:
        the_parsed_topology_xml: XmlTopologyParser result for the topology.
        xml_filename: name of the topology Ai XML file (must contain "Ai").
        opt: parsed command-line options controlling dictionary generation.

    Returns:
        The constructed topology model.

    Raises:
        OSError: when xml_filename lacks the "Ai" marker.
        FileNotFoundError: when DEPLOYMENT is not under any build root.
    """
    import shutil  # hoisted from mid-function; used when clearing old dictionaries

    DEBUG.debug(f"Topology xml type description file: {xml_filename}")
    generator = TopoFactory.TopoFactory.getInstance()
    if not (opt.default_topology_dict or opt.xml_topology_dict):
        generator.set_generate_ID(False)
    topology_model = generator.create(the_parsed_topology_xml)
    if opt.is_ptr:
        PRINT.info("Topology Components will be initialized as Pointers. ")
    topology_model.is_ptr = opt.is_ptr
    if opt.connect_only:
        PRINT.info("Only port connections will be generated for Topology.")
    topology_model.connect_only = opt.connect_only
    generator = GenFactory.GenFactory.getInstance()
    if "Ai" in xml_filename:
        base = xml_filename.split("Ai")[0]
        h_instance_name = base + "_H"
        cpp_instance_name = base + "_Cpp"
        csv_instance_name = base + "_ID"
        cmd_html_instance_name = base + "_Cmd_HTML"
        channel_html_instance_name = base + "_Channel_HTML"
        event_html_instance_name = base + "_Event_HTML"
    else:
        PRINT.info("Missing Ai at end of file name...")
        raise OSError("Missing Ai at end of file name: %s" % xml_filename)
    # Figures out what visitor to use
    if opt.default_topology_dict or opt.xml_topology_dict:
        generator.configureVisitor(
            h_instance_name, "InstanceTopologyHVisitor", True, True
        )
        generator.configureVisitor(
            cpp_instance_name, "InstanceTopologyCppVisitor", True, True
        )
    else:
        generator.configureVisitor(h_instance_name, "TopologyHVisitor", True, True)
        generator.configureVisitor(cpp_instance_name, "TopologyCppVisitor", True, True)
    # Used to generate base ID/base ID window CSV files
    generator.configureVisitor(csv_instance_name, "TopologyIDVisitor", True, True)
    # Used to generate HTML tables of ID's etc.
    if opt.default_topology_dict or opt.xml_topology_dict:
        generator.configureVisitor(
            cmd_html_instance_name, "InstanceTopologyCmdHTMLVisitor", True, True
        )
        generator.configureVisitor(
            channel_html_instance_name, "InstanceTopologyChannelsTMLVisitor", True, True
        )
        generator.configureVisitor(
            event_html_instance_name, "InstanceTopologyEventsHTMLVisitor", True, True
        )
    # uses the topology model to process the items
    if opt.default_topology_dict or opt.xml_topology_dict:
        # create list of used parsed component xmls
        parsed_xml_dict = {}
        for comp in the_parsed_topology_xml.get_instances():
            if comp.get_type() in topology_model.get_base_id_dict():
                parsed_xml_dict[comp.get_type()] = comp.get_comp_xml()
            else:
                PRINT.info(
                    f"Components with type {comp.get_type()} aren't in the topology model."
                )
        #
        # Hack to set up deployment path for instanced dictionaries (if one exists remove old one)
        #
        if opt.default_topology_dict:
            for build_root in get_build_roots():
                if not os.path.exists(os.path.join(build_root, DEPLOYMENT)):
                    continue
                os.environ["DICT_DIR"] = os.path.join(build_root, DEPLOYMENT, "py_dict")
                break
            else:
                raise FileNotFoundError(
                    f"{DEPLOYMENT} not found in any of: {get_build_roots()}"
                )
            dict_dir = os.environ["DICT_DIR"]
            PRINT.info(f"Removing old instanced topology dictionaries in: {dict_dir}")
            if os.path.exists(dict_dir):
                shutil.rmtree(dict_dir)
            PRINT.info(
                f"Overriding for instanced topology dictionaries the --dict_dir option with xml derived path: {dict_dir}"
            )
        #
        xml_list = []
        for parsed_xml_type in parsed_xml_dict:
            if parsed_xml_dict[parsed_xml_type] is None:
                PRINT.info(
                    f"XML of type {parsed_xml_type} is being used, but has not been parsed correctly. Check if file exists or add xml file with the 'import_component_type' tag to the Topology file."
                )
                raise Exception()
            xml_list.append(parsed_xml_dict[parsed_xml_type])
            generate_component_instance_dictionary(
                parsed_xml_dict[parsed_xml_type], opt, topology_model
            )
        topology_model.set_instance_xml_list(xml_list)
        if opt.xml_topology_dict:
            topology_dict = etree.Element("dictionary")
            topology_dict.attrib["topology"] = the_parsed_topology_xml.get_name()
            topology_dict.attrib["framework_version"] = get_fprime_version()
            top_dict_gen = TopDictGenerator.TopDictGenerator(
                parsed_xml_dict, PRINT.debug
            )
            for comp in the_parsed_topology_xml.get_instances():
                comp_type = comp.get_type()
                comp_name = comp.get_name()
                comp_id = int(comp.get_base_id(), 0)
                PRINT.debug(f"Processing {comp_name} [{comp_type}] ({hex(comp_id)})")
                top_dict_gen.set_current_comp(comp)
                top_dict_gen.check_for_enum_xml()
                top_dict_gen.check_for_serial_xml()
                top_dict_gen.check_for_commands()
                top_dict_gen.check_for_channels()
                top_dict_gen.check_for_events()
                top_dict_gen.check_for_parameters()
                top_dict_gen.check_for_arrays()
            top_dict_gen.remove_duplicate_enums()
            topology_dict.append(top_dict_gen.get_enum_list())
            topology_dict.append(top_dict_gen.get_serializable_list())
            topology_dict.append(top_dict_gen.get_array_list())
            topology_dict.append(top_dict_gen.get_command_list())
            topology_dict.append(top_dict_gen.get_event_list())
            topology_dict.append(top_dict_gen.get_telemetry_list())
            topology_dict.append(top_dict_gen.get_parameter_list())
            fileName = the_parsed_topology_xml.get_xml_filename().replace(
                "Ai.xml", "Dictionary.xml"
            )
            PRINT.info(f"Generating XML dictionary {fileName}")
            # Note: binary forces the same encoding of the source files.
            # Use a context manager so the file is always closed (the original
            # left the handle open).
            with open(fileName, "wb") as fd:
                fd.write(etree.tostring(topology_dict, pretty_print=True))
    initFiles = generator.create("initFiles")
    # startSource = generator.create("startSource")
    includes1 = generator.create("includes1")
    # includes2 = generator.create("includes2")
    # namespace = generator.create("namespace")
    public = generator.create("public")
    finishSource = generator.create("finishSource")
    #
    # Generate the source code products here.
    #
    # 1. Open all the files
    initFiles(topology_model)
    #
    # 2. Generate includes and static code here.
    includes1(topology_model)
    #
    # 3. Generate public function to instance components and connect them here.
    public(topology_model)
    #
    # 4. Generate final code here and close all files.
    finishSource(topology_model)
    return topology_model
def main():
    """
    Main program: process one or more Ai XML files (component, port interface,
    serializable, topology, enum, array) and generate the corresponding C++
    products, optionally emitting a dependency file per input.

    Exits with status 0 on success and -1 when any input failed to generate.
    """
    global ERROR  # prevent local creation of variable
    global VERBOSE  # prevent local creation of variable
    global GEN_TEST_CODE  # indicate if test code should be generated
    global DEPLOYMENT  # deployment set in topology xml only and used to install new instance dicts
    ERROR = False
    # Sets up the initial (singleton) instance
    ConfigManager.ConfigManager.getInstance()
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    # Check that the specified working directory exists. Remember, the
    # default working directory is the current working directory which
    # always exists. We are basically only checking for when the user
    # specifies an alternate working directory.
    if not os.path.exists(opt.work_path):
        Parser.error(f"Specified path does not exist ({opt.work_path})!")
    working_dir = opt.work_path
    # Get the current working directory so that we can return to it when
    # the program completes. We always want to return to the place where
    # we started.
    starting_directory = os.getcwd()
    os.chdir(working_dir)
    # Configure the logging.
    log_level = opt.logger.upper()
    log_level_dict = {
        "QUIET": None,
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARN,
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }
    # "QUIET" disables stdout logging entirely
    stdout_enable = log_level_dict[log_level] is not None
    log_fd = opt.logger_output
    # For now no log file
    Logger.connectDebugLogger(log_level_dict[log_level], log_fd, stdout_enable)
    Logger.connectOutputLogger(log_fd)
    #
    # Parse the input Component XML file and create internal meta-model
    #
    if len(args) == 0:
        PRINT.info(f"Usage: {sys.argv[0]} [options] xml_filename")
        return
    else:
        xml_filenames = args[0:]
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_flag:
        # Check for BUILD_ROOT env. variable
        if "BUILD_ROOT" not in os.environ:
            PRINT.info(
                "ERROR: The -b command option requires that BUILD_ROOT environmental variable be set to root build path..."
            )
            sys.exit(-1)
        else:
            set_build_roots(os.environ.get("BUILD_ROOT"))
    # Holds the parser of the last successfully handled XML file; None until a
    # recognized XML type has been processed (enum/array types set no parser).
    dependency_parser = None
    for xml_filename in xml_filenames:
        xml_type = XmlParser.XmlParser(xml_filename)()
        if xml_type == "component":
            DEBUG.info("Detected Component XML so Generating Component C++ Files...")
            the_parsed_component_xml = XmlComponentParser.XmlComponentParser(
                xml_filename
            )
            generate_component(
                the_parsed_component_xml, os.path.basename(xml_filename), opt
            )
            dependency_parser = the_parsed_component_xml
        elif xml_type == "interface":
            DEBUG.info("Detected Port type XML so Generating Port type C++ Files...")
            the_parsed_port_xml = XmlPortsParser.XmlPortsParser(xml_filename)
            generate_port(the_parsed_port_xml, os.path.basename(xml_filename))
            dependency_parser = the_parsed_port_xml
        elif xml_type == "serializable":
            DEBUG.info(
                "Detected Serializable XML so Generating Serializable C++ Files..."
            )
            the_serial_xml = XmlSerializeParser.XmlSerializeParser(xml_filename)
            generate_serializable(the_serial_xml, opt)
            dependency_parser = the_serial_xml
        elif xml_type == "assembly" or xml_type == "deployment":
            DEBUG.info("Detected Topology XML so Generating Topology C++ Files...")
            the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(xml_filename)
            DEPLOYMENT = the_parsed_topology_xml.get_deployment()
            print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
            generate_topology(
                the_parsed_topology_xml, os.path.basename(xml_filename), opt
            )
            dependency_parser = the_parsed_topology_xml
        elif xml_type == "enum":
            DEBUG.info("Detected Enum XML so Generating hpp, cpp, and py files...")
            curdir = os.getcwd()
            if EnumGenerator.generate_enum(xml_filename):
                ERROR = False
                PRINT.info(
                    f"Completed generating files for {xml_filename} Enum XML...."
                )
            else:
                ERROR = True
            os.chdir(curdir)
        elif xml_type == "array":
            DEBUG.info("Detected Array XML so Generating hpp, cpp, and py files...")
            curdir = os.getcwd()
            if ArrayGenerator.generate_array(xml_filename):
                ERROR = False
                PRINT.info(
                    f"Completed generating files for {xml_filename} Array XML..."
                )
            else:
                ERROR = True
            os.chdir(curdir)
        else:
            PRINT.info("Invalid XML found...this format not supported")
            ERROR = True
        if opt.dependency_file is not None:
            # BUG FIX: guard against an unbound parser (previously a NameError
            # when the first file was of a type that sets no dependency parser).
            if opt.build_root_flag and dependency_parser is not None:
                generate_dependency_file(
                    opt.dependency_file,
                    os.path.basename(xml_filename),
                    list(get_build_roots())[0],
                    dependency_parser,
                    xml_type,
                )
    # Always return to directory where we started.
    os.chdir(starting_directory)
    if ERROR:
        sys.exit(-1)
    else:
        sys.exit(0)
def main():
    """
    Main program: parse a single Topology ("assembly"/"deployment") Ai XML file
    and generate the Python modules for it.

    Exits with status 0 on success, -1 on an unsupported XML type or missing
    BUILD_ROOT; raises OSError when the filename lacks the "Ai" suffix marker.
    """
    global VERBOSE
    global DEPLOYMENT
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    ConfigManager.ConfigManager.getInstance()
    # Check for BUILD_ROOT env. variable
    if "BUILD_ROOT" not in os.environ:
        print("ERROR: Build root not set to root build path...")
        sys.exit(-1)
    else:
        # Handle BUILD_ROOT (local name; also propagated to the model parser)
        BUILD_ROOT = os.environ["BUILD_ROOT"]
        ModelParser.BUILD_ROOT = BUILD_ROOT
        if VERBOSE:
            print("BUILD_ROOT set to %s in environment" % BUILD_ROOT)
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print("ERROR: Usage: %s [options] xml_filename" % sys.argv[0])
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    #
    # Check for BUILD_ROOT variable for XML port searches.  A command-line
    # override takes precedence over the environment variable.
    #
    if opt.build_root_overwrite is not None:
        set_build_roots(opt.build_root_overwrite)
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    # Autocoded input files are conventionally named *Ai.xml
    if "Ai" not in xml_filename:
        print("ERROR: Missing Ai at end of file name...")
        raise OSError("Missing Ai at end of file name: %s" % xml_filename)
    #
    # Create python dictionaries
    #
    xml_type = XmlParser.XmlParser(xml_filename)()
    # Only Topologies can be inputted
    if xml_type in ("assembly", "deployment"):
        if VERBOSE:
            print("Detected Topology XML so Generating Topology C++ Files...")
        the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(xml_filename)
        DEPLOYMENT = the_parsed_topology_xml.get_deployment()
        if VERBOSE:
            print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
        generate_pymods(the_parsed_topology_xml, xml_filename, opt)
    else:
        print("ERROR: Invalid XML found...this format not supported")
        sys.exit(-1)
    sys.exit(0)
def main(): parser = pinit() (opts, args) = parser.parse_args() Logger.connectOutputLogger(opts.logger_output) # Global logger init. below. PRINT = logging.getLogger("output") # # Parse the input Component XML file and create internal meta-model # if len(args) == 0: PRINT.info("\nUsage: %s [options] xml_filename" % sys.argv[0]) PRINT.info("ERROR: Cannot create dictionary\n") sys.exit(-1) return else: xmlFilename = args[0] # # Check for BUILD_ROOT variable for XML port searches # if not opts.build_root_overwrite is None: set_build_roots(opts.build_root_overwrite) else: if ("BUILD_ROOT" in os.environ.keys()) == False: print("ERROR: Build root not set to root build path...") sys.exit(-1) set_build_roots(os.environ["BUILD_ROOT"]) parsedTopology = XmlTopologyParser.XmlTopologyParser(xmlFilename) deployment = parsedTopology.get_deployment() outFilename = deployment + "Dictionary.json" outFilepath = "/".join([opts.work_path, outFilename]) descriptionFilename = "/".join([opts.work_path, "/dictPath.txt"]) dictionary = {} dictionary[deployment] = { "events": {}, "channels": {}, "commands": {}, "serializables": {}, } events = dictionary[deployment]["events"] channels = dictionary[deployment]["channels"] commands = dictionary[deployment]["commands"] serializables = dictionary[deployment]["serializables"] limitLabels = [ "low_red", "low_orange", "low_yellow", "high_yellow", "high_orange", "high_red", ] unitLabels = ["label", "gain", "offset"] instanceIDs = {"events": {}, "channels": {}, "commands": {}} for inst in parsedTopology.get_instances(): serializableFilenames = inst.get_comp_xml().get_serializable_type_files() for filename in serializableFilenames: for build_root in get_build_roots(): if os.path.exists(os.path.join(build_root, filename)): break else: raise FileNotFoundError(os.path.join(build_root, filename)) parsedSerializable = XmlSerializeParser.XmlSerializeParser( os.path.join(build_root, filename) ) name = parsedSerializable.get_name() namespace = 
parsedSerializable.get_namespace() members = [] membersRaw = parsedSerializable.get_members() for member in membersRaw: members.append( { "name": member[0], "type": format_type_item(member[1]), "size": member[2], "format_string": member[3], "comment": member[4], } ) metadata = { "name": name, "namespace": namespace, "members": members, } serializables[namespace + "::" + name] = metadata comp_name = inst.get_name() comp_type = inst.get_type() comp_namespace = inst.get_namespace() component = "::".join([comp_namespace, comp_type]) base_id = inst.get_base_id() if "0x" in base_id: base_id = int(base_id, 16) else: base_id = int(base_id) comp_parser = inst.get_comp_xml() comp_dir = dir(comp_parser) if "get_commands" in comp_dir: for command in comp_parser.get_commands(): opcode = command.get_opcodes()[0] opcode = int(opcode, 16) if ("0x" in opcode) else int(opcode) opcode += base_id name = command.get_mnemonic() if name in instanceIDs["commands"]: instanceIDs["commands"][name].append(opcode) else: instanceIDs["commands"][name] = [opcode] arguments = [] for arg in command.get_args(): typeItem = arg.get_type() typeObj = format_type_item(typeItem) arguments.append(typeObj) metadata = { "id": opcode, "name": name, "instance": comp_name, "description": command.get_comment(), "component": component, "arguments": arguments, } commands[opcode] = metadata if "get_events" in comp_dir: for event in comp_parser.get_events(): ev_id = event.get_ids()[0] ev_id = int(ev_id, 16) if ("0x" in ev_id) else int(ev_id) ev_id += base_id name = event.get_name() if name in instanceIDs["events"]: instanceIDs["events"][name].append(ev_id) else: instanceIDs["events"][name] = [ev_id] arguments = [] for arg in event.get_args(): typeItem = arg.get_type() typeObj = format_type_item(typeItem) arguments.append(typeObj) metadata = { "id": ev_id, "description": event.get_comment(), "name": name, "instance": comp_name, "component": component, "format_string": event.get_format_string(), "severity": 
event.get_severity(), "telem_type": "event", "arguments": arguments, } events[ev_id] = metadata if "get_channels" in comp_dir: for channel in comp_parser.get_channels(): ch_id = channel.get_ids()[0] ch_id = int(ch_id, 16) if ("0x" in ch_id) else int(ch_id) ch_id += base_id name = channel.get_name() if name in instanceIDs["channels"]: instanceIDs["channels"][name].append(ch_id) else: instanceIDs["channels"][name] = [ch_id] units = [] for unit in channel.get_units(): units.append(dict(list(zip(unitLabels, unit)))) typeObj = channel.get_type() type_name = "" if isinstance(typeObj, str): type_name = typeObj else: type_name = "Enum" enum_dict = {} for (i, enum) in enumerate(typeObj[1]): enum_dict[str(i)] = enum[0] metadata = { "id": ch_id, "name": name, "instance": comp_name, "description": channel.get_comment(), "telem_type": "channel", "component": component, "format_string": channel.get_format_string(), "limits": dict(list(zip(limitLabels, channel.get_limits()))), "type": type_name, "units": units, } if type_name == "Enum": metadata["enum_dict"] = enum_dict metadata["format_string"] = "%s" channels[ch_id] = metadata # Prepend instance name to commands, events, and channels with duplicate component types # PRINT.info(json.dumps(instanceIDs, indent=4)) for telemetryType, idDict in list(instanceIDs.items()): for name, ids in list(idDict.items()): if len(ids) > 1: for id in ids: telem = dictionary[deployment][telemetryType][id] name = telem["name"] instanceName = telem["instance"] name = "_".join([instanceName, name]) telem["name"] = name # Stringify JSON -- indent option makes it readable, can be removed if file # size is an issue jsonStr = json.dumps(dictionary, indent=4) # Create output directory if it doesn't exist directory = os.path.dirname(outFilepath) if not os.path.exists(directory): os.makedirs(directory) # Write JSON to file outFile = open(outFilepath, "w") outFile.write(jsonStr) descriptionFile = open(descriptionFilename, "w") 
descriptionFile.write(outFilepath) PRINT.info("\nJSON output written to %s" % outFilepath) outFile.close()