def main():
    """
    Main program.

    Parses a single topology ("Ai") XML filename from the command line,
    resolves BUILD_ROOT (a command-line overwrite takes precedence over the
    environment variable), and generates the python dictionaries for an
    assembly/deployment topology. Exits non-zero on unsupported XML.
    """
    global VERBOSE  # prevent local creation of variable
    global DEPLOYMENT  # deployment set in topology xml only and used to install new instance dicts

    # Sets up the (singleton) configuration instance
    ConfigManager.ConfigManager.getInstance()
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print("Usage: %s [options] xml_filename" % sys.argv[0])
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_overwrite is not None:
        set_build_roots(opt.build_root_overwrite)
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))

    # Autocoder inputs follow the *Ai.xml naming convention
    if "Ai" not in xml_filename:
        print("ERROR: Missing Ai at end of file name...")
        raise OSError

    xml_type = XmlParser.XmlParser(xml_filename)()
    if xml_type == "assembly" or xml_type == "deployment":
        if VERBOSE:
            print("Detected Topology XML so Generating Topology C++ Files...")
        the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(xml_filename)
        DEPLOYMENT = the_parsed_topology_xml.get_deployment()
        print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
        generate_xml_dict(the_parsed_topology_xml, xml_filename, opt)
    else:
        PRINT.info("Invalid XML found...this format not supported")
        sys.exit(-1)
    sys.exit(0)
def load_patched_topology(topology_xml: Path, settings_dir: Path = None) -> Topology:
    """ Loads a patched model of the topology

    Loads a topology and attempts to correlate it across the various
    specification files. It is returned as a "Topology" object, which can be
    validated. Build roots, the FPRIME_AC_CONSTANTS_FILE environment variable,
    and sys.stdout are patched for the duration of the load and restored
    afterwards.

    Args:
        topology_xml: topology XML path to load. XML validation should occur first.
        settings_dir: (optional) directory with settings.ini. Default: location
            of settings.ini w.r.t. topology_xml

    Returns:
        topology module

    Raises:
        InconsistencyException: on any failure while loading the model
    """
    settings_dir = topology_xml.parent.parent if settings_dir is None else settings_dir
    settings = IniSettings.load(None, cwd=settings_dir)

    # Any patching for the AC models will be undone afterwards
    build_roots_old = get_build_roots()
    ac_constants_old = os.environ.get("FPRIME_AC_CONSTANTS_FILE", None)
    stdout = sys.stdout
    try:
        # Base locations as dictated by the settings file
        base_locations = [
            settings.get("framework_path", settings_dir.parent),
            settings.get("project_root", None),
        ]
        base_locations.extend(settings.get("library_locations", []))
        # Setup build roots for using the autocoder modules
        set_build_roots(":".join(
            [str(location) for location in base_locations if location is not None]))
        ac_consts = Path(
            settings.get(
                "ac_constants",
                Path(settings.get("framework_path")) / "config" / "AcConstants.ini"))
        if ac_consts and ac_consts.exists():
            os.environ["FPRIME_AC_CONSTANTS_FILE"] = str(ac_consts)
        # Now that all the environment patching is finished, loads of the
        # topology model should run smoothly
        try:
            sys.stdout = None  # Prevent all the stdout output
            return __topology_loader(topology_xml)
        except InconsistencyException:
            raise  # Pass through if already inconsistency exception
        except Exception as exc:
            # Remap non-InconsistencyException exceptions
            raise InconsistencyException(
                "Error when loading model: {}".format(exc)) from exc
    # Clean-up the system state after our loading
    finally:
        sys.stdout = stdout
        set_build_roots(":".join(build_roots_old))
        if ac_constants_old is not None:
            os.environ["FPRIME_AC_CONSTANTS_FILE"] = ac_constants_old
        else:
            # The variable was unset before this call: remove it instead of
            # leaking the patched value into the caller's environment
            os.environ.pop("FPRIME_AC_CONSTANTS_FILE", None)
def main():
    """Process arguments: file old, file new, module deps to see if any mod deps has changed

    Compares the previous and new "locs" files with a zero-context unified
    diff; any removed location whose absolute path appears in the supplied
    module deps is reported. Exit codes: 1 on missing input, 2 when a
    dependency changed, 0 otherwise.
    """
    set_build_roots(os.environ["BUILD_ROOT"])
    parser = argparse.ArgumentParser(
        description="Has a module dependency changed within the locs file.")
    parser.add_argument("new_locs", help="New locs file")
    parser.add_argument("prev_locs", help="Original locs file")
    parser.add_argument("file_deps", nargs="+", help="Module deps")
    args_ns = parser.parse_args()

    if not os.path.exists(args_ns.new_locs):
        print(f"[ERROR] Failed to open {args_ns.new_locs}")
        sys.exit(1)
    # Previous files not generated
    if not os.path.exists(args_ns.prev_locs):
        print("No previous locations")
        sys.exit(1)
    with open(args_ns.prev_locs, "r") as prev_locs_fh:
        prev_lines = prev_locs_fh.readlines()
    with open(args_ns.new_locs, "r") as new_locs_fh:
        new_lines = new_locs_fh.readlines()

    # n=0 keeps only changed lines; "-" lines are removals ("---" is the header)
    diff_output = list(
        difflib.unified_diff(prev_lines, new_lines, n=0, lineterm="\n"))
    subtracted_lines = [
        diff for diff in diff_output
        if diff.startswith("-") and not diff.startswith("---")
    ]
    relative_mod_paths = [
        line.split()[-1].strip('"') for line in subtracted_lines
    ]
    # Locations are relative to the directory containing the new locs file
    subtractions = [
        os.path.abspath(
            os.path.join(os.path.dirname(args_ns.new_locs), rel_path))
        for rel_path in relative_mod_paths
    ]
    # Set gives O(1) membership tests instead of scanning the deps list
    file_deps = set(args_ns.file_deps)
    changed = [
        os.path.basename(subtraction)
        for subtraction in subtractions
        if subtraction in file_deps
    ]
    if changed:
        print(f"{', '.join(changed)}")
        sys.exit(2)
    sys.exit(0)
def main():
    """
    Main program.

    Parses a single telemetry packet XML filename from the command line and
    generates the packet file via TlmPacketParser, exiting non-zero on
    parse or file errors.
    """
    global VERBOSE  # prevent local creation of variable
    global BUILD_ROOT  # environmental variable if set

    Parser = pinit()
    (opt, args) = Parser.parse_args()
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print(f"Usage: {sys.argv[0]} [options] xml_filename")
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    print(f"Processing packet file {xml_filename}")
    # NOTE(review): BUILD_ROOT may be unset, passing None here — confirm
    # set_build_roots tolerates None before hardening this call
    set_build_roots(os.environ.get("BUILD_ROOT"))
    packet_parser = TlmPacketParser(opt.verbose_flag, opt.dependency_file)
    try:
        packet_parser.gen_packet_file(xml_filename)
    except TlmPacketParseValueError as e:
        print(f"Packet XML parsing error: {e}")
        sys.exit(-1)
    except TlmPacketParseIOError as e:
        print(f"Packet XML file error: {e}")
        sys.exit(-1)
    sys.exit(0)
def main():
    """
    Main program.

    Parses a single component ("Ai") XML filename from the command line,
    resolves BUILD_ROOT (command-line overwrite takes precedence over the
    environment variable), builds the component model, and generates the
    implementation/test files for that component.
    """
    global VERBOSE
    global BUILD_ROOT
    global DEPLOYMENT

    # Enable printing model error output to STDOUT
    Logger.connectOutputLogger(None)
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    ConfigManager.ConfigManager.getInstance()
    #
    # Handle command line arguments
    #
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print("ERROR: Usage: %s [options] xml_filename" % sys.argv[0])
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_overwrite is not None:
        set_build_roots(opt.build_root_overwrite)
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    #
    # Write test component
    #
    if "Ai" not in xml_filename:
        print("ERROR: Missing Ai at end of file name...")
        raise OSError
    #
    # Create python dictionaries
    #
    print(xml_filename)
    xml_type = XmlParser.XmlParser(xml_filename)()
    # Only Components can be inputted
    if xml_type == "component":
        if VERBOSE:
            print("Detected Component XML so GeneratingComponent C++ Files...")
        the_parsed_component_xml = XmlComponentParser.XmlComponentParser(xml_filename)
        component_model = parse_component(the_parsed_component_xml, xml_filename, opt)
        if VERBOSE:
            print("\nGenerating tests...")
        generate_impl_files(opt, component_model)
    else:
        print("ERROR: {} is used for component XML files, not {} XML files".
              format(sys.argv[0], xml_type))
        sys.exit(-1)
    sys.exit(0)
def main():
    """
    Main program.

    Drives autocoding for one or more Ai XML files: components, ports
    (interfaces), serializables, topologies, enums, and arrays. Configures
    logging per the --logger option, runs inside the requested working
    directory (restored on completion), and optionally writes a dependency
    file per input when -b and a dependency file are given. Exits -1 if any
    input failed.
    """
    global ERROR  # prevent local creation of variable
    global VERBOSE  # prevent local creation of variable
    global GEN_TEST_CODE  # indicate if test code should be generated
    global DEPLOYMENT  # deployment set in topology xml only and used to install new instance dicts

    ERROR = False
    # Sets up the initial (singleton) instance
    ConfigManager.ConfigManager.getInstance()
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag

    # Check that the specified working directory exists. Remember, the
    # default working directory is the current working directory which
    # always exists. We are basically only checking for when the user
    # specifies an alternate working directory.
    if not os.path.exists(opt.work_path):
        Parser.error(f"Specified path does not exist ({opt.work_path})!")
    working_dir = opt.work_path

    # Get the current working directory so that we can return to it when
    # the program completes. We always want to return to the place where
    # we started.
    starting_directory = os.getcwd()
    os.chdir(working_dir)

    # Configure the logging. QUIET maps to no debug level and disables stdout.
    log_level = opt.logger.upper()
    log_level_dict = {
        "QUIET": None,
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARN,
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }
    stdout_enable = log_level_dict[log_level] is not None
    log_fd = opt.logger_output
    # For now no log file
    Logger.connectDebugLogger(log_level_dict[log_level], log_fd, stdout_enable)
    Logger.connectOutputLogger(log_fd)
    #
    # Parse the input Component XML file and create internal meta-model
    #
    if len(args) == 0:
        PRINT.info(f"Usage: {sys.argv[0]} [options] xml_filename")
        return
    xml_filenames = args[0:]
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_flag:
        # Check for BUILD_ROOT env. variable
        if "BUILD_ROOT" not in os.environ:
            PRINT.info(
                "ERROR: The -b command option requires that BUILD_ROOT environmental variable be set to root build path..."
            )
            sys.exit(-1)
        set_build_roots(os.environ.get("BUILD_ROOT"))

    for xml_filename in xml_filenames:
        # Reset per file: previously an unsupported/enum/array input could
        # reference an unbound or stale parser in the dependency step below
        dependency_parser = None
        xml_type = XmlParser.XmlParser(xml_filename)()
        if xml_type == "component":
            DEBUG.info(
                "Detected Component XML so Generating Component C++ Files...")
            the_parsed_component_xml = XmlComponentParser.XmlComponentParser(
                xml_filename)
            generate_component(the_parsed_component_xml,
                               os.path.basename(xml_filename), opt)
            dependency_parser = the_parsed_component_xml
        elif xml_type == "interface":
            DEBUG.info(
                "Detected Port type XML so Generating Port type C++ Files...")
            the_parsed_port_xml = XmlPortsParser.XmlPortsParser(xml_filename)
            generate_port(the_parsed_port_xml, os.path.basename(xml_filename))
            dependency_parser = the_parsed_port_xml
        elif xml_type == "serializable":
            DEBUG.info(
                "Detected Serializable XML so Generating Serializable C++ Files..."
            )
            the_serial_xml = XmlSerializeParser.XmlSerializeParser(xml_filename)
            generate_serializable(the_serial_xml, opt)
            dependency_parser = the_serial_xml
        elif xml_type == "assembly" or xml_type == "deployment":
            DEBUG.info(
                "Detected Topology XML so Generating Topology C++ Files...")
            the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(
                xml_filename)
            DEPLOYMENT = the_parsed_topology_xml.get_deployment()
            print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
            generate_topology(the_parsed_topology_xml,
                              os.path.basename(xml_filename), opt)
            dependency_parser = the_parsed_topology_xml
        elif xml_type == "enum":
            DEBUG.info(
                "Detected Enum XML so Generating hpp, cpp, and py files...")
            curdir = os.getcwd()
            if EnumGenerator.generate_enum(xml_filename):
                ERROR = False
                PRINT.info(
                    f"Completed generating files for {xml_filename} Enum XML...."
                )
            else:
                ERROR = True
            os.chdir(curdir)
        elif xml_type == "array":
            DEBUG.info(
                "Detected Array XML so Generating hpp, cpp, and py files...")
            curdir = os.getcwd()
            if ArrayGenerator.generate_array(xml_filename):
                ERROR = False
                PRINT.info(
                    f"Completed generating files for {xml_filename} Array XML..."
                )
            else:
                ERROR = True
            os.chdir(curdir)
        else:
            PRINT.info("Invalid XML found...this format not supported")
            ERROR = True

        if opt.dependency_file is not None:
            # Only emit dependencies when -b was given and this file actually
            # produced a parser (enum/array/invalid inputs do not)
            if opt.build_root_flag and dependency_parser is not None:
                generate_dependency_file(
                    opt.dependency_file,
                    os.path.basename(xml_filename),
                    list(get_build_roots())[0],
                    dependency_parser,
                    xml_type,
                )

    # Always return to directory where we started.
    os.chdir(starting_directory)
    if ERROR:
        sys.exit(-1)
    sys.exit(0)
def main():
    """
    Main program.

    Parses a single topology ("Ai") XML filename from the command line,
    resolves BUILD_ROOT (environment variable required; command-line
    overwrite takes precedence for port searches), and generates the
    topology python modules.
    """
    global VERBOSE
    global DEPLOYMENT

    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    ConfigManager.ConfigManager.getInstance()

    # Check for BUILD_ROOT env. variable
    if "BUILD_ROOT" not in os.environ:
        print("ERROR: Build root not set to root build path...")
        sys.exit(-1)
    else:
        # Handle BUILD_ROOT
        BUILD_ROOT = os.environ["BUILD_ROOT"]
        ModelParser.BUILD_ROOT = BUILD_ROOT
        if VERBOSE:
            print("BUILD_ROOT set to %s in environment" % BUILD_ROOT)
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print("ERROR: Usage: %s [options] xml_filename" % sys.argv[0])
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_overwrite is not None:
        set_build_roots(opt.build_root_overwrite)
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    else:
        # BUILD_ROOT presence was already verified above; the guard is kept
        # so this branch stands on its own
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))

    if "Ai" not in xml_filename:
        print("ERROR: Missing Ai at end of file name...")
        raise OSError
    #
    # Create python dictionaries
    #
    xml_type = XmlParser.XmlParser(xml_filename)()
    # Only Topologies can be inputted
    if xml_type == "assembly" or xml_type == "deployment":
        if VERBOSE:
            print("Detected Topology XML so Generating Topology C++ Files...")
        the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(xml_filename)
        DEPLOYMENT = the_parsed_topology_xml.get_deployment()
        if VERBOSE:
            print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
        generate_pymods(the_parsed_topology_xml, xml_filename, opt)
    else:
        print("ERROR: Invalid XML found...this format not supported")
        sys.exit(-1)
    sys.exit(0)
def main():
    """
    Generate a JSON telemetry dictionary from a topology XML file.

    Parses the topology, collects serializables, commands, events, and
    channels (offsetting IDs by each instance's base id), disambiguates
    entries duplicated across component instances by prefixing the instance
    name, and writes <deployment>Dictionary.json plus dictPath.txt under
    opts.work_path.
    """
    parser = pinit()
    (opts, args) = parser.parse_args()
    Logger.connectOutputLogger(opts.logger_output)
    # Global logger init. below.
    PRINT = logging.getLogger("output")
    #
    # Parse the input Component XML file and create internal meta-model
    #
    if len(args) == 0:
        PRINT.info("\nUsage: %s [options] xml_filename" % sys.argv[0])
        PRINT.info("ERROR: Cannot create dictionary\n")
        sys.exit(-1)
    else:
        xmlFilename = args[0]
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opts.build_root_overwrite is not None:
        set_build_roots(opts.build_root_overwrite)
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])

    parsedTopology = XmlTopologyParser.XmlTopologyParser(xmlFilename)
    deployment = parsedTopology.get_deployment()
    outFilename = deployment + "Dictionary.json"
    outFilepath = "/".join([opts.work_path, outFilename])
    descriptionFilename = "/".join([opts.work_path, "/dictPath.txt"])
    dictionary = {}
    dictionary[deployment] = {
        "events": {},
        "channels": {},
        "commands": {},
        "serializables": {},
    }
    events = dictionary[deployment]["events"]
    channels = dictionary[deployment]["channels"]
    commands = dictionary[deployment]["commands"]
    serializables = dictionary[deployment]["serializables"]
    limitLabels = [
        "low_red",
        "low_orange",
        "low_yellow",
        "high_yellow",
        "high_orange",
        "high_red",
    ]
    unitLabels = ["label", "gain", "offset"]
    # Track every id registered per name so duplicates can be renamed later
    instanceIDs = {"events": {}, "channels": {}, "commands": {}}
    for inst in parsedTopology.get_instances():
        serializableFilenames = inst.get_comp_xml().get_serializable_type_files()
        for filename in serializableFilenames:
            # Search each build root for the serializable definition
            for build_root in get_build_roots():
                if os.path.exists(os.path.join(build_root, filename)):
                    break
            else:
                raise FileNotFoundError(os.path.join(build_root, filename))
            parsedSerializable = XmlSerializeParser.XmlSerializeParser(
                os.path.join(build_root, filename)
            )
            name = parsedSerializable.get_name()
            namespace = parsedSerializable.get_namespace()
            members = []
            membersRaw = parsedSerializable.get_members()
            for member in membersRaw:
                members.append(
                    {
                        "name": member[0],
                        "type": format_type_item(member[1]),
                        "size": member[2],
                        "format_string": member[3],
                        "comment": member[4],
                    }
                )
            metadata = {
                "name": name,
                "namespace": namespace,
                "members": members,
            }
            serializables[namespace + "::" + name] = metadata
        comp_name = inst.get_name()
        comp_type = inst.get_type()
        comp_namespace = inst.get_namespace()
        component = "::".join([comp_namespace, comp_type])
        base_id = inst.get_base_id()
        if "0x" in base_id:
            base_id = int(base_id, 16)
        else:
            base_id = int(base_id)
        comp_parser = inst.get_comp_xml()
        # Feature-detect parser capabilities; not all component parsers
        # expose commands/events/channels
        comp_dir = dir(comp_parser)
        if "get_commands" in comp_dir:
            for command in comp_parser.get_commands():
                opcode = command.get_opcodes()[0]
                opcode = int(opcode, 16) if ("0x" in opcode) else int(opcode)
                opcode += base_id
                name = command.get_mnemonic()
                if name in instanceIDs["commands"]:
                    instanceIDs["commands"][name].append(opcode)
                else:
                    instanceIDs["commands"][name] = [opcode]
                arguments = []
                for arg in command.get_args():
                    typeItem = arg.get_type()
                    typeObj = format_type_item(typeItem)
                    arguments.append(typeObj)
                metadata = {
                    "id": opcode,
                    "name": name,
                    "instance": comp_name,
                    "description": command.get_comment(),
                    "component": component,
                    "arguments": arguments,
                }
                commands[opcode] = metadata
        if "get_events" in comp_dir:
            for event in comp_parser.get_events():
                ev_id = event.get_ids()[0]
                ev_id = int(ev_id, 16) if ("0x" in ev_id) else int(ev_id)
                ev_id += base_id
                name = event.get_name()
                if name in instanceIDs["events"]:
                    instanceIDs["events"][name].append(ev_id)
                else:
                    instanceIDs["events"][name] = [ev_id]
                arguments = []
                for arg in event.get_args():
                    typeItem = arg.get_type()
                    typeObj = format_type_item(typeItem)
                    arguments.append(typeObj)
                metadata = {
                    "id": ev_id,
                    "description": event.get_comment(),
                    "name": name,
                    "instance": comp_name,
                    "component": component,
                    "format_string": event.get_format_string(),
                    "severity": event.get_severity(),
                    "telem_type": "event",
                    "arguments": arguments,
                }
                events[ev_id] = metadata
        if "get_channels" in comp_dir:
            for channel in comp_parser.get_channels():
                ch_id = channel.get_ids()[0]
                ch_id = int(ch_id, 16) if ("0x" in ch_id) else int(ch_id)
                ch_id += base_id
                name = channel.get_name()
                if name in instanceIDs["channels"]:
                    instanceIDs["channels"][name].append(ch_id)
                else:
                    instanceIDs["channels"][name] = [ch_id]
                units = []
                for unit in channel.get_units():
                    units.append(dict(list(zip(unitLabels, unit))))
                typeObj = channel.get_type()
                type_name = ""
                if isinstance(typeObj, str):
                    type_name = typeObj
                else:
                    # Non-string channel types are enums: (..., members) tuples
                    type_name = "Enum"
                    enum_dict = {}
                    for (i, enum) in enumerate(typeObj[1]):
                        enum_dict[str(i)] = enum[0]
                metadata = {
                    "id": ch_id,
                    "name": name,
                    "instance": comp_name,
                    "description": channel.get_comment(),
                    "telem_type": "channel",
                    "component": component,
                    "format_string": channel.get_format_string(),
                    "limits": dict(list(zip(limitLabels, channel.get_limits()))),
                    "type": type_name,
                    "units": units,
                }
                if type_name == "Enum":
                    metadata["enum_dict"] = enum_dict
                    metadata["format_string"] = "%s"
                channels[ch_id] = metadata
    # Prepend instance name to commands, events, and channels with duplicate component types
    for telemetryType, idDict in list(instanceIDs.items()):
        for name, ids in list(idDict.items()):
            if len(ids) > 1:
                for id in ids:
                    telem = dictionary[deployment][telemetryType][id]
                    name = telem["name"]
                    instanceName = telem["instance"]
                    name = "_".join([instanceName, name])
                    telem["name"] = name
    # Stringify JSON -- indent option makes it readable, can be removed if file
    # size is an issue
    jsonStr = json.dumps(dictionary, indent=4)
    # Create output directory if it doesn't exist
    directory = os.path.dirname(outFilepath)
    if not os.path.exists(directory):
        os.makedirs(directory)
    # Write JSON and the path-description file; context managers guarantee the
    # handles are closed (the description file handle previously leaked)
    with open(outFilepath, "w") as outFile:
        outFile.write(jsonStr)
    with open(descriptionFilename, "w") as descriptionFile:
        descriptionFile.write(outFilepath)
    PRINT.info("\nJSON output written to %s" % outFilepath)