def main():
    """
    Main program: parse a Topology XML file and generate the XML dictionary.

    Exits non-zero on unsupported XML; raises OSError when the filename is
    missing the "Ai" suffix convention.
    """
    global VERBOSE  # prevent local creation of variable
    global DEPLOYMENT  # deployment set in topology xml only and used to install new instance dicts

    # Sets up the (singleton) configuration instance as a side effect.
    ConfigManager.ConfigManager.getInstance()
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print("Usage: %s [options] xml_filename" % sys.argv[0])
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_overwrite is not None:
        # Command-line override takes precedence over the environment.
        set_build_roots(opt.build_root_overwrite)
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])
        if VERBOSE:
            print("BUILD_ROOT set to %s" % ",".join(get_build_roots()))

    if "Ai" not in xml_filename:
        print("ERROR: Missing Ai at end of file name...")
        raise OSError

    xml_type = XmlParser.XmlParser(xml_filename)()
    if xml_type == "assembly" or xml_type == "deployment":
        if VERBOSE:
            print("Detected Topology XML so Generating Topology C++ Files...")
        the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(
            xml_filename)
        DEPLOYMENT = the_parsed_topology_xml.get_deployment()
        print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
        generate_xml_dict(the_parsed_topology_xml, xml_filename, opt)
    else:
        PRINT.info("Invalid XML found...this format not supported")
        sys.exit(-1)
    sys.exit(0)
def main():
    """
    Topology XML parse test: build the topology meta-model from a fixed
    test file and print every component and port found in it.
    """
    # Configures output only to stdout.
    Logger.connectOutputLogger(None)
    xmlfile = "../../test/app1a/DuckAppAi.xml"
    print("Topology XML parse test (%s)" % xmlfile)
    #
    # Basic usage of this factory to create the component meta-model
    #
    the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(xmlfile)
    top = TopoFactory().create(the_parsed_topology_xml)
    #
    # End of usage and comp is the instance of model to be used.
    #
    print("Topology: %s" % top)
    print("Namespace: %s" % top.get_namespace())
    print("Comment: %s" % top.get_comment())
    print()
    for component in top.get_comp_list():
        print("Component")
        print(" Namespace: " + component.get_namespace())
        print(" Name: " + component.get_name())
        print(" Type: " + component.get_kind())
        # Comments are optional in the XML; guard against None (was "!= None").
        if component.get_comment() is not None:
            print(" Comment: " + component.get_comment())
        print(" Output Ports:")
        for port in component.get_ports():
            print(" Name: " + port.get_name())
            print(" Port Type: " + port.get_type())
            print(" Direction: " + port.get_direction())
            if port.get_sync() is not None:
                print(" Sync: " + port.get_sync())
            if port.get_comment() is not None:
                print(" Comment: " + port.get_comment())
            print(" Target Component: " + port.get_target_comp())
            print(" Target Port: " + port.get_target_port())
            print(" Target Type: " + port.get_target_type())
            print(" Target Direction:" + port.get_target_direction())
            print()
        print()
def main():
    """
    Main program: parse a Topology XML file and generate python modules.

    Exits non-zero when BUILD_ROOT is unset or the XML is not a topology;
    raises IOError when the filename is missing the "Ai" suffix convention.
    """
    global VERBOSE
    global DEPLOYMENT

    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag
    # Sets up the (singleton) configuration instance as a side effect;
    # the returned object itself was never used.
    ConfigManager.ConfigManager.getInstance()

    # Check for BUILD_ROOT env. variable
    if 'BUILD_ROOT' not in os.environ:
        print("ERROR: Build root not set to root build path...")
        sys.exit(-1)
    # Handle BUILD_ROOT
    BUILD_ROOT = os.environ['BUILD_ROOT']
    ModelParser.BUILD_ROOT = BUILD_ROOT
    if VERBOSE:
        print("BUILD_ROOT set to %s in environment" % BUILD_ROOT)
    #
    # Parse the input Topology XML filename
    #
    if len(args) == 0:
        print("ERROR: Usage: %s [options] xml_filename" % sys.argv[0])
        return
    elif len(args) == 1:
        xml_filename = args[0]
    else:
        print("ERROR: Too many filenames, should only have one")
        return
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_overwrite is not None:
        BUILD_ROOT = opt.build_root_overwrite
        ModelParser.BUILD_ROOT = BUILD_ROOT
        if VERBOSE:
            print("BUILD_ROOT set to %s" % BUILD_ROOT)
    else:
        if 'BUILD_ROOT' not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        BUILD_ROOT = os.environ['BUILD_ROOT']
        ModelParser.BUILD_ROOT = BUILD_ROOT
        if VERBOSE:
            print("BUILD_ROOT set to %s in environment" % BUILD_ROOT)

    if "Ai" not in xml_filename:
        print("ERROR: Missing Ai at end of file name...")
        raise IOError
    #
    # Create python dictionaries
    #
    xml_type = XmlParser.XmlParser(xml_filename)()
    # Only Topologies can be inputted
    if xml_type == "assembly" or xml_type == "deployment":
        if VERBOSE:
            print("Detected Topology XML so Generating Topology C++ Files...")
        the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(
            xml_filename)
        DEPLOYMENT = the_parsed_topology_xml.get_deployment()
        if VERBOSE:
            print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
        generate_pymods(the_parsed_topology_xml, xml_filename, opt)
    else:
        print("ERROR: Invalid XML found...this format not supported")
        sys.exit(-1)
    sys.exit(0)
def _collect_args(items):
    """Flatten all argument objects from a list of command/event objects."""
    collected = []
    for item in items:
        for arg in item.get_args():
            collected.append(arg)
    return collected


def _check_inst_name(kind, name, comp, inst1_names, inst2_names):
    """Assert that a dictionary entry name exists in the matching instance list.

    kind is the capitalized item label used in error messages
    ("Event", "Command", "Channel" or "Parameter").
    """
    if comp == "Inst1":
        if name not in inst1_names:
            print("ERROR: {} in Inst1 not found: {}".format(kind, name))
            assert False
    elif comp == "Inst2":
        if name not in inst2_names:
            print("ERROR: {} in Inst2 not found: {}".format(kind, name))
            assert False
    else:
        print("ERROR: Invalid component name {}".format(comp))
        assert False


def _check_pymods(testdir, subdir, label, expected_modules):
    """Assert that testdir/subdir holds exactly the expected python modules.

    label is the lower-case item name used in error messages.
    """
    mod_dir = os.path.join(testdir, subdir)
    if os.path.exists(mod_dir):
        files = filter_non_src_files(os.listdir(mod_dir))
        for mod in expected_modules:
            if mod not in files:
                print("ERROR: python module {} not found in dictgen/{}".format(
                    mod, subdir))
                assert False
        if len(expected_modules) != len(files):
            print("ERROR: Not all {} python modules were generated".format(
                label))
            assert False
    else:
        if len(expected_modules) != 0:
            print("ERROR: {} python modules were not generated".format(
                label.capitalize()))
            assert False


def check_generated_files(testdir):
    """
    Compares generated dictionary and pymods with original topology xml
    """
    # Check that everything from parsed topology was generated
    topology = XmlTopologyParser.XmlTopologyParser(
        "TestTopologyAppAi.xml").get_instances()

    # Original TopologyAi.xml file
    inst1 = topology[0]
    inst2 = topology[1]

    # Dict objects
    inst1_events = list(inst1.get_comp_xml().get_events())
    inst2_events = list(inst2.get_comp_xml().get_events())
    inst1_commands = list(inst1.get_comp_xml().get_commands())
    inst2_commands = list(inst2.get_comp_xml().get_commands())
    inst1_channels = list(inst1.get_comp_xml().get_channels())
    inst2_channels = list(inst2.get_comp_xml().get_channels())
    inst1_parameters = list(inst1.get_comp_xml().get_parameters())
    inst2_parameters = list(inst2.get_comp_xml().get_parameters())

    # Names of dict objects
    inst1_event_names = [o.get_name() for o in inst1_events]
    inst2_event_names = [o.get_name() for o in inst2_events]
    inst1_command_names = [o.get_mnemonic() for o in inst1_commands]
    inst2_command_names = [o.get_mnemonic() for o in inst2_commands]
    inst1_channel_names = [o.get_name() for o in inst1_channels]
    inst2_channel_names = [o.get_name() for o in inst2_channels]
    inst1_parameter_names = [o.get_name() for o in inst1_parameters]
    inst2_parameter_names = [o.get_name() for o in inst2_parameters]

    # Parse out all arguments for commands and events
    inst1_command_args = _collect_args(inst1_commands)
    # BUGFIX: inst2 command args were previously collected from inst1_commands
    inst2_command_args = _collect_args(inst2_commands)
    inst1_event_args = _collect_args(inst1_events)
    inst2_event_args = _collect_args(inst2_events)

    # Command / Event arg names
    inst1_command_arg_names = [arg.get_name() for arg in inst1_command_args]
    inst2_command_arg_names = [arg.get_name() for arg in inst2_command_args]
    inst1_event_arg_names = [arg.get_name() for arg in inst1_event_args]
    inst2_event_arg_names = [arg.get_name() for arg in inst2_event_args]

    # Dict object types
    inst1_channel_types = [channel.get_type() for channel in inst1_channels]
    inst2_channel_types = [channel.get_type() for channel in inst2_channels]
    inst1_command_arg_types = [arg.get_type() for arg in inst1_command_args]
    inst2_command_arg_types = [arg.get_type() for arg in inst2_command_args]
    inst1_event_arg_types = [arg.get_type() for arg in inst1_event_args]
    inst2_event_arg_types = [arg.get_type() for arg in inst2_event_args]
    inst1_parameter_types = [param.get_type() for param in inst1_parameters]
    inst2_parameter_types = [param.get_type() for param in inst2_parameters]

    # Enums
    inst1_enums = get_enums_from_comp_xml(inst1.get_comp_xml())
    inst2_enums = get_enums_from_comp_xml(inst2.get_comp_xml())
    inst_enums = {
        "compxml": inst1_enums["compxml"] + inst2_enums["compxml"],
        "imports": inst1_enums["imports"] + inst2_enums["imports"],
    }

    # Serializables
    inst1_serials = get_serializables_from_comp_xml(inst1.get_comp_xml())
    inst2_serials = get_serializables_from_comp_xml(inst2.get_comp_xml())
    inst_serials = {
        "imports": inst1_serials["imports"] + inst2_serials["imports"],
    }

    # GDS XML Dictionary (duplicate "commands" fetch removed)
    dict_parser = XmlLoader()
    parsed_dict = dict_parser.get_xml_tree("TestTopologyAppDictionary.xml")
    parsed_commands = dict_parser.get_xml_section("commands", parsed_dict)
    parsed_events = dict_parser.get_xml_section("events", parsed_dict)
    parsed_channels = dict_parser.get_xml_section("channels", parsed_dict)
    parsed_parameters = dict_parser.get_xml_section("parameters", parsed_dict)
    parsed_serializables = dict_parser.get_xml_section(
        "serializables", parsed_dict)
    parsed_enums = dict_parser.get_xml_section("enums", parsed_dict)

    ###################################################################
    # Run Enum Check
    # Compare TopAi.xml enums with GDS XML Dict enums
    compare_enums_ai_gds(inst_enums, parsed_enums)

    ###################################################################
    # Run Serializable Check
    # Compare TopAi.xml serializables with GDS XML Dict serializables
    compare_serials_ai_gds(inst_serials, parsed_serializables)

    ###################################################################
    # Before deep comparison between elements, check that number
    # of elements is same between dict and topology
    if len(inst1_events) + len(inst2_events) != len(parsed_events):
        print("ERROR: Not all events in topology were found in generated dict")
        assert False
    if len(inst1_commands) + len(inst2_commands) != len(parsed_commands):
        print(
            "ERROR: Not all commands in topology were found in generated dict")
        assert False
    if len(inst1_channels) + len(inst2_channels) != len(parsed_channels):
        print(
            "ERROR: Not all channels in topology were found in generated dict")
        assert False
    if len(inst1_parameters) + len(inst2_parameters) != len(parsed_parameters):
        print(
            "ERROR: Not all parameters in topology were found in generated dict"
        )
        assert False

    ###################################################################
    # Check that all items are the same from the topology and the generated dict
    # Check events
    for event in parsed_events:
        evr_name = event.attrib["name"]
        _check_inst_name("Event", evr_name, event.attrib["component"],
                         inst1_event_names, inst2_event_names)
        # Arg check
        for args in event:
            if len(args) > 0:
                for arg in args:
                    arg_name = arg.attrib["name"]
                    arg_type = arg.attrib["type"]
                    if not (arg_name in inst1_event_arg_names
                            or arg_name in inst2_event_arg_names):
                        print("ERROR: event arg name {} for arg {} not found ".
                              format(arg_name, evr_name) + "in topologyAi.xml")
                        assert False
                    if not (arg_type in inst1_event_arg_types
                            or arg_type in inst2_event_arg_types):
                        print("ERROR: event arg type {} for arg {} not found ".
                              format(arg_type, evr_name))
                        assert False

    # Check commands
    for command in parsed_commands:
        cmd_name = command.attrib["mnemonic"]
        _check_inst_name("Command", cmd_name, command.attrib["component"],
                         inst1_command_names, inst2_command_names)
        # Arg check (BUGFIX: was "> 1", which skipped single-argument
        # commands; events use "> 0")
        for args in command:
            if len(args) > 0:
                for arg in args:
                    arg_name = arg.attrib["name"]
                    arg_type = arg.attrib["type"]
                    # BUGFIX: messages said "event arg" and referenced the
                    # stale evr_name from the events loop above
                    if not (arg_name in inst1_command_arg_names
                            or arg_name in inst2_command_arg_names):
                        print(
                            "ERROR: command arg name {} for arg {} not found ".
                            format(arg_name, cmd_name) + "in topologyAi.xml")
                        assert False
                    if not (arg_type in inst1_command_arg_types
                            or arg_type in inst2_command_arg_types):
                        print(
                            "ERROR: command arg type {} for arg {} not found ".
                            format(arg_type, cmd_name) + "in topologyAi.xml")
                        assert False

    # Check channels
    for channel in parsed_channels:
        chan_name = channel.attrib["name"]
        _check_inst_name("Channel", chan_name, channel.attrib["component"],
                         inst1_channel_names, inst2_channel_names)
        # Type check
        chan_type = channel.attrib["type"]
        if not (chan_type in inst1_channel_types
                or chan_type in inst2_channel_types):
            print(
                "ERROR: Channel type {} in channel {} not found in topologyAi.xml"
                .format(chan_type, chan_name))
            assert False

    # Check parameters
    for parameter in parsed_parameters:
        param_name = parameter.attrib["name"]
        _check_inst_name("Parameter", param_name,
                         parameter.attrib["component"],
                         inst1_parameter_names, inst2_parameter_names)
        # Type check
        param_type = parameter.attrib["type"]
        if not (param_type in inst1_parameter_types
                or param_type in inst2_parameter_types):
            print(
                "ERROR: Parameter type {} in param {} not found in topologyAi.xml"
                .format(param_type, param_name))
            assert False

    print("Checked dictionary item names, types and arguments")
    print("Generated Dictionary consistent with Topology")

    ##################################################
    # Check Pymodule consistency
    _check_pymods(testdir, "commands", "command",
                  ["Inst1_" + o + ".py" for o in inst1_command_names] +
                  ["Inst2_" + o + ".py" for o in inst2_command_names])
    _check_pymods(testdir, "channels", "channel",
                  ["Inst1_" + o + ".py" for o in inst1_channel_names] +
                  ["Inst2_" + o + ".py" for o in inst2_channel_names])
    _check_pymods(testdir, "events", "event",
                  ["Inst1_" + o + ".py" for o in inst1_event_names] +
                  ["Inst2_" + o + ".py" for o in inst2_event_names])
    _check_pymods(testdir, "parameters", "parameter",
                  ["Inst1_" + o + ".py" for o in inst1_parameter_names] +
                  ["Inst2_" + o + ".py" for o in inst2_parameter_names])
def main():
    """
    Main program: drive code generation for component, port, serializable,
    topology, enum and array XML files given on the command line.

    Exits non-zero if any input fails to generate.
    """
    global ERROR  # prevent local creation of variable
    global VERBOSE  # prevent local creation of variable
    global GEN_TEST_CODE  # indicate if test code should be generated
    global DEPLOYMENT  # deployment set in topology xml only and used to install new instance dicts

    ERROR = False
    # Sets up the initial (singleton) instance
    ConfigManager.ConfigManager.getInstance()
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag

    # Check that the specified working directory exists. Remember, the
    # default working directory is the current working directory which
    # always exists. We are basically only checking for when the user
    # specifies an alternate working directory.
    if not os.path.exists(opt.work_path):
        Parser.error(f"Specified path does not exist ({opt.work_path})!")
    working_dir = opt.work_path

    # Get the current working directory so that we can return to it when
    # the program completes. We always want to return to the place where
    # we started.
    starting_directory = os.getcwd()
    os.chdir(working_dir)

    # Configure the logging.
    log_level = opt.logger.upper()
    log_level_dict = {
        "QUIET": None,
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARN,
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }
    # QUIET maps to None, which disables stdout logging.
    stdout_enable = log_level_dict[log_level] is not None
    log_fd = opt.logger_output  # For now no log file
    Logger.connectDebugLogger(log_level_dict[log_level], log_fd, stdout_enable)
    Logger.connectOutputLogger(log_fd)

    #
    # Parse the input Component XML file and create internal meta-model
    #
    if len(args) == 0:
        PRINT.info(f"Usage: {sys.argv[0]} [options] xml_filename")
        return
    xml_filenames = args[0:]

    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_flag:
        # Check for BUILD_ROOT env. variable
        if "BUILD_ROOT" not in os.environ:
            PRINT.info(
                "ERROR: The -b command option requires that BUILD_ROOT environmental variable be set to root build path..."
            )
            sys.exit(-1)
        set_build_roots(os.environ.get("BUILD_ROOT"))

    for xml_filename in xml_filenames:
        xml_type = XmlParser.XmlParser(xml_filename)()
        if xml_type == "component":
            DEBUG.info(
                "Detected Component XML so Generating Component C++ Files...")
            the_parsed_component_xml = XmlComponentParser.XmlComponentParser(
                xml_filename)
            generate_component(the_parsed_component_xml,
                               os.path.basename(xml_filename), opt)
            dependency_parser = the_parsed_component_xml
        elif xml_type == "interface":
            DEBUG.info(
                "Detected Port type XML so Generating Port type C++ Files...")
            the_parsed_port_xml = XmlPortsParser.XmlPortsParser(xml_filename)
            generate_port(the_parsed_port_xml, os.path.basename(xml_filename))
            dependency_parser = the_parsed_port_xml
        elif xml_type == "serializable":
            DEBUG.info(
                "Detected Serializable XML so Generating Serializable C++ Files..."
            )
            the_serial_xml = XmlSerializeParser.XmlSerializeParser(
                xml_filename)
            generate_serializable(the_serial_xml, opt)
            dependency_parser = the_serial_xml
        elif xml_type == "assembly" or xml_type == "deployment":
            DEBUG.info(
                "Detected Topology XML so Generating Topology C++ Files...")
            the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(
                xml_filename)
            DEPLOYMENT = the_parsed_topology_xml.get_deployment()
            print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
            generate_topology(the_parsed_topology_xml,
                              os.path.basename(xml_filename), opt)
            dependency_parser = the_parsed_topology_xml
        elif xml_type == "enum":
            DEBUG.info(
                "Detected Enum XML so Generating hpp, cpp, and py files...")
            # Generators may chdir; remember where we were for this file.
            curdir = os.getcwd()
            if EnumGenerator.generate_enum(xml_filename):
                ERROR = False
                PRINT.info(
                    f"Completed generating files for {xml_filename} Enum XML...."
                )
            else:
                ERROR = True
            os.chdir(curdir)
        elif xml_type == "array":
            DEBUG.info(
                "Detected Array XML so Generating hpp, cpp, and py files...")
            curdir = os.getcwd()
            if ArrayGenerator.generate_array(xml_filename):
                ERROR = False
                PRINT.info(
                    f"Completed generating files for {xml_filename} Array XML..."
                )
            else:
                ERROR = True
            os.chdir(curdir)
        else:
            PRINT.info("Invalid XML found...this format not supported")
            ERROR = True

        if opt.dependency_file is not None:
            if opt.build_root_flag:
                generate_dependency_file(
                    opt.dependency_file,
                    os.path.basename(xml_filename),
                    list(get_build_roots())[0],
                    dependency_parser,
                    xml_type,
                )

    # Always return to directory where we started.
    os.chdir(starting_directory)
    if ERROR:
        sys.exit(-1)
    sys.exit(0)
def gen_packet_file(self, xml_filename):
    """
    Parse a telemetry packet definition ("PacketsAi") XML file and generate
    the packet list header/implementation files plus a GDS view text file
    per packet under ./Views.

    Raises TlmPacketParseIOError for missing files/imports and
    TlmPacketParseValueError for malformed packet definitions.
    """
    view_path = "./Views"
    if not os.path.exists(view_path):
        os.mkdir(view_path)
    # Make sure files exist
    if not os.path.isfile(xml_filename):
        raise TlmPacketParseIOError("File %s does not exist!" % xml_filename)
    if "PacketsAi" not in xml_filename:
        raise IOError("ERROR: Missing PacketsAi at end of file name %s" %
                      xml_filename)
    xml_parser = etree.XMLParser(remove_comments=True)
    # BUGFIX: close the source XML file once parsed (was left open)
    with open(xml_filename, "r") as fd:
        element_tree = etree.parse(fd, parser=xml_parser)
    channel_size_dict = None
    ht = Template(header_file_template)
    it = Template(impl_file_template)
    if element_tree.getroot().tag == "packets":
        list_name = element_tree.getroot().attrib["name"]
        list_namespace = element_tree.getroot().attrib["namespace"]
        max_size = int(element_tree.getroot().attrib["size"])
        # fill in template fields for header
        ht.packet_list_name = list_name
        ht.packet_list_namespace = list_namespace
        # fill in template fields for implementation file
        it.packet_list_name = list_name
        it.packet_list_namespace = list_namespace
        it.max_size = max_size
        packet_list_container = []
        packetized_channel_list = []
        it.ignore_list = []
        id_list = []  # check for duplicates
        ignore_name_list = []
        size_dict = {}
        ht.num_packets = 0
        total_packet_size = 0
        levels = []
        # find the topology import
        for entry in element_tree.getroot():
            if entry.tag == "import_topology":
                # read in topology file
                top_file = search_for_file("Packet", entry.text)
                if top_file is None:
                    raise TlmPacketParseIOError("import file %s not found" %
                                                entry.text)
                the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(
                    top_file)
                deployment = the_parsed_topology_xml.get_deployment()
                if self.verbose:
                    print("Found assembly or deployment named: %s\n" %
                          deployment)
                channel_size_dict = self.generate_channel_size_dict(
                    the_parsed_topology_xml, xml_filename)
            elif entry.tag == "packet":
                if channel_size_dict is None:
                    raise TlmPacketParseValueError(
                        "%s: Topology import must be before packet definitions"
                        % xml_filename)
                packet_size = 0
                packet_name = entry.attrib["name"]
                packet_id = entry.attrib["id"]
                packet_level = entry.attrib["level"]
                print("Packetizing %s (%s)" % (packet_name, packet_id))
                if packet_id in id_list:
                    raise TlmPacketParseValueError("Duplicate packet id %s" %
                                                   packet_id)
                id_list.append(packet_id)
                channel_list = []
                # Text file for a GDS view; BUGFIX: use a context manager so
                # the file is closed even when a parse error is raised below.
                with open("%s/%s.txt" % (view_path, packet_name), "w") as vfd:
                    for channel in entry:
                        channel_name = channel.attrib["name"]
                        if channel_name not in channel_size_dict:
                            raise TlmPacketParseValueError(
                                "Channel %s does not exist" % channel_name)
                        (channel_id,
                         channel_size) = channel_size_dict[channel_name]
                        packet_size += channel_size
                        if self.verbose:
                            print(" -Channel %s ID %d size %d" %
                                  (channel_name, channel_id, channel_size))
                        channel_list.append(
                            (channel_id, channel_size, channel_name))
                        packetized_channel_list.append(channel_name)
                        vfd.write("%s\n" % channel_name)
                packet_list_container.append(
                    (packet_name, packet_id, packet_level, channel_list))
                ht.num_packets += 1
                # raw packet size + time tag + packet id + packet descriptor
                packet_size += 11 + 2 + 4
                if packet_size > max_size:
                    raise TlmPacketParseValueError(
                        "Packet %s is too large. Size: %d max: %d" %
                        (packet_name, packet_size, max_size))
                print("Packet %s size %d/%d" %
                      (packet_name, packet_size, max_size))
                total_packet_size += packet_size
                if packet_level in size_dict:
                    size_dict[packet_level] += packet_size
                else:
                    size_dict[packet_level] = packet_size
                if packet_level not in levels:
                    levels.append(packet_level)
            elif entry.tag == "ignore":
                if channel_size_dict is None:
                    raise TlmPacketParseValueError(
                        "%s: Topology import must be before packet definitions"
                        % xml_filename)
                for channel in entry:
                    channel_name = channel.attrib["name"]
                    if channel_name not in channel_size_dict:
                        raise TlmPacketParseValueError(
                            "Channel %s does not exist" % channel_name)
                    (channel_id,
                     channel_size) = channel_size_dict[channel_name]
                    it.ignore_list.append((channel_id, channel_name))
                    if self.verbose:
                        print("Channel %s (%d) ignored" %
                              (channel_name, channel_id))
                    ignore_name_list.append(channel_name)
            else:
                raise TlmPacketParseValueError("Invalid packet tag %s" %
                                               entry.tag)
            if self.verbose:
                print("Entry: %s" % entry.tag)
    else:
        raise TlmPacketParseValueError("Invalid xml type %s" %
                                       element_tree.getroot().tag)
    # Robustness: without this guard a file with no import_topology entry
    # crashed below with a TypeError iterating None.
    if channel_size_dict is None:
        raise TlmPacketParseValueError(
            "%s: no import_topology specification found" % xml_filename)
    output_file_base = os.path.splitext(
        os.path.basename(xml_filename))[0].replace("Ai", "")
    file_dir = os.path.dirname(xml_filename).replace(
        get_nearest_build_root(xml_filename) + os.sep, "")
    missing_channels = False
    for channel in channel_size_dict:
        if (channel not in packetized_channel_list
                and channel not in ignore_name_list):
            (channel_id, channel_size) = channel_size_dict[channel]
            print("Channel %s (%d) not packetized or ignored." %
                  (channel, channel_id))
            missing_channels = True
    if missing_channels:
        raise TlmPacketParseValueError("Channels missing from packets")
    header = "%sAc.hpp" % output_file_base
    source = "%sAc.cpp" % output_file_base
    print("Generating %s and %s" % (header, source))
    levels.sort()
    for level in levels:
        print("Level: %s Bytes: %d bits: %d" %
              (level, size_dict[level], size_dict[level] * 8))
    print("Number of packets: %d\nTotal packet bytes: %d bits: %d" %
          (ht.num_packets, total_packet_size, total_packet_size * 8))
    it.packet_list = packet_list_container
    it.output_header = "%s/%sAc.hpp" % (file_dir, output_file_base)
    # BUGFIX: write outputs via context managers so the generated files are
    # flushed and closed deterministically (was open(...).write(...))
    with open(header, "w") as hfd:
        hfd.write(str(ht))
    with open(source, "w") as sfd:
        sfd.write(str(it))
    target_directory = os.getcwd().replace("\\", os.sep)
    header_target = target_directory + os.sep + header
    source_target = target_directory + os.sep + source
    # write dependency file
    if self.dependency is not None:
        dependency_file_txt = "\n%s %s: %s\n" % (source_target, header_target,
                                                 top_file)
        with open(self.dependency, "w") as dfd:
            dfd.write(dependency_file_txt)
def parse_topology(self, xml_filename, overwrite=True):
    """
    Takes an XML File and puts all channel, event, and command data into
    CosmosChannel, CosmosEvent, and CosmosCommand model class instances
    to be passed to the Generator that creates the config files

    @param xml_filename: XML File Name, should be Topology if not quits
    @param overwrite: Flag whether to overwrite channels, events, and commands lists
    """
    bot_dir = os.getcwd()
    # Parser needs to be in Autocoders/bin directory to be able to find Topology XML
    os.chdir(CosmosUtil.STARTING_DIRECTORY)

    print("\nUsing XmlParser and XmlTopologyParser instances")
    xml_type = XmlParser.XmlParser(xml_filename)()
    if xml_type == "assembly" or xml_type == "deployment":
        if CosmosUtil.VERBOSE:
            print("Detected ISF Topology XML Files...")
        topology = XmlTopologyParser.XmlTopologyParser(xml_filename)
        # Name of COSMOS target to be created
        self.deployment = topology.get_deployment()
        if CosmosUtil.VERBOSE:
            print("\nFound assembly or deployment named: " +
                  self.deployment + "\n")
    else:
        print("ERROR: XML File Not a Topology File")
        sys.exit(-1)
    # Change back
    os.chdir(bot_dir)
    print("Finished Reusing XmlParser and XmlTopologyParser instances\n")

    if overwrite:
        self.channels = []
        self.events = []
        self.commands = []

    print("Parsing Topology")
    print("Found %s components.\n" % len(topology.get_instances()))
    for inst in topology.get_instances():
        comp_name = inst.get_name()
        comp_type = inst.get_type()
        base_id = inst.get_base_id()
        #
        # If base_id is not set for a component assume it has nothing
        # and skip it.
        if base_id is None:
            print("Skipping %s:%s component - has not commands or telemetry" %
                  (comp_name, comp_type))
            continue
        if '0x' in base_id:
            base_id = int(base_id, 16)
        else:
            base_id = int(base_id)
        comp_parser = inst.get_comp_xml()
        #
        # Parse command data here...
        #
        if 'get_commands' in dir(comp_parser):
            if CosmosUtil.VERBOSE:
                print("Parsing Commands for instance: " + comp_name)
            cmds = comp_parser.get_commands()
            for cmd in cmds:
                opcode = cmd.get_opcodes()[0]
                if '0x' in opcode:
                    opcode = int(opcode, 16)
                else:
                    opcode = int(opcode)
                opcode += base_id
                n = cmd.get_mnemonic()
                c = cmd.get_comment()
                p = cmd.get_priority()
                s = cmd.get_sync()
                f = cmd.get_full()
                source = comp_parser.get_xml_filename()
                cosmos_cmd = CosmosCommand.CosmosCommand(n, opcode, c)
                cosmos_cmd.set_component_attributes(comp_name, comp_type,
                                                    source)
                cosmos_cmd.set_xml_attributes(p, s, f)
                # Count strings to see if 2 (if so needs block argument)
                string_count = 0
                args = cmd.get_args()
                for arg in args:
                    t = arg.get_type()
                    if t == 'string':
                        string_count += 1
                is_multi_string_command = string_count >= 2
                #
                # Parse command arg data here...
                #
                num = 0
                if CosmosUtil.VERBOSE:
                    print("Command " + n + " Found")
                for arg in args:
                    n = arg.get_name()
                    t = arg.get_type()
                    c = arg.get_comment()
                    # BUGFIX: reset enum for every argument; previously it
                    # carried over the value from an earlier enum argument
                    # (or was unbound if the first argument was not an enum).
                    enum = None
                    #
                    # Parse command enum here
                    #
                    if isinstance(t, tuple):
                        enum = t
                        t = t[0][0]
                    num += 1
                    if not is_multi_string_command:
                        cosmos_cmd.add_item(n, t, c, enum)
                if is_multi_string_command:
                    if CosmosUtil.VERBOSE:
                        print(
                            "Multi-string commands not supported in COSMOS at: "
                            + cmd.get_mnemonic() + " from " + source)
                    else:
                        print("Multi-string command " + cmd.get_mnemonic() +
                              " not supported")
                else:
                    self.commands.append(cosmos_cmd)
            if CosmosUtil.VERBOSE:
                print("Finished Parsing Commands for " + comp_name)
        #
        # Parse parameter data here...
        #
        if 'get_parameters' in dir(comp_parser):
            for prm in comp_parser.get_parameters():
                enum = None
                n = prm.get_name()
                s = prm.get_size()
                t = prm.get_type()
                c = prm.get_comment()
                d = prm.get_default()
                source = comp_parser.get_xml_filename()
                # Parse param enum
                if isinstance(t, tuple):
                    enum = t
                    t = t[0][0]
                else:
                    if t not in CosmosUtil.TYPE_DICT.keys():
                        # Skip parameter if it has a serializable / incorrect type
                        print("Unsupported type " + t +
                              ", skipping Parameter " + n)
                        continue
                num += 1
                # Calculate opcodes
                set_opcode = prm.get_set_opcodes()[0]
                if '0x' in set_opcode:
                    set_opcode = int(set_opcode, 16)
                else:
                    set_opcode = int(set_opcode)
                set_opcode += base_id
                save_opcode = prm.get_save_opcodes()[0]
                if '0x' in save_opcode:
                    save_opcode = int(save_opcode, 16)
                else:
                    save_opcode = int(save_opcode)
                save_opcode += base_id
                # Create models: a "set" command carrying the value argument
                # and an argument-less "save" command.
                cosmos_prm_set = CosmosCommand.CosmosCommand(
                    (n + "_prm_set"), set_opcode, c)
                cosmos_prm_save = CosmosCommand.CosmosCommand(
                    (n + "_prm_save"), save_opcode, c)
                cosmos_prm_set.set_component_attributes(comp_name, comp_type,
                                                        source)
                cosmos_prm_save.set_component_attributes(comp_name, comp_type,
                                                         source)
                # Add single arguments
                cosmos_prm_set.add_item(n, t, c, enum, d)
                self.commands.append(cosmos_prm_set)
                self.commands.append(cosmos_prm_save)
        #
        # Parse event data here...
        #
        if "get_events" in dir(comp_parser):
            if CosmosUtil.VERBOSE:
                print("Parsing Events for " + comp_name)
            evrs = comp_parser.get_events()
            for evr in evrs:
                contains_unsupp_type = False
                evr_id = evr.get_ids()[0]
                if '0x' in evr_id:
                    evr_id = int(evr_id, 16)
                else:
                    evr_id = int(evr_id)
                evr_id += base_id
                n = evr.get_name()
                comment = evr.get_comment()
                s = evr.get_severity()
                f = evr.get_format_string()
                source = comp_parser.get_xml_filename()
                cosmos_evr = CosmosEvent.CosmosEvent(n, evr_id, comment)
                cosmos_evr.set_component_attributes(comp_name, comp_type,
                                                    source)
                cosmos_evr.set_xml_attributes(s, f)
                if CosmosUtil.VERBOSE:
                    print("Event " + n + " Found")
                #
                # Parse event enums here...
                #
                bit_count = 0
                args = evr.get_args()
                for arg in args:
                    n = arg.get_name()
                    t = arg.get_type()
                    s = arg.get_size()
                    c = arg.get_comment()
                    enum = None
                    if isinstance(t, tuple):
                        enum = t
                        t = t[0][0]
                    cosmos_evr.add_item(n, t, c, enum)
                if not contains_unsupp_type:
                    self.events.append(cosmos_evr)
                else:
                    print("Skipping evr " + evr.get_name() +
                          ", contains unsupported type")
            if CosmosUtil.VERBOSE:
                print("Finished Parsing Events for " + comp_name)
        #
        # Parse channel data here...
        #
        if "get_channels" in dir(comp_parser):
            if CosmosUtil.VERBOSE:
                print("Parsing Channels for " + comp_name)
            channels = comp_parser.get_channels()
            for ch in channels:
                ch_id = ch.get_ids()[0]
                if '0x' in ch_id:
                    ch_id = int(ch_id, 16)
                else:
                    ch_id = int(ch_id)
                ch_id += base_id
                n = ch.get_name()
                t = ch.get_type()
                enum = None
                if isinstance(t, tuple):
                    enum = t
                    t = t[0][0]
                else:
                    if t not in CosmosUtil.TYPE_DICT.keys():
                        # Skip channel if has a serializable type / incorrect type
                        print("Unsupported type " + t +
                              ", skipping Channel " + n)
                        continue
                c = ch.get_comment()
                limits = ch.get_limits()
                source = comp_parser.get_xml_filename()
                cosmos_ch = CosmosChannel.CosmosChannel(n, ch_id, c)
                cosmos_ch.set_component_attributes(comp_name, comp_type,
                                                   source)
                cosmos_ch.set_item(t, enum, ch.get_format_string())
                cosmos_ch.set_limits(limits)
                if CosmosUtil.VERBOSE:
                    print("Found channel " + n + " with argument type: " + t)
                self.channels.append(cosmos_ch)
            if CosmosUtil.VERBOSE:
                print("Finished Parsing Channels for " + comp_name)

    # Check command and EVR packets to see if they should apply a negative offset
    # NO CMD OR TLM PACKETS SHOULD BE ADDED ONCE THIS CHECK IS DONE
    for evr in self.events:
        CheetahUtil.evr_update_variable_lengths(evr)
    for cmd in self.commands:
        CheetahUtil.cmd_update_variable_lengths(cmd)
    print("Parsed Topology\n")
def main():
    """Generate a JSON telemetry/command dictionary from a topology XML.

    Parses the topology file named on the command line, collects the
    commands, events, channels, and serializable types of every component
    instance, disambiguates names that collide across instances, and writes
    ``<Deployment>Dictionary.json`` (plus ``dictPath.txt`` recording its
    path) under ``opts.work_path``.

    Exits with status -1 when no XML filename is given or BUILD_ROOT is
    unset; raises FileNotFoundError when a referenced serializable XML
    cannot be located under any build root.
    """

    def to_int(value):
        # IDs, opcodes, and base ids appear either as decimal strings or
        # 0x-prefixed hex strings; the same parse was copy-pasted four
        # times in the original.
        return int(value, 16) if "0x" in value else int(value)

    parser = pinit()
    (opts, args) = parser.parse_args()
    Logger.connectOutputLogger(opts.logger_output)
    # Global logger init. below.
    PRINT = logging.getLogger("output")
    #
    # Parse the input Component XML file and create internal meta-model
    #
    if len(args) == 0:
        PRINT.info("\nUsage: %s [options] xml_filename" % sys.argv[0])
        PRINT.info("ERROR: Cannot create dictionary\n")
        # sys.exit never returns; the original's trailing `return` here was
        # unreachable and has been dropped.
        sys.exit(-1)
    xmlFilename = args[0]
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opts.build_root_overwrite is not None:
        set_build_roots(opts.build_root_overwrite)
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])

    parsedTopology = XmlTopologyParser.XmlTopologyParser(xmlFilename)
    deployment = parsedTopology.get_deployment()
    outFilename = deployment + "Dictionary.json"
    outFilepath = "/".join([opts.work_path, outFilename])
    # The original joined "/dictPath.txt", yielding "work_path//dictPath.txt";
    # the doubled slash resolves to the same file on POSIX but is
    # normalized here.
    descriptionFilename = "/".join([opts.work_path, "dictPath.txt"])

    dictionary = {
        deployment: {
            "events": {},
            "channels": {},
            "commands": {},
            "serializables": {},
        }
    }
    events = dictionary[deployment]["events"]
    channels = dictionary[deployment]["channels"]
    commands = dictionary[deployment]["commands"]
    serializables = dictionary[deployment]["serializables"]
    limitLabels = [
        "low_red",
        "low_orange",
        "low_yellow",
        "high_yellow",
        "high_orange",
        "high_red",
    ]
    unitLabels = ["label", "gain", "offset"]
    # name -> [numeric ids]; a name with more than one id means the same
    # component type is instantiated more than once and needs disambiguation.
    instanceIDs = {"events": {}, "channels": {}, "commands": {}}

    for inst in parsedTopology.get_instances():
        serializableFilenames = inst.get_comp_xml().get_serializable_type_files()
        for filename in serializableFilenames:
            # Search every build root; for/else raises only when no root
            # contains the file.
            for build_root in get_build_roots():
                if os.path.exists(os.path.join(build_root, filename)):
                    break
            else:
                raise FileNotFoundError(os.path.join(build_root, filename))
            parsedSerializable = XmlSerializeParser.XmlSerializeParser(
                os.path.join(build_root, filename)
            )
            name = parsedSerializable.get_name()
            namespace = parsedSerializable.get_namespace()
            # Members arrive as (name, type, size, format_string, comment)
            # tuples.
            members = [
                {
                    "name": member[0],
                    "type": format_type_item(member[1]),
                    "size": member[2],
                    "format_string": member[3],
                    "comment": member[4],
                }
                for member in parsedSerializable.get_members()
            ]
            serializables[namespace + "::" + name] = {
                "name": name,
                "namespace": namespace,
                "members": members,
            }

        comp_name = inst.get_name()
        comp_type = inst.get_type()
        comp_namespace = inst.get_namespace()
        component = "::".join([comp_namespace, comp_type])
        base_id = to_int(inst.get_base_id())
        comp_parser = inst.get_comp_xml()
        comp_dir = dir(comp_parser)

        if "get_commands" in comp_dir:
            for command in comp_parser.get_commands():
                opcode = to_int(command.get_opcodes()[0]) + base_id
                name = command.get_mnemonic()
                instanceIDs["commands"].setdefault(name, []).append(opcode)
                arguments = [
                    format_type_item(arg.get_type()) for arg in command.get_args()
                ]
                commands[opcode] = {
                    "id": opcode,
                    "name": name,
                    "instance": comp_name,
                    "description": command.get_comment(),
                    "component": component,
                    "arguments": arguments,
                }

        if "get_events" in comp_dir:
            for event in comp_parser.get_events():
                ev_id = to_int(event.get_ids()[0]) + base_id
                name = event.get_name()
                instanceIDs["events"].setdefault(name, []).append(ev_id)
                arguments = [
                    format_type_item(arg.get_type()) for arg in event.get_args()
                ]
                events[ev_id] = {
                    "id": ev_id,
                    "description": event.get_comment(),
                    "name": name,
                    "instance": comp_name,
                    "component": component,
                    "format_string": event.get_format_string(),
                    "severity": event.get_severity(),
                    "telem_type": "event",
                    "arguments": arguments,
                }

        if "get_channels" in comp_dir:
            for channel in comp_parser.get_channels():
                ch_id = to_int(channel.get_ids()[0]) + base_id
                name = channel.get_name()
                instanceIDs["channels"].setdefault(name, []).append(ch_id)
                units = [dict(zip(unitLabels, unit)) for unit in channel.get_units()]
                typeObj = channel.get_type()
                if isinstance(typeObj, str):
                    type_name = typeObj
                    enum_dict = None
                else:
                    # Non-string type is the parser's enum tuple; element [1]
                    # holds the enumerators, whose first field is the label.
                    type_name = "Enum"
                    enum_dict = {
                        str(i): enum[0] for i, enum in enumerate(typeObj[1])
                    }
                metadata = {
                    "id": ch_id,
                    "name": name,
                    "instance": comp_name,
                    "description": channel.get_comment(),
                    "telem_type": "channel",
                    "component": component,
                    "format_string": channel.get_format_string(),
                    "limits": dict(zip(limitLabels, channel.get_limits())),
                    "type": type_name,
                    "units": units,
                }
                if type_name == "Enum":
                    metadata["enum_dict"] = enum_dict
                    # Enum values are rendered by label, so force a string
                    # format (matches original behavior).
                    metadata["format_string"] = "%s"
                channels[ch_id] = metadata

    # Prepend instance name to commands, events, and channels with duplicate
    # component types
    # PRINT.info(json.dumps(instanceIDs, indent=4))
    for telemetryType, idDict in instanceIDs.items():
        for ids in idDict.values():
            if len(ids) > 1:
                for dup_id in ids:
                    telem = dictionary[deployment][telemetryType][dup_id]
                    telem["name"] = "_".join([telem["instance"], telem["name"]])

    # Stringify JSON -- indent option makes it readable, can be removed if file
    # size is an issue
    jsonStr = json.dumps(dictionary, indent=4)

    # Create output directory if it doesn't exist
    directory = os.path.dirname(outFilepath)
    if not os.path.exists(directory):
        os.makedirs(directory)

    # Write the JSON and its path file; `with` guarantees both handles are
    # closed (the original never closed descriptionFile).
    with open(outFilepath, "w") as outFile:
        outFile.write(jsonStr)
    with open(descriptionFilename, "w") as descriptionFile:
        descriptionFile.write(outFilepath)

    PRINT.info("\nJSON output written to %s" % outFilepath)