def main():
    # Configures output only to stdout.
    Logger.connectOutputLogger(None)
    GenFactory.getInstance().configureVisitor(
        "TestComponentH", "ComponentHVisitor", True, True
    )
    GenFactory.getInstance().configureVisitor(
        "TestComponentCpp", "ComponentCppVisitor", True, True
    )
    initfiles = GenFactory.getInstance().create("initFiles")
    print(initfiles)
    initfiles("object args")
def main():
    # Configures output only to stdout.
    Logger.connectOutputLogger(None)
    xmlfile = "../../test/app1a/DuckAppAi.xml"
    print("Topology XML parse test (%s)" % xmlfile)
    #
    # Basic usage of this factory to create the topology meta-model
    #
    the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(xmlfile)
    top = TopoFactory().create(the_parsed_topology_xml)
    #
    # End of usage and top is the instance of the model to be used.
    #
    print("Topology: %s" % top)
    print("Namespace: %s" % top.get_namespace())
    print("Comment: %s" % top.get_comment())
    print()
    for component in top.get_comp_list():
        print("Component")
        print("    Namespace: " + component.get_namespace())
        print("    Name: " + component.get_name())
        print("    Type: " + component.get_kind())
        if component.get_comment() is not None:
            print("    Comment: " + component.get_comment())
        print("    Output Ports:")
        for port in component.get_ports():
            print("        Name: " + port.get_name())
            print("        Port Type: " + port.get_type())
            print("        Direction: " + port.get_direction())
            if port.get_sync() is not None:
                print("        Sync: " + port.get_sync())
            if port.get_comment() is not None:
                print("        Comment: " + port.get_comment())
            print("        Target Component: " + port.get_target_comp())
            print("        Target Port: " + port.get_target_port())
            print("        Target Type: " + port.get_target_type())
            print("        Target Direction: " + port.get_target_direction())
            print()
        print()
def main():
    # Configures output only to stdout.
    Logger.connectOutputLogger(None)
    xmlfile = "../../test/app1a/NikonComponentAi.xml"
    print("Component XML parse test (%s)" % xmlfile)
    #
    # Basic usage of this factory to create the component meta-model
    #
    parsed_port_xml_list = []
    the_parsed_component_xml = XmlComponentParser.XmlComponentParser(xmlfile)
    port_type_files_list = the_parsed_component_xml.get_port_type_files()
    for port_file in port_type_files_list:
        # print("Parsing %s" % port_file)
        parsed_port_xml_list.append(XmlPortsParser.XmlPortsParser(port_file))
    # print(parsed_port_xml_list)
    comp = CompFactory().create(the_parsed_component_xml, parsed_port_xml_list)
    #
    # End of usage and comp is the instance of the model to be used.
    #
    print("Component: %s" % comp)
    print("Namespace: %s" % comp.get_namespace())
    print("Name: %s" % comp.get_name())
    print("Kind: %s" % comp.get_kind())
    print("Comment: %s" % comp.get_comment())
    print("Ports: %s" % comp.get_ports())
    print()
    for port in comp.get_ports():
        print("Port:")
        print(
            port.get_name(),
            port.get_type(),
            port.get_direction(),
            port.get_sync(),
            port.get_namespace(),
        )
        print(port.get_comment())
        print(port.get_ifcomment())
        print(port.get_includes())
        print(port.get_args())
        for arg in port.get_args():
            print("\t%s : %s # %s" % (arg.get_name(), arg.get_type(), arg.get_comment()))
def main():
    # Configures output only to stdout.
    Logger.connectOutputLogger(None)
    xmlfile = "../../test/Msg1InterfaceAi.xml"
    print("Port XML parse test (%s)" % xmlfile)
    #
    # Basic usage of this factory to create the port meta-model
    #
    the_parsed_port_xml = XmlPortsParser.XmlPortsParser(xmlfile)
    print(the_parsed_port_xml.get_args())
    args = the_parsed_port_xml.get_args()
    for a in args:
        print("\t", a)
        print("\t", a.get_name())
        print("\t", a.get_type())
        print("\t", a.get_comment())
    print(the_parsed_port_xml.get_include_header_files())
    print(the_parsed_port_xml.get_interface())
    print(the_parsed_port_xml.get_interface().get_comment())
    print(the_parsed_port_xml.get_interface().get_name())
    print(the_parsed_port_xml.get_interface().get_namespace())
    port = PortFactory().create(the_parsed_port_xml)
    #
    # End of usage and port is the instance of the model to be used.
    #
    print("Port: %s" % port)
    print("Namespace: %s" % port.get_namespace())
    print("Type: %s" % port.get_type())
    print("Comment: %s" % port.get_ifcomment())
    print("Includes: %s" % port.get_includes())
    print()
    args = port.get_args()
    print("Args: %s" % args)
    for a in args:
        print(
            "Arg Name: %s Type: %s Comment: %s"
            % (a.get_name(), a.get_type(), a.get_comment())
        )
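# ----------------------------------------------------------------------
# The four parse-test drivers above share the same pattern: run an XML
# parser, hand the parsed representation to a factory, and print the
# resulting model.  A minimal entry-point sketch, assuming each main()
# lives in its own standalone test script:
# ----------------------------------------------------------------------
if __name__ == "__main__":
    main()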
def main(): """ Main program. """ global VERBOSE global BUILD_ROOT global DEPLOYMENT # Enable printing model error output to STDOUT Logger.connectOutputLogger(None) Parser = pinit() (opt, args) = Parser.parse_args() VERBOSE = opt.verbose_flag ConfigManager.ConfigManager.getInstance() # # Handle command line arguments # # # Parse the input Topology XML filename # if len(args) == 0: print("ERROR: Usage: %s [options] xml_filename" % sys.argv[0]) return elif len(args) == 1: xml_filename = args[0] else: print("ERROR: Too many filenames, should only have one") return # # Check for BUILD_ROOT variable for XML port searches # if not opt.build_root_overwrite is None: set_build_roots(opt.build_root_overwrite) if VERBOSE: print("BUILD_ROOT set to %s" % ",".join(get_build_roots())) else: if ("BUILD_ROOT" in os.environ.keys()) == False: print("ERROR: Build root not set to root build path...") sys.exit(-1) set_build_roots(os.environ["BUILD_ROOT"]) if VERBOSE: print("BUILD_ROOT set to %s" % ",".join(get_build_roots())) # # Write test component # if not "Ai" in xml_filename: print("ERROR: Missing Ai at end of file name...") raise OSError # # Create python dictionaries # print(xml_filename) xml_type = XmlParser.XmlParser(xml_filename)() # Only Components can be inputted if xml_type == "component": if VERBOSE: print("Detected Component XML so GeneratingComponent C++ Files...") the_parsed_component_xml = XmlComponentParser.XmlComponentParser( xml_filename) component_model = parse_component(the_parsed_component_xml, xml_filename, opt) if VERBOSE: print("\nGenerating tests...") generate_impl_files(opt, component_model) else: print("ERROR: {} is used for component XML files, not {} XML files". format(sys.argv[0], xml_type)) sys.exit(-1) sys.exit(0)
def main(): """ Main program. """ global ERROR # prevent local creation of variable global VERBOSE # prevent local creation of variable global GEN_TEST_CODE # indicate if test code should be generated global DEPLOYMENT # deployment set in topology xml only and used to install new instance dicts ERROR = False # Sets up the initial (singleton) instance ConfigManager.ConfigManager.getInstance() Parser = pinit() (opt, args) = Parser.parse_args() VERBOSE = opt.verbose_flag # Check that the specified working directory exists. Remember, the # default working directory is the current working directory which # always exists. We are basically only checking for when the user # specifies an alternate working directory. if os.path.exists(opt.work_path) == False: Parser.error(f"Specified path does not exist ({opt.work_path})!") working_dir = opt.work_path # Get the current working directory so that we can return to it when # the program completes. We always want to return to the place where # we started. starting_directory = os.getcwd() os.chdir(working_dir) # print working_dir # print os.getcwd() # Configure the logging. log_level = opt.logger.upper() log_level_dict = dict() log_level_dict["QUIET"] = None log_level_dict["DEBUG"] = logging.DEBUG log_level_dict["INFO"] = logging.INFO log_level_dict["WARNING"] = logging.WARN log_level_dict["ERROR"] = logging.ERROR log_level_dict["CRITICAL"] = logging.CRITICAL if log_level_dict[log_level] is None: stdout_enable = False else: stdout_enable = True log_fd = opt.logger_output # For now no log file Logger.connectDebugLogger(log_level_dict[log_level], log_fd, stdout_enable) Logger.connectOutputLogger(log_fd) # # Parse the input Component XML file and create internal meta-model # if len(args) == 0: PRINT.info(f"Usage: {sys.argv[0]} [options] xml_filename") return else: xml_filenames = args[0:] # # Check for BUILD_ROOT variable for XML port searches # if opt.build_root_flag == True: # Check for BUILD_ROOT env. variable if ("BUILD_ROOT" in list(os.environ.keys())) == False: PRINT.info( "ERROR: The -b command option requires that BUILD_ROOT environmental variable be set to root build path..." ) sys.exit(-1) else: set_build_roots(os.environ.get("BUILD_ROOT")) for xml_filename in xml_filenames: xml_type = XmlParser.XmlParser(xml_filename)() if xml_type == "component": DEBUG.info( "Detected Component XML so Generating Component C++ Files...") the_parsed_component_xml = XmlComponentParser.XmlComponentParser( xml_filename) generate_component(the_parsed_component_xml, os.path.basename(xml_filename), opt) dependency_parser = the_parsed_component_xml elif xml_type == "interface": DEBUG.info( "Detected Port type XML so Generating Port type C++ Files...") the_parsed_port_xml = XmlPortsParser.XmlPortsParser(xml_filename) generate_port(the_parsed_port_xml, os.path.basename(xml_filename)) dependency_parser = the_parsed_port_xml elif xml_type == "serializable": DEBUG.info( "Detected Serializable XML so Generating Serializable C++ Files..." 
) the_serial_xml = XmlSerializeParser.XmlSerializeParser( xml_filename) generate_serializable(the_serial_xml, opt) dependency_parser = the_serial_xml elif xml_type == "assembly" or xml_type == "deployment": DEBUG.info( "Detected Topology XML so Generating Topology C++ Files...") the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser( xml_filename) DEPLOYMENT = the_parsed_topology_xml.get_deployment() print("Found assembly or deployment named: %s\n" % DEPLOYMENT) generate_topology(the_parsed_topology_xml, os.path.basename(xml_filename), opt) dependency_parser = the_parsed_topology_xml elif xml_type == "enum": DEBUG.info( "Detected Enum XML so Generating hpp, cpp, and py files...") curdir = os.getcwd() if EnumGenerator.generate_enum(xml_filename): ERROR = False PRINT.info( f"Completed generating files for {xml_filename} Enum XML...." ) else: ERROR = True os.chdir(curdir) elif xml_type == "array": DEBUG.info( "Detected Array XML so Generating hpp, cpp, and py files...") curdir = os.getcwd() if ArrayGenerator.generate_array(xml_filename): ERROR = False PRINT.info( f"Completed generating files for {xml_filename} Array XML..." ) else: ERROR = True os.chdir(curdir) else: PRINT.info("Invalid XML found...this format not supported") ERROR = True if opt.dependency_file is not None: if opt.build_root_flag: generate_dependency_file( opt.dependency_file, os.path.basename(xml_filename), list(get_build_roots())[0], dependency_parser, xml_type, ) # Always return to directory where we started. os.chdir(starting_directory) if ERROR == True: sys.exit(-1) else: sys.exit(0)
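# ----------------------------------------------------------------------
# Because main() reads its options through pinit()/parse_args(), it can
# also be exercised programmatically by populating sys.argv before the
# call.  A minimal sketch, assuming a hypothetical component XML named
# MyComponentAi.xml, that -b is the BUILD_ROOT flag defined by pinit()
# (as the error message above suggests), and that BUILD_ROOT is set in
# the environment:
# ----------------------------------------------------------------------
import sys

sys.argv = ["codegen.py", "-b", "MyComponentAi.xml"]  # hypothetical arguments
main()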
def main():
    parser = pinit()
    (opts, args) = parser.parse_args()
    Logger.connectOutputLogger(opts.logger_output)

    # Global logger init. below.
    PRINT = logging.getLogger("output")

    #
    # Parse the input Topology XML file and create the internal meta-model
    #
    if len(args) == 0:
        PRINT.info("\nUsage: %s [options] xml_filename" % sys.argv[0])
        PRINT.info("ERROR: Cannot create dictionary\n")
        sys.exit(-1)
    else:
        xmlFilename = args[0]

    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opts.build_root_overwrite is not None:
        set_build_roots(opts.build_root_overwrite)
    else:
        if "BUILD_ROOT" not in os.environ:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])

    parsedTopology = XmlTopologyParser.XmlTopologyParser(xmlFilename)
    deployment = parsedTopology.get_deployment()
    outFilename = deployment + "Dictionary.json"
    outFilepath = "/".join([opts.work_path, outFilename])
    descriptionFilename = "/".join([opts.work_path, "dictPath.txt"])

    dictionary = {}
    dictionary[deployment] = {
        "events": {},
        "channels": {},
        "commands": {},
        "serializables": {},
    }

    events = dictionary[deployment]["events"]
    channels = dictionary[deployment]["channels"]
    commands = dictionary[deployment]["commands"]
    serializables = dictionary[deployment]["serializables"]
    limitLabels = [
        "low_red",
        "low_orange",
        "low_yellow",
        "high_yellow",
        "high_orange",
        "high_red",
    ]
    unitLabels = ["label", "gain", "offset"]

    instanceIDs = {"events": {}, "channels": {}, "commands": {}}

    for inst in parsedTopology.get_instances():
        serializableFilenames = inst.get_comp_xml().get_serializable_type_files()
        for filename in serializableFilenames:
            for build_root in get_build_roots():
                if os.path.exists(os.path.join(build_root, filename)):
                    break
            else:
                raise FileNotFoundError(os.path.join(build_root, filename))
            parsedSerializable = XmlSerializeParser.XmlSerializeParser(
                os.path.join(build_root, filename)
            )
            name = parsedSerializable.get_name()
            namespace = parsedSerializable.get_namespace()
            members = []
            membersRaw = parsedSerializable.get_members()
            for member in membersRaw:
                members.append(
                    {
                        "name": member[0],
                        "type": format_type_item(member[1]),
                        "size": member[2],
                        "format_string": member[3],
                        "comment": member[4],
                    }
                )
            metadata = {
                "name": name,
                "namespace": namespace,
                "members": members,
            }
            serializables[namespace + "::" + name] = metadata

        comp_name = inst.get_name()
        comp_type = inst.get_type()
        comp_namespace = inst.get_namespace()
        component = "::".join([comp_namespace, comp_type])
        base_id = inst.get_base_id()
        if "0x" in base_id:
            base_id = int(base_id, 16)
        else:
            base_id = int(base_id)
        comp_parser = inst.get_comp_xml()
        comp_dir = dir(comp_parser)

        if "get_commands" in comp_dir:
            for command in comp_parser.get_commands():
                opcode = command.get_opcodes()[0]
                opcode = int(opcode, 16) if ("0x" in opcode) else int(opcode)
                opcode += base_id

                name = command.get_mnemonic()
                if name in instanceIDs["commands"]:
                    instanceIDs["commands"][name].append(opcode)
                else:
                    instanceIDs["commands"][name] = [opcode]

                arguments = []
                for arg in command.get_args():
                    typeItem = arg.get_type()
                    typeObj = format_type_item(typeItem)
                    arguments.append(typeObj)

                metadata = {
                    "id": opcode,
                    "name": name,
                    "instance": comp_name,
                    "description": command.get_comment(),
                    "component": component,
                    "arguments": arguments,
                }
                commands[opcode] = metadata

        if "get_events" in comp_dir:
            for event in comp_parser.get_events():
                ev_id = event.get_ids()[0]
                ev_id = int(ev_id, 16) if ("0x" in ev_id) else int(ev_id)
                ev_id += base_id

                name = event.get_name()
                if name in instanceIDs["events"]:
                    instanceIDs["events"][name].append(ev_id)
                else:
                    instanceIDs["events"][name] = [ev_id]

                arguments = []
                for arg in event.get_args():
                    typeItem = arg.get_type()
                    typeObj = format_type_item(typeItem)
                    arguments.append(typeObj)

                metadata = {
                    "id": ev_id,
                    "description": event.get_comment(),
                    "name": name,
                    "instance": comp_name,
                    "component": component,
                    "format_string": event.get_format_string(),
                    "severity": event.get_severity(),
                    "telem_type": "event",
                    "arguments": arguments,
                }
                events[ev_id] = metadata

        if "get_channels" in comp_dir:
            for channel in comp_parser.get_channels():
                ch_id = channel.get_ids()[0]
                ch_id = int(ch_id, 16) if ("0x" in ch_id) else int(ch_id)
                ch_id += base_id

                name = channel.get_name()
                if name in instanceIDs["channels"]:
                    instanceIDs["channels"][name].append(ch_id)
                else:
                    instanceIDs["channels"][name] = [ch_id]

                units = []
                for unit in channel.get_units():
                    units.append(dict(list(zip(unitLabels, unit))))

                typeObj = channel.get_type()
                type_name = ""
                if isinstance(typeObj, str):
                    type_name = typeObj
                else:
                    type_name = "Enum"
                    enum_dict = {}
                    for (i, enum) in enumerate(typeObj[1]):
                        enum_dict[str(i)] = enum[0]

                metadata = {
                    "id": ch_id,
                    "name": name,
                    "instance": comp_name,
                    "description": channel.get_comment(),
                    "telem_type": "channel",
                    "component": component,
                    "format_string": channel.get_format_string(),
                    "limits": dict(list(zip(limitLabels, channel.get_limits()))),
                    "type": type_name,
                    "units": units,
                }

                if type_name == "Enum":
                    metadata["enum_dict"] = enum_dict
                    metadata["format_string"] = "%s"

                channels[ch_id] = metadata

    # Prepend instance name to commands, events, and channels with duplicate component types
    # PRINT.info(json.dumps(instanceIDs, indent=4))
    for telemetryType, idDict in list(instanceIDs.items()):
        for name, ids in list(idDict.items()):
            if len(ids) > 1:
                for id in ids:
                    telem = dictionary[deployment][telemetryType][id]
                    name = telem["name"]
                    instanceName = telem["instance"]
                    name = "_".join([instanceName, name])
                    telem["name"] = name

    # Stringify JSON -- indent option makes it readable, can be removed if file
    # size is an issue
    jsonStr = json.dumps(dictionary, indent=4)

    # Create output directory if it doesn't exist
    directory = os.path.dirname(outFilepath)
    if not os.path.exists(directory):
        os.makedirs(directory)

    # Write JSON to file
    outFile = open(outFilepath, "w")
    outFile.write(jsonStr)
    descriptionFile = open(descriptionFilename, "w")
    descriptionFile.write(outFilepath)
    PRINT.info("\nJSON output written to %s" % outFilepath)
    outFile.close()
    descriptionFile.close()
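# ----------------------------------------------------------------------
# The generated file is a single top-level object keyed by the deployment
# name, with "events", "channels", "commands", and "serializables"
# sub-dictionaries keyed by ID.  A minimal read-back sketch, assuming a
# hypothetical output file named RefDictionary.json:
# ----------------------------------------------------------------------
import json

with open("RefDictionary.json") as dictFile:  # hypothetical output filename
    dictionary = json.load(dictFile)

deployment = next(iter(dictionary))  # the single top-level key is the deployment name
for opcode, command in dictionary[deployment]["commands"].items():
    print(opcode, command["name"], command["arguments"])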