Example #1
def parse_component(the_parsed_component_xml, xml_filename, opt):
    """
    Creates a component meta-model and generates the
    component files.  Nothing is returned.
    """
    global BUILD_ROOT
    #

    parsed_port_xml_list = []

    #
    # Configure the meta-model for the component
    #
    port_type_files_list = the_parsed_component_xml.get_port_type_files()

    for port_file in port_type_files_list:
        port_file = search_for_file("Port", port_file)
        xml_parser_obj = XmlPortsParser.XmlPortsParser(port_file)
        parsed_port_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    parsed_serializable_xml_list = []
    #
    # Configure the meta-model for the component
    #
    serializable_type_files_list = (
        the_parsed_component_xml.get_serializable_type_files()
    )
    for serializable_file in serializable_type_files_list:
        serializable_file = search_for_file("Serializable", serializable_file)
        xml_parser_obj = XmlSerializeParser.XmlSerializeParser(
            serializable_file
        )  # Telemetry/Params can only use generated serializable types
        # check to make sure that the serializables don't have things that channels and parameters can't have
        # can't have external non-xml members
        if len(xml_parser_obj.get_include_header_files()):
            print(
                "ERROR: Component include serializables cannot use user-defined types. file: "
                % serializable_file
            )
            sys.exit(-1)

        parsed_serializable_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    model = CompFactory.CompFactory.getInstance()
    component_model = model.create(
        the_parsed_component_xml, parsed_port_xml_list, parsed_serializable_xml_list
    )

    return component_model
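
A hedged usage sketch for parse_component (not part of the original source): it assumes the fprime_ac parser packages are importable and BUILD_ROOT is configured; the component file name is illustrative.

# Hypothetical caller sketch -- the file name is illustrative only.
from fprime_ac.parsers import XmlComponentParser

parsed = XmlComponentParser.XmlComponentParser("MyComponentAi.xml")
# 'opt' is not used in the body shown above, so a placeholder is fine here.
component_model = parse_component(parsed, "MyComponentAi.xml", opt=None)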
Example #2
    def __check_enum_files(self, enum_file_list):
        for enum_file in enum_file_list:
            enum_file = search_for_file("Enum", enum_file)
            enum_model = XmlEnumParser.XmlEnumParser(enum_file)
            enum_elem = etree.Element("enum")
            enum_type = enum_model.get_namespace() + "::" + enum_model.get_name()
            enum_elem.attrib["type"] = enum_type
            enum_value = 0
            for (
                    member_name,
                    member_value,
                    member_comment,
            ) in enum_model.get_items():
                enum_mem = etree.Element("item")
                enum_mem.attrib["name"] = member_name
                # keep track of incrementing enum value
                if member_value is not None:
                    enum_value = int(member_value)

                enum_mem.attrib["value"] = "%d" % enum_value
                enum_value = enum_value + 1
                if member_comment is not None:
                    enum_mem.attrib["description"] = member_comment
                enum_elem.append(enum_mem)
            self.__enum_list.append(enum_elem)
Example #3
    def process_array_files(self, array_file_list):
        for array_file in array_file_list:
            array_file = search_for_file("Array", array_file)
            array_model = XmlArrayParser.XmlArrayParser(array_file)
            # process any XML includes
            self.process_enum_files(array_model.get_include_enum_files())
            self.process_array_files(array_model.get_include_array_files())
            self.process_serializable_files(array_model.get_includes())
            array_type = array_model.get_namespace() + "::" + array_model.get_name()
            array_size = int(array_model.get_size())
            elem_type = array_model.get_type()
            if isinstance(elem_type, tuple):
                elem_type_size = 4  # Fixme: can we put this in a constant somewhere?
            elif elem_type in self.size_dict:  # See if it is a registered type
                elem_type_size = self.size_dict[elem_type]
            else:
                elem_type_size = self.get_type_size(elem_type, 1)  # Fixme: strings?
            if elem_type_size is None:
                print("Illegal type %s in array %s" % (elem_type, array_type))
                sys.exit(-1)
            self.add_type_size(array_type, elem_type_size * array_size)
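
The "Fixme" notes in these size-computation methods ask for the hard-coded 4-byte enum size to live in a constant. Below is a minimal refactor sketch, assuming the same size_dict/get_type_size attributes used above; the constant and method names are hypothetical, and the helper is intended as a method on the same class.

# Hypothetical refactor sketch -- not part of the original source.
ENUM_SERIALIZED_SIZE = 4  # module-level constant: enums serialize as a 4-byte integer

def resolve_element_size(self, elem_type, elem_size=1):
    """Hypothetical helper mirroring the size branches above."""
    if isinstance(elem_type, tuple):  # inline enumeration definition
        return ENUM_SERIALIZED_SIZE
    if elem_type in self.size_dict:  # registered type
        return self.size_dict[elem_type]
    return self.get_type_size(elem_type, elem_size)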
Example #4
    def check_for_serial_xml(self):
        serializable_file_list = self.__parsed_xml_dict[
            self.__comp_type].get_serializable_type_files()
        if serializable_file_list is not None:
            for serializable_file in serializable_file_list:
                serializable_file = search_for_file("Serializable",
                                                    serializable_file)
                serializable_model = XmlSerializeParser.XmlSerializeParser(
                    serializable_file)
                if len(serializable_model.get_includes()) != 0:
                    raise Exception(
                        "%s: Can only include one level of serializable for dictionaries"
                        % serializable_file)

                # check for included enum XML in included serializable XML
                if len(serializable_model.get_include_enums()) != 0:
                    enum_file_list = serializable_model.get_include_enums()
                    self.__check_enum_files(enum_file_list)

                serializable_elem = etree.Element("serializable")
                serializable_type = (serializable_model.get_namespace() +
                                     "::" + serializable_model.get_name())
                serializable_elem.attrib["type"] = serializable_type
                members_elem = etree.Element("members")
                for (
                        member_name,
                        member_type,
                        member_size,
                        member_format_specifier,
                        member_comment,
                ) in serializable_model.get_members():
                    member_elem = etree.Element("member")
                    member_elem.attrib["name"] = member_name
                    member_elem.attrib[
                        "format_specifier"] = member_format_specifier
                    if member_comment is not None:
                        member_elem.attrib["description"] = member_comment
                    if isinstance(member_type, tuple):
                        type_name = "{}::{}::{}".format(
                            serializable_type,
                            member_name,
                            member_type[0][1],
                        )
                        enum_elem = self.__extract_enum_elem(
                            type_name, member_type[1])
                        self.__enum_list.append(enum_elem)
                    else:
                        type_name = member_type
                        if member_type == "string":
                            member_elem.attrib["len"] = member_size
                    member_elem.attrib["type"] = type_name
                    members_elem.append(member_elem)
                serializable_elem.append(members_elem)

                dup = False
                for ser in self.__serializable_list:
                    if ser.attrib["type"] == serializable_elem.attrib["type"]:
                        dup = True
                if not dup:
                    self.__serializable_list.append(serializable_elem)
Example #5
    def process_enum_files(self, enum_file_list):
        for enum_file in enum_file_list:
            enum_file = search_for_file("Enumeration", enum_file)
            enum_model = XmlEnumParser.XmlEnumParser(enum_file)
            enum_type = enum_model.get_namespace() + "::" + enum_model.get_name()
            # Fixme: can we put this in a constant somewhere?
            self.add_type_size(enum_type, 4)
Example #6
def __ser_loader(ser_xml: str) -> XmlSerializeParser:
    """ Loads the serializable """
    serializable_file = search_for_file("Serializable", ser_xml)
    xml_parser_obj = XmlSerializeParser(serializable_file)
    # Telemetry/Params can only use generated serializable types
    # check to make sure that the serializables don't have things that channels and parameters can't have
    # can't have external non-xml members
    if len(xml_parser_obj.get_include_header_files()):
        raise Exception(
            "ERROR: Component include serializables cannot use user-defined types. file: {}"
            .format(serializable_file))
    return xml_parser_obj
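
A possible caller sketch for the loader above, assuming it is invoked from within the same module (the XML file name is hypothetical; get_namespace()/get_name() are the accessors used elsewhere in these examples):

# Hypothetical usage -- illustrative only.
try:
    ser_parser = __ser_loader("MyTypeSerializableAi.xml")
except Exception as exc:  # raised when the XML pulls in external header files
    print("Failed to load serializable: {}".format(exc))
else:
    print(ser_parser.get_namespace(), ser_parser.get_name())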
Example #7
def __component_loader(comp_xml: Path) -> Component:
    """ This is the mega system loader from topology and down """
    comp_file = search_for_file("Component", str(comp_xml))
    parsed_comp_xml = XmlComponentParser(comp_file)

    port_pairs = [
        __port_loader(port_file)
        for port_file in set(parsed_comp_xml.get_port_type_files())
    ]
    ser_xmls = [
        __ser_loader(ser_file)
        for ser_file in parsed_comp_xml.get_serializable_type_files()
    ]
    port_objs = {pair[0].get_type(): pair[0] for pair in port_pairs}
    port_objs["Serial"] = Port(None, "Serial", None)

    comp = CompFactory.getInstance().create(parsed_comp_xml,
                                            [pair[1] for pair in port_pairs],
                                            ser_xmls)

    # Inconsistency #5: Due to an inconsistent model, we have to rewrite the loaded component specification fields here
    comp._Component__kind2 = comp._Component__kind
    comp._Component__kind = comp._Component__name
    comp._Component__name = None

    # Inconsistency #1: component models only load one "port" and not 0-(max num - 1) ports of each type
    appendables = []
    for port in comp.get_ports():
        # Clone and enumerate ports
        port.set_source_num(0)
        for i in range(1, int(port.get_max_number())):
            new_port = copy.deepcopy(port)
            new_port.set_source_num(i)
            appendables.append(new_port)
    comp.get_ports().extend(appendables)

    for port in comp.get_ports():
        port_spec = port_objs[port.get_type()]
        try:
            __merge_object_model(port, port_spec)
        except InconsistencyException as inc:
            raise InconsistencyException(
                "Inconsistency detected between Component and Port specification: {}.{}. {}"
                .format(comp.get_kind(), comp.get_name(), inc)) from inc
    return comp
Example #8
    def check_for_arrays(self):
        array_file_list = self.__parsed_xml_dict[
            self.__comp_type].get_array_type_files()
        if array_file_list is not None:
            for array_file in array_file_list:
                array_file = search_for_file("Array", array_file)
                array_model = XmlArrayParser.XmlArrayParser(array_file)
                array_elem = etree.Element("array")

                array_name = array_model.get_namespace() + "::" + array_model.get_name()
                array_elem.attrib["name"] = array_name

                array_type = array_model.get_type()
                array_elem.attrib["type"] = array_type

                array_type_id = array_model.get_type_id()
                array_elem.attrib["type_id"] = array_type_id

                array_size = array_model.get_size()
                array_elem.attrib["size"] = array_size

                array_format = array_model.get_format()
                array_elem.attrib["format"] = array_format

                members_elem = etree.Element("defaults")
                for d_val in array_model.get_default():
                    member_elem = etree.Element("default")
                    member_elem.attrib["value"] = d_val
                    members_elem.append(member_elem)

                array_elem.append(members_elem)

                dup = False
                for arr in self.__array_list:
                    if arr.attrib["name"] == array_elem.attrib["name"]:
                        dup = True
                if not dup:
                    self.__array_list.append(array_elem)
Example #9
    def process_serializable_files(self, serializable_file_list):
        for serializable_file in serializable_file_list:
            serializable_file = search_for_file("Serializable", serializable_file)
            serializable_model = XmlSerializeParser.XmlSerializeParser(serializable_file)
            # process XML includes
            self.process_enum_files(serializable_model.get_include_enums())
            self.process_array_files(serializable_model.get_include_arrays())
            self.process_serializable_files(serializable_model.get_includes())
            serializable_type = (
                serializable_model.get_namespace() + "::" + serializable_model.get_name()
            )
            serializable_size = 0
            for (
                member_name,
                member_type,
                member_size,
                member_format_specifier,
                member_comment,
                _,
            ) in serializable_model.get_members():
                # if enumeration
                if isinstance(member_type, tuple):
                    type_size = 4  # Fixme: can we put this in a constant somewhere?
                elif member_type in self.size_dict:  # See if it is a registered type
                    type_size = self.size_dict[member_type]
                else:
                    type_size = self.get_type_size(member_type, member_size)
                if type_size is None:
                    print(
                        "Illegal type %s in serializable %s"
                        % (member_type, serializable_type)
                    )
                    sys.exit(-1)
                serializable_size += type_size
            self.add_type_size(serializable_type, serializable_size)
            if self.verbose:
                print(
                    "Serializable %s size %d"
                    % (serializable_type, serializable_size)
                )
Example #10
def generate_component(the_parsed_component_xml,
                       xml_filename,
                       opt,
                       topology_model=None):
    """
    Creates a component meta-model, configures visitors and
    generates the component files.  Nothing is returned.
    """
    parsed_port_xml_list = []
    if opt.gen_report:
        report_file = open(f"{xml_filename.replace('Ai.xml', '')}Report.txt",
                           "w")
        num_input_ports = 0
        num_output_ports = 0

        # Count ports

        for port in the_parsed_component_xml.get_ports():
            if port.get_direction() == "input":
                num_input_ports = num_input_ports + int(port.get_max_number())
            if port.get_direction() == "output":
                num_output_ports = num_output_ports + int(
                    port.get_max_number())
        if len(the_parsed_component_xml.get_ports()):
            if num_input_ports:
                report_file.write(f"Input Ports: {num_input_ports}\n")
            if num_output_ports:
                report_file.write(f"Output Ports: {num_output_ports}\n")

        # Count regular commands
        commands = 0
        idList = ""
        if len(the_parsed_component_xml.get_commands()):
            for command in the_parsed_component_xml.get_commands():
                commands += len(command.get_opcodes())
                for opcode in command.get_opcodes():
                    idList += opcode + ","

        # Count parameter commands
        if len(the_parsed_component_xml.get_parameters()):
            for parameter in the_parsed_component_xml.get_parameters():
                commands += len(parameter.get_set_opcodes())
                for opcode in parameter.get_set_opcodes():
                    idList += opcode + ","
                commands += len(parameter.get_save_opcodes())
                for opcode in parameter.get_save_opcodes():
                    idList += opcode + ","

        if commands > 0:
            report_file.write(
                f"Commands: {commands}\n OpCodes: {idList[:-1]}\n")

        if len(the_parsed_component_xml.get_channels()):
            idList = ""
            channels = 0
            for channel in the_parsed_component_xml.get_channels():
                channels += len(channel.get_ids())
                for id in channel.get_ids():
                    idList += id + ","
            report_file.write(
                f"Channels: {channels}\n ChanIds: {idList[:-1]}\n")

        if len(the_parsed_component_xml.get_events()):
            idList = ""
            events = 0
            for event in the_parsed_component_xml.get_events():
                events += len(event.get_ids())
                for id in event.get_ids():
                    idList += id + ","
            report_file.write(f"Events: {events}\n EventIds: {idList[:-1]}\n")

        if len(the_parsed_component_xml.get_parameters()):
            idList = ""
            parameters = 0
            for parameter in the_parsed_component_xml.get_parameters():
                parameters += len(parameter.get_ids())
                for id in parameter.get_ids():
                    idList += id + ","
            report_file.write(
                f"Parameters: {parameters}\n ParamIds: {idList[:-1]}\n")
    #
    # Configure the meta-model for the component
    #
    port_type_files_list = the_parsed_component_xml.get_port_type_files()

    for port_file in port_type_files_list:
        port_file = search_for_file("Port", port_file)
        xml_parser_obj = XmlPortsParser.XmlPortsParser(port_file)
        # print xml_parser_obj.get_args()
        parsed_port_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    parsed_serializable_xml_list = []
    #
    # Configure the meta-model for the component
    #
    serializable_type_files_list = (
        the_parsed_component_xml.get_serializable_type_files())
    for serializable_file in serializable_type_files_list:
        serializable_file = search_for_file("Serializable", serializable_file)
        xml_parser_obj = XmlSerializeParser.XmlSerializeParser(
            serializable_file
        )  # Telemetry/Params can only use generated serializable types
        # check to make sure that the serializables don't have things that channels and parameters can't have
        # can't have external non-xml members
        if len(xml_parser_obj.get_include_header_files()):
            PRINT.info(
                f"ERROR: Component include serializables cannot use user-defined types. file: {serializable_file}"
            )
            sys.exit(-1)

        # print xml_parser_obj.get_args()
        parsed_serializable_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    #
    # for p in the_parsed_component_xml.get_ports():
    #    print p.get_name(), p.get_type()
    # print parsed_port_xml_list

    # for p in parsed_port_xml_list:
    #    print p.get_interface().get_name(), p.get_interface().get_namespace()
    #    print p.get_args()
    #    print p.get_include_header_files()
    #
    generator = CompFactory.CompFactory.getInstance()
    component_model = generator.create(the_parsed_component_xml,
                                       parsed_port_xml_list,
                                       parsed_serializable_xml_list)

    # tv = [x for x in component_model.get_events()]
    # for event_model in component_model.get_events():
    #    event_model.set_ids([1,2,3])
    #    tv.append(event_model)

    #
    # Configure and create the visitors that will generate the code.
    #
    generator = GenFactory.GenFactory.getInstance()
    #
    # Configure each visitor here.
    #
    if "Ai" in xml_filename:
        base = xml_filename.split("Ai")[0]
        h_instance_name = base + "_H"
        cpp_instance_name = base + "_Cpp"
        h_instance_name_tmpl = base + "_Impl_H"
        cpp_instance_name_tmpl = base + "_Impl_Cpp"
        h_instance_test_name = base + "_Test_H"
        cpp_instance_test_name = base + "_Test_Cpp"
        h_instance_gtest_name = base + "_GTest_H"
        cpp_instance_gtest_name = base + "_GTest_Cpp"
        h_instance_test_impl_name = base + "_TestImpl_H"
        cpp_instance_test_impl_name = base + "_TestImpl_Cpp"
    else:
        PRINT.info("Missing Ai at end of file name...")
        raise OSError

    #
    if opt.impl_flag:
        PRINT.info("Enabled generation of implementation template files...")
        generator.configureVisitor(h_instance_name_tmpl, "ImplHVisitor", True,
                                   True)
        generator.configureVisitor(cpp_instance_name_tmpl, "ImplCppVisitor",
                                   True, True)
    elif opt.unit_test:
        PRINT.info("Enabled generation of unit test component files...")
        generator.configureVisitor(h_instance_test_name,
                                   "ComponentTestHVisitor", True, True)
        generator.configureVisitor(cpp_instance_test_name,
                                   "ComponentTestCppVisitor", True, True)
        generator.configureVisitor(h_instance_gtest_name, "GTestHVisitor",
                                   True, True)
        generator.configureVisitor(cpp_instance_gtest_name, "GTestCppVisitor",
                                   True, True)
        generator.configureVisitor(h_instance_test_impl_name,
                                   "TestImplHVisitor", True, True)
        generator.configureVisitor(cpp_instance_test_impl_name,
                                   "TestImplCppVisitor", True, True)
    else:
        generator.configureVisitor(h_instance_name, "ComponentHVisitor", True,
                                   True)
        generator.configureVisitor(cpp_instance_name, "ComponentCppVisitor",
                                   True, True)

    # for port_file in port_type_files_list:
    #    if "Ai" in port_file:
    #        base = port_file.split("Ai")[0]
    #        h_instance_name = base + "_H"
    #        cpp_instance_name = base + "_Cpp"
    #    else:
    #        PRINT.info("Missing Ai at end of file: %s" % port_file)
    #        raise IOError
    #    generator.configureVisitor(h_instance_name, "PortCppVisitor", True, True)
    #    generator.configureVisitor(cpp_instance_name, "PortHVisitor", True, True)
    #
    # The idea here is that each of these generators is used to create
    # a certain portion of each output file.
    #
    initFiles = generator.create("initFiles")
    startSource = generator.create("startSource")
    includes1 = generator.create("includes1")
    includes2 = generator.create("includes2")
    namespace = generator.create("namespace")
    public = generator.create("public")
    protected = generator.create("protected")
    private = generator.create("private")
    finishSource = generator.create("finishSource")

    #
    # Generate the source code products here.
    #
    # 1. Open all the files
    initFiles(component_model)
    #
    # 2. Produce the Caltech notice and other starting content here.
    startSource(component_model)
    #
    # 3. Generate includes that all files get here.
    includes1(component_model)
    #
    # 4. Generate includes that are specific to the model here.
    includes2(component_model)
    #
    # 5. Generate start of namespace here.
    namespace(component_model)
    #
    # 6. Generate public class code here.
    public(component_model)
    #
    # 7. Generate protected class code here.
    protected(component_model)
    #
    # 8. Generate private class code here.
    private(component_model)
    #
    # 9. Generate final code here and close all files.
    finishSource(component_model)
    #

    # if requested, generate ground system dictionary
    if opt.default_dict:
        if opt.dict_dir is None:
            PRINT.info("Dictionary output directory not specified!")
            raise OSError
        os.environ["DICT_DIR"] = opt.dict_dir
        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through command instances
        default_dict_generator.configureVisitor("Commands", "CommandVisitor",
                                                True, True)
        for command_model in component_model.get_commands():
            DEBUG.info(f"Processing command {command_model.get_mnemonic()}")
            defaultStartCmd = default_dict_generator.create("DictStart")
            defaultCmdHeader = default_dict_generator.create("DictHeader")
            defaultCmdBody = default_dict_generator.create("DictBody")

            defaultStartCmd(command_model)
            defaultCmdHeader(command_model)
            defaultCmdBody(command_model)

        for parameter_model in component_model.get_parameters():
            DEBUG.info(f"Processing parameter {parameter_model.get_name()}")
            defaultStartCmd = default_dict_generator.create("DictStart")
            defaultCmdHeader = default_dict_generator.create("DictHeader")
            defaultCmdBody = default_dict_generator.create("DictBody")

            defaultStartCmd(parameter_model)
            defaultCmdHeader(parameter_model)
            defaultCmdBody(parameter_model)

        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through event instances
        default_dict_generator.configureVisitor("Events", "EventVisitor", True,
                                                True)
        for event_model in component_model.get_events():
            DEBUG.info(f"Processing event {event_model.get_name()}")
            defaultStartEvent = default_dict_generator.create("DictStart")
            defaultEventHeader = default_dict_generator.create("DictHeader")
            defaultEventBody = default_dict_generator.create("DictBody")

            defaultStartEvent(event_model)
            defaultEventHeader(event_model)
            defaultEventBody(event_model)

        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through channel instances
        default_dict_generator.configureVisitor("Channels", "ChannelVisitor",
                                                True, True)
        for channel_model in component_model.get_channels():
            DEBUG.info(f"Processing channel {channel_model.get_name()}")
            defaultStartChannel = default_dict_generator.create("DictStart")
            defaultChannelHeader = default_dict_generator.create("DictHeader")
            defaultChannelBody = default_dict_generator.create("DictBody")

            defaultStartChannel(channel_model)
            defaultChannelHeader(channel_model)
            defaultChannelBody(channel_model)

    if opt.html_docs:
        if opt.html_doc_dir is None:
            PRINT.info("HTML documentation output directory not specified!")
            raise OSError

        os.environ["HTML_DOC_SUBDIR"] = opt.html_doc_dir
        html_doc_generator = GenFactory.GenFactory.getInstance()
        html_doc_generator.configureVisitor(base + "_Html", "HtmlDocVisitor",
                                            True, True)
        htmlStart = html_doc_generator.create("HtmlStart")
        htmlDoc = html_doc_generator.create("HtmlDoc")
        finisher = html_doc_generator.create("finishSource")
        htmlStart(component_model)
        htmlDoc(component_model)
        finisher(component_model)

    if opt.md_docs:
        if opt.md_doc_dir is None:
            PRINT.info("MD documentation output directory not specified!")
            raise OSError

        os.environ["MD_DOC_SUBDIR"] = opt.md_doc_dir
        md_doc_generator = GenFactory.GenFactory.getInstance()
        md_doc_generator.configureVisitor(base + "_Md", "MdDocVisitor", True,
                                          True)
        mdStart = md_doc_generator.create("MdStart")
        mdDoc = md_doc_generator.create("MdDoc")
        finisher = md_doc_generator.create("finishSource")
        mdStart(component_model)
        mdDoc(component_model)
        finisher(component_model)
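
For reference, the visitor selection in generate_component above reduces to three mutually exclusive sets keyed off opt.impl_flag and opt.unit_test. The mapping below is only an illustrative summary: suffixes and visitor names are copied from the branches above, but the dictionary itself is not in the original source.

# Illustrative summary only -- the real wiring is the if/elif/else above.
COMPONENT_VISITOR_SETS = {
    "impl_flag": [("_Impl_H", "ImplHVisitor"), ("_Impl_Cpp", "ImplCppVisitor")],
    "unit_test": [
        ("_Test_H", "ComponentTestHVisitor"),
        ("_Test_Cpp", "ComponentTestCppVisitor"),
        ("_GTest_H", "GTestHVisitor"),
        ("_GTest_Cpp", "GTestCppVisitor"),
        ("_TestImpl_H", "TestImplHVisitor"),
        ("_TestImpl_Cpp", "TestImplCppVisitor"),
    ],
    "default": [("_H", "ComponentHVisitor"), ("_Cpp", "ComponentCppVisitor")],
}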
Example #11
def generate_component_instance_dictionary(the_parsed_component_xml, opt,
                                           topology_model):
    global DEPLOYMENT

    #
    parsed_port_xml_list = []
    parsed_serializable_xml_list = []
    # uses the topology model to process the items
    # checks if the topology model exists
    if topology_model is None:
        PRINT.info(
            "Topology model was not specified. Please also input a topology model when running this command."
        )
        raise OSError

    port_type_files_list = the_parsed_component_xml.get_port_type_files()

    for port_file in port_type_files_list:
        port_file = search_for_file("Port", port_file)
        xml_parser_obj = XmlPortsParser.XmlPortsParser(port_file)
        # print xml_parser_obj.get_args()
        parsed_port_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    serializable_type_files_list = (
        the_parsed_component_xml.get_serializable_type_files())
    for serializable_file in serializable_type_files_list:
        serializable_file = search_for_file("Serializable", serializable_file)
        xml_parser_obj = XmlSerializeParser.XmlSerializeParser(
            serializable_file
        )  # Telemetry/Params can only use generated serializable types
        # check to make sure that the serializables don't have things that channels and parameters can't have
        # can't have external non-xml members
        if len(xml_parser_obj.get_include_header_files()):
            PRINT.info(
                f"ERROR: Component include serializables cannot use user-defined types. file: {serializable_file}"
            )
            sys.exit(-1)

        # print xml_parser_obj.get_args()
        parsed_serializable_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    generator = CompFactory.CompFactory.getInstance()
    component_model = generator.create(the_parsed_component_xml,
                                       parsed_port_xml_list,
                                       parsed_serializable_xml_list)

    if opt.default_topology_dict:
        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through command instances
        default_dict_generator.configureVisitor("Commands",
                                                "InstanceCommandVisitor", True,
                                                True)
        for command_model in component_model.get_commands():
            DEBUG.info(f"Processing command {command_model.get_mnemonic()}")
            defaultStartCmd = default_dict_generator.create(
                "InstanceDictStart")
            defaultCmdHeader = default_dict_generator.create(
                "InstanceDictHeader")
            defaultCmdBody = default_dict_generator.create("InstanceDictBody")

            defaultStartCmd(command_model, topology_model)
            defaultCmdHeader(command_model, topology_model)
            defaultCmdBody(command_model, topology_model)

        for parameter_model in component_model.get_parameters():
            DEBUG.info(f"Processing parameter {parameter_model.get_name()}")
            defaultStartCmd = default_dict_generator.create(
                "InstanceDictStart")
            defaultCmdHeader = default_dict_generator.create(
                "InstanceDictHeader")
            defaultCmdBody = default_dict_generator.create("InstanceDictBody")

            defaultStartCmd(parameter_model, topology_model)
            defaultCmdHeader(parameter_model, topology_model)
            defaultCmdBody(parameter_model, topology_model)

        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through event instances
        default_dict_generator.configureVisitor("Events",
                                                "InstanceEventVisitor", True,
                                                True)
        for event_model in component_model.get_events():
            DEBUG.info(f"Processing event {event_model.get_name()}")
            defaultStartEvent = default_dict_generator.create(
                "InstanceDictStart")
            defaultEventHeader = default_dict_generator.create(
                "InstanceDictHeader")
            defaultEventBody = default_dict_generator.create(
                "InstanceDictBody")

            defaultStartEvent(event_model, topology_model)
            defaultEventHeader(event_model, topology_model)
            defaultEventBody(event_model, topology_model)

        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through channel instances
        default_dict_generator.configureVisitor("Channels",
                                                "InstanceChannelVisitor", True,
                                                True)
        for channel_model in component_model.get_channels():
            DEBUG.info(f"Processing channel {channel_model.get_name()}")
            defaultStartChannel = default_dict_generator.create(
                "InstanceDictStart")
            defaultChannelHeader = default_dict_generator.create(
                "InstanceDictHeader")
            defaultChannelBody = default_dict_generator.create(
                "InstanceDictBody")

            defaultStartChannel(channel_model, topology_model)
            defaultChannelHeader(channel_model, topology_model)
            defaultChannelBody(channel_model, topology_model)
Example #12
def generate_xml_dict(the_parsed_topology_xml, xml_filename, opt):
    """
    Generates GDS XML dictionary from parsed topology XML
    """
    if VERBOSE:
        print("Topology xml type description file: %s" % xml_filename)
    model = TopoFactory.TopoFactory.getInstance()
    topology_model = model.create(the_parsed_topology_xml)

    GenFactory.GenFactory.getInstance()

    # uses the topology model to process the items
    # create list of used parsed component xmls
    parsed_xml_dict = {}
    for comp in the_parsed_topology_xml.get_instances():
        if comp.get_type() in topology_model.get_base_id_dict():
            parsed_xml_dict[comp.get_type()] = comp.get_comp_xml()
        else:
            PRINT.info(
                "Component type {} is not in the topology model.".format(
                    comp.get_type()))

    #
    xml_list = []
    for parsed_xml_type in parsed_xml_dict:
        if parsed_xml_dict[parsed_xml_type] is None:
            print(
                "ERROR: XML of type {} is being used, but has not been parsed correctly. Check if file exists or add xml file with the 'import_component_type' tag to the Topology file."
                .format(parsed_xml_type))
            raise Exception()
        xml_list.append(parsed_xml_dict[parsed_xml_type])

    topology_model.set_instance_xml_list(xml_list)

    topology_dict = etree.Element("dictionary")
    topology_dict.attrib["topology"] = the_parsed_topology_xml.get_name()
    # create a new XML tree for dictionary
    enum_list = etree.Element("enums")
    serializable_list = etree.Element("serializables")
    command_list = etree.Element("commands")
    event_list = etree.Element("events")
    telemetry_list = etree.Element("channels")
    parameter_list = etree.Element("parameters")

    for comp in the_parsed_topology_xml.get_instances():
        comp_type = comp.get_type()
        comp_name = comp.get_name()
        comp_id = int(comp.get_base_id())
        PRINT.debug("Processing {} [{}] ({})".format(comp_name, comp_type,
                                                     hex(comp_id)))

        # check for included serializable XML
        if parsed_xml_dict[comp_type].get_serializable_type_files(
        ) is not None:
            serializable_file_list = parsed_xml_dict[
                comp_type].get_serializable_type_files()
            for serializable_file in serializable_file_list:
                serializable_file = search_for_file("Serializable",
                                                    serializable_file)
                serializable_model = XmlSerializeParser.XmlSerializeParser(
                    serializable_file)
                if len(serializable_model.get_includes()) != 0:
                    raise Exception(
                        "%s: Can only include one level of serializable for dictionaries"
                        % serializable_file)
                serializable_elem = etree.Element("serializable")
                serializable_type = (serializable_model.get_namespace() +
                                     "::" + serializable_model.get_name())
                serializable_elem.attrib["type"] = serializable_type
                members_elem = etree.Element("members")
                for (
                        member_name,
                        member_type,
                        member_size,
                        member_format_specifier,
                        member_comment,
                ) in serializable_model.get_members():
                    member_elem = etree.Element("member")
                    member_elem.attrib["name"] = member_name
                    member_elem.attrib[
                        "format_specifier"] = member_format_specifier
                    if member_comment is not None:
                        member_elem.attrib["description"] = member_comment
                    if isinstance(member_type, tuple):
                        enum_value = 0
                        type_name = "{}::{}::{}".format(
                            serializable_type,
                            member_name,
                            member_type[0][1],
                        )
                        # Add enum entry
                        enum_elem = etree.Element("enum")
                        enum_elem.attrib["type"] = type_name
                        # Add enum members
                        for (membername, value, comment) in member_type[1]:
                            enum_mem = etree.Element("item")
                            enum_mem.attrib["name"] = membername
                            # keep track of incrementing enum value
                            if value is not None:
                                enum_value = int(value)

                            enum_mem.attrib["value"] = "%d" % enum_value
                            enum_value = enum_value + 1
                            if comment is not None:
                                enum_mem.attrib["description"] = comment
                            enum_elem.append(enum_mem)
                        enum_list.append(enum_elem)
                    else:
                        type_name = member_type
                        if member_type == "string":
                            member_elem.attrib["len"] = member_size
                    member_elem.attrib["type"] = type_name
                    members_elem.append(member_elem)
                serializable_elem.append(members_elem)

                dup = False
                for ser in serializable_list:
                    if ser.attrib["type"] == serializable_elem.attrib["type"]:
                        dup = True
                if not dup:
                    serializable_list.append(serializable_elem)

        # check for commands
        if parsed_xml_dict[comp_type].get_commands() is not None:
            for command in parsed_xml_dict[comp_type].get_commands():
                if VERBOSE:
                    print("Processing Command %s" % command.get_mnemonic())
                command_elem = etree.Element("command")
                command_elem.attrib["component"] = comp_name
                command_elem.attrib["mnemonic"] = command.get_mnemonic()
                command_elem.attrib["opcode"] = "%s" % (
                    hex(int(command.get_opcodes()[0], base=0) + comp_id))
                if "comment" in list(command_elem.attrib.keys()):
                    command_elem.attrib["description"] = command_elem.attrib[
                        "comment"]
                args_elem = etree.Element("args")
                for arg in command.get_args():
                    arg_elem = etree.Element("arg")
                    arg_elem.attrib["name"] = arg.get_name()
                    arg_type = arg.get_type()
                    if isinstance(arg_type, tuple):
                        enum_value = 0
                        type_name = "{}::{}::{}".format(
                            comp_type,
                            arg.get_name(),
                            arg_type[0][1],
                        )
                        # Add enum entry
                        enum_elem = etree.Element("enum")
                        enum_elem.attrib["type"] = type_name
                        # Add enum members
                        for (membername, value, comment) in arg_type[1]:
                            enum_mem = etree.Element("item")
                            enum_mem.attrib["name"] = membername
                            # keep track of incrementing enum value
                            if value is not None:
                                enum_value = int(value)

                            enum_mem.attrib["value"] = "%d" % enum_value
                            enum_value = enum_value + 1
                            if comment is not None:
                                enum_mem.attrib["description"] = comment
                            enum_elem.append(enum_mem)
                        enum_list.append(enum_elem)
                    else:
                        type_name = arg_type
                        if arg_type == "string":
                            arg_elem.attrib["len"] = arg.get_size()
                    arg_elem.attrib["type"] = type_name
                    args_elem.append(arg_elem)
                command_elem.append(args_elem)
                command_list.append(command_elem)

        # check for channels
        if parsed_xml_dict[comp_type].get_channels() is not None:
            for chan in parsed_xml_dict[comp_type].get_channels():
                if VERBOSE:
                    print("Processing Channel %s" % chan.get_name())
                channel_elem = etree.Element("channel")
                channel_elem.attrib["component"] = comp_name
                channel_elem.attrib["name"] = chan.get_name()
                channel_elem.attrib["id"] = "%s" % (
                    hex(int(chan.get_ids()[0], base=0) + comp_id))
                if chan.get_format_string() is not None:
                    channel_elem.attrib[
                        "format_string"] = chan.get_format_string()
                if chan.get_comment() is not None:
                    channel_elem.attrib["description"] = chan.get_comment()

                channel_elem.attrib["id"] = "%s" % (
                    hex(int(chan.get_ids()[0], base=0) + comp_id))
                if "comment" in list(channel_elem.attrib.keys()):
                    channel_elem.attrib["description"] = channel_elem.attrib[
                        "comment"]
                channel_type = chan.get_type()
                if isinstance(channel_type, tuple):
                    enum_value = 0
                    type_name = "{}::{}::{}".format(
                        comp_type,
                        chan.get_name(),
                        channel_type[0][1],
                    )
                    # Add enum entry
                    enum_elem = etree.Element("enum")
                    enum_elem.attrib["type"] = type_name
                    # Add enum members
                    for (membername, value, comment) in channel_type[1]:
                        enum_mem = etree.Element("item")
                        enum_mem.attrib["name"] = membername
                        # keep track of incrementing enum value
                        if value is not None:
                            enum_value = int(value)

                        enum_mem.attrib["value"] = "%d" % enum_value
                        enum_value = enum_value + 1
                        if comment is not None:
                            enum_mem.attrib["description"] = comment
                        enum_elem.append(enum_mem)
                    enum_list.append(enum_elem)
                else:
                    type_name = channel_type
                    if channel_type == "string":
                        channel_elem.attrib["len"] = chan.get_size()
                (lr, lo, ly, hy, ho, hr) = chan.get_limits()
                if lr is not None:
                    channel_elem.attrib["low_red"] = lr
                if lo is not None:
                    channel_elem.attrib["low_orange"] = lo
                if ly is not None:
                    channel_elem.attrib["low_yellow"] = ly
                if hy is not None:
                    channel_elem.attrib["high_yellow"] = hy
                if ho is not None:
                    channel_elem.attrib["high_orange"] = ho
                if hr is not None:
                    channel_elem.attrib["high_red"] = hr

                channel_elem.attrib["type"] = type_name
                telemetry_list.append(channel_elem)

        # check for events
        if parsed_xml_dict[comp_type].get_events() is not None:
            for event in parsed_xml_dict[comp_type].get_events():
                if VERBOSE:
                    print("Processing Event %s" % event.get_name())
                event_elem = etree.Element("event")
                event_elem.attrib["component"] = comp_name
                event_elem.attrib["name"] = event.get_name()
                event_elem.attrib["id"] = "%s" % (
                    hex(int(event.get_ids()[0], base=0) + comp_id))
                event_elem.attrib["severity"] = event.get_severity()
                format_string = event.get_format_string()
                if "comment" in list(event_elem.attrib.keys()):
                    event_elem.attrib["description"] = event_elem.attrib[
                        "comment"]
                args_elem = etree.Element("args")
                arg_num = 0
                for arg in event.get_args():
                    arg_elem = etree.Element("arg")
                    arg_elem.attrib["name"] = arg.get_name()
                    arg_type = arg.get_type()
                    if isinstance(arg_type, tuple):
                        enum_value = 0
                        type_name = "{}::{}::{}".format(
                            comp_type,
                            arg.get_name(),
                            arg_type[0][1],
                        )
                        # Add enum entry
                        enum_elem = etree.Element("enum")
                        enum_elem.attrib["type"] = type_name
                        # Add enum members
                        for (membername, value, comment) in arg_type[1]:
                            enum_mem = etree.Element("item")
                            enum_mem.attrib["name"] = membername
                            # keep track of incrementing enum value
                            if value is not None:
                                enum_value = int(value)

                            enum_mem.attrib["value"] = "%d" % enum_value
                            enum_value = enum_value + 1
                            if comment is not None:
                                enum_mem.attrib["description"] = comment
                            enum_elem.append(enum_mem)
                        enum_list.append(enum_elem)
                        # replace enum format string %d with %s for ground system
                        format_string = (DictTypeConverter.DictTypeConverter(
                        ).format_replace(format_string, arg_num, "d", "s"))
                    else:
                        type_name = arg_type
                        if arg_type == "string":
                            arg_elem.attrib["len"] = arg.get_size()
                    arg_elem.attrib["type"] = type_name
                    args_elem.append(arg_elem)
                    arg_num += 1
                event_elem.attrib["format_string"] = format_string
                event_elem.append(args_elem)
                event_list.append(event_elem)

        # check for parameters
        if parsed_xml_dict[comp_type].get_parameters() is not None:
            for parameter in parsed_xml_dict[comp_type].get_parameters():
                if VERBOSE:
                    print("Processing Parameter %s" % chan.get_name())
                param_default = None
                command_elem_set = etree.Element("command")
                command_elem_set.attrib["component"] = comp_name
                command_elem_set.attrib["mnemonic"] = parameter.get_name(
                ) + "_PRM_SET"
                command_elem_set.attrib["opcode"] = "%s" % (
                    hex(int(parameter.get_set_opcodes()[0], base=0) + comp_id))
                if "comment" in list(command_elem.attrib.keys()):
                    command_elem_set.attrib["description"] = (
                        command_elem_set.attrib["comment"] + " parameter set")
                else:
                    command_elem_set.attrib["description"] = (
                        parameter.get_name() + " parameter set")

                args_elem = etree.Element("args")
                arg_elem = etree.Element("arg")
                arg_elem.attrib["name"] = "val"
                arg_type = parameter.get_type()
                if isinstance(arg_type, tuple):
                    enum_value = 0
                    type_name = "{}::{}::{}".format(
                        comp_type,
                        arg.get_name(),
                        arg_type[0][1],
                    )
                    # Add enum entry
                    enum_elem = etree.Element("enum")
                    enum_elem.attrib["type"] = type_name
                    # Add enum members
                    for (membername, value, comment) in arg_type[1]:
                        enum_mem = etree.Element("item")
                        enum_mem.attrib["name"] = membername
                        # keep track of incrementing enum value
                        if value is not None:
                            enum_value = int(value)

                        enum_mem.attrib["value"] = "%d" % enum_value
                        enum_value = enum_value + 1
                        if comment is not None:
                            enum_mem.attrib["description"] = comment
                        enum_elem.append(enum_mem)
                        # assign default to be first enum member
                        if param_default is None:
                            param_default = membername
                    enum_list.append(enum_elem)
                else:
                    type_name = arg_type
                    if arg_type == "string":
                        arg_elem.attrib["len"] = arg.get_size()
                    else:
                        param_default = "0"
                arg_elem.attrib["type"] = type_name
                args_elem.append(arg_elem)
                command_elem_set.append(args_elem)
                command_list.append(command_elem_set)

                command_elem_save = etree.Element("command")
                command_elem_save.attrib["component"] = comp_name
                command_elem_save.attrib["mnemonic"] = (parameter.get_name() +
                                                        "_PRM_SAVE")
                command_elem_save.attrib["opcode"] = "%s" % (hex(
                    int(parameter.get_save_opcodes()[0], base=0) + comp_id))
                if "comment" in list(command_elem.attrib.keys()):
                    command_elem_save.attrib["description"] = (
                        command_elem_set.attrib["comment"] + " parameter set")
                else:
                    command_elem_save.attrib["description"] = (
                        parameter.get_name() + " parameter save")

                command_list.append(command_elem_save)

                param_elem = etree.Element("parameter")
                param_elem.attrib["component"] = comp_name
                param_elem.attrib["name"] = parameter.get_name()
                param_elem.attrib["id"] = "%s" % (
                    hex(int(parameter.get_ids()[0], base=0) + comp_id))
                if parameter.get_default() is not None:
                    param_default = parameter.get_default()
                param_elem.attrib["default"] = param_default

                parameter_list.append(param_elem)

    # Remove duplicates from enum list
    temp_enum_list = []
    for enum_elem in enum_list:
        temp_enum_list.append(enum_elem)
    for enum_elem in temp_enum_list:
        should_remove = False
        for temp_enum in enum_list:
            # Skip over comparisons between same exact element
            if id(enum_elem) == id(temp_enum):
                continue

            # Check all attributes
            if temp_enum.attrib["type"] == enum_elem.attrib["type"]:
                should_remove = True
            if len(temp_enum) != len(enum_elem) and should_remove:
                should_remove = False
            children1 = list(temp_enum)
            children2 = list(enum_elem)
            if children1 and children2:
                i = 0
                while i < len(children1) and i < len(children2):
                    if (not children1[i].attrib["name"]
                            == children2[i].attrib["name"] and should_remove):
                        should_remove = False
                    i += 1
            if should_remove:
                break
        if should_remove:
            enum_list.remove(enum_elem)

    topology_dict.append(enum_list)
    topology_dict.append(serializable_list)
    topology_dict.append(command_list)
    topology_dict.append(event_list)
    topology_dict.append(telemetry_list)
    topology_dict.append(parameter_list)

    fileName = the_parsed_topology_xml.get_xml_filename().replace(
        "Ai.xml", "Dictionary.xml")
    if VERBOSE:
        print("Generating XML dictionary %s" % fileName)
    # Note: binary mode forces the same encoding as the source files
    with open(fileName, "wb") as fd:
        fd.write(etree.tostring(topology_dict, pretty_print=True))
    if VERBOSE:
        print("Generated XML dictionary %s" % fileName)

    return topology_model
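
The duplicate-enum removal pass above compares every enum element against every other and drops an element when another entry has the same type and the same ordered member names. A keyed alternative with the same intent is sketched below; the helper name is hypothetical and it operates on the same lxml "enums" element built above.

# Hypothetical helper -- equivalent in intent to the pairwise scan above.
def dedup_enum_elements(enum_list):
    """Drop enum elements whose type and ordered member names repeat an earlier entry."""
    seen = set()
    for enum_elem in list(enum_list):
        key = (
            enum_elem.attrib["type"],
            tuple(item.attrib["name"] for item in enum_elem),
        )
        if key in seen:
            enum_list.remove(enum_elem)
        else:
            seen.add(key)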
Example #13
def write_pymods_from_comp(the_parsed_component_xml, opt, topology_model):
    """
    Writes python modules for a component xml
    "the_parsed_component_xml"
    """
    global BUILD_ROOT
    global DEPLOYMENT
    global VERBOSE

    parsed_port_xml_list = []
    parsed_serializable_xml_list = []
    # uses the topology model to process the items
    # checks if the topology model exists
    if topology_model is None:
        PRINT.info(
            "Topology model was not specified. Please also input a topology model when running this command."
        )
        raise OSError

    port_type_files_list = the_parsed_component_xml.get_port_type_files()

    for port_file in port_type_files_list:
        port_file = search_for_file("Port", port_file)
        xml_parser_obj = XmlPortsParser.XmlPortsParser(port_file)
        parsed_port_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    serializable_type_files_list = (
        the_parsed_component_xml.get_serializable_type_files())
    for serializable_file in serializable_type_files_list:
        serializable_file = search_for_file("Serializable", serializable_file)
        xml_parser_obj = XmlSerializeParser.XmlSerializeParser(
            serializable_file
        )  # Telemetry/Params can only use generated serializable types
        # check to make sure that the serializables don't have things that channels and parameters can't have
        # can't have external non-xml members
        if len(xml_parser_obj.get_include_header_files()):
            print(
                "ERROR: Component include serializables cannot use user-defined types. file: %s"
                % serializable_file)
            sys.exit(-1)

        parsed_serializable_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    model = CompFactory.CompFactory.getInstance()
    component_model = model.create(the_parsed_component_xml,
                                   parsed_port_xml_list,
                                   parsed_serializable_xml_list)

    instChannelWriter = InstChannelWriter.InstChannelWriter()
    instCommandWriter = InstCommandWriter.InstCommandWriter()
    instEventWriter = InstEventWriter.InstEventWriter()

    if opt.dict_dir is None:
        if VERBOSE:
            print(
                "Dictionary output directory not specified; defaulting to current working directory"
            )
        opt.dict_dir = os.getcwd()
    os.environ["DICT_DIR"] = opt.dict_dir

    # iterate through command instances
    for command_model in component_model.get_commands():
        if VERBOSE:
            print("Generating command dict %s" % command_model.get_mnemonic())
        instCommandWriter.DictStartWrite(command_model, topology_model)
        instCommandWriter.DictHeaderWrite(command_model, topology_model)
        instCommandWriter.DictBodyWrite(command_model, topology_model)

    for parameter_model in component_model.get_parameters():
        if VERBOSE:
            print("Generating parameter dict %s" % parameter_model.get_name())
        instCommandWriter.DictStartWrite(parameter_model, topology_model)
        instCommandWriter.DictHeaderWrite(parameter_model, topology_model)
        instCommandWriter.DictBodyWrite(parameter_model, topology_model)

    # iterate through event instances
    for event_model in component_model.get_events():
        if VERBOSE:
            print("Generating event dict %s" % event_model.get_name())
        instEventWriter.DictStartWrite(event_model, topology_model)
        instEventWriter.DictHeaderWrite(event_model, topology_model)
        instEventWriter.DictBodyWrite(event_model, topology_model)

    # iterate through channel instances
    for channel_model in component_model.get_channels():
        if VERBOSE:
            print("Generating channel dict %s" % channel_model.get_name())
        instChannelWriter.DictStartWrite(channel_model, topology_model)
        instChannelWriter.DictHeaderWrite(channel_model, topology_model)
        instChannelWriter.DictBodyWrite(channel_model, topology_model)
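
# --- Illustrative sketch (not part of the original source) ---
# A small, self-contained illustration of the dict_dir fallback used in
# write_pymods_from_comp above: when no dictionary output directory is given,
# the current working directory is used and exported through DICT_DIR.
# The helper name and the SimpleNamespace option object are hypothetical.
import os
from types import SimpleNamespace


def _resolve_dict_dir(opt):
    if opt.dict_dir is None:
        opt.dict_dir = os.getcwd()  # default to cwd, as the writer above does
    os.environ["DICT_DIR"] = opt.dict_dir
    return opt.dict_dir


print(_resolve_dict_dir(SimpleNamespace(dict_dir=None)))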
Example #14
    def gen_packet_file(self, xml_filename):

        view_path = "./Views"

        if not os.path.exists(view_path):
            os.mkdir(view_path)

        # Make sure the input file exists
        if not os.path.isfile(xml_filename):
            raise TlmPacketParseIOError("File %s does not exist!" %
                                        xml_filename)

        if not "PacketsAi" in xml_filename:
            raise IOError("ERROR: Missing PacketsAi at end of file name %s" %
                          xml_filename)

        xml_parser = etree.XMLParser(remove_comments=True)
        with open(xml_filename, "r") as fd:
            element_tree = etree.parse(fd, parser=xml_parser)
        channel_size_dict = None

        ht = Template(header_file_template)
        it = Template(impl_file_template)

        if element_tree.getroot().tag == "packets":

            list_name = element_tree.getroot().attrib["name"]
            list_namespace = element_tree.getroot().attrib["namespace"]
            max_size = int(element_tree.getroot().attrib["size"])

            # fill in template fields for header
            ht.packet_list_name = list_name
            ht.packet_list_namespace = list_namespace

            # fill in template fields for implementation file
            it.packet_list_name = list_name
            it.packet_list_namespace = list_namespace
            it.max_size = max_size

            packet_list_container = []

            packetized_channel_list = []
            it.ignore_list = []
            id_list = []  # check for duplicates
            ignore_name_list = []

            size_dict = {}

            ht.num_packets = 0
            total_packet_size = 0
            levels = []
            # find the topology import
            for entry in element_tree.getroot():
                # read in topology file
                if entry.tag == "import_topology":
                    top_file = search_for_file("Packet", entry.text)
                    if top_file is None:
                        raise TlmPacketParseIOError(
                            "import file %s not found" % entry.text)
                    the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(
                        top_file)
                    deployment = the_parsed_topology_xml.get_deployment()
                    if self.verbose:
                        print("Found assembly or deployment named: %s\n" %
                              deployment)
                    channel_size_dict = self.generate_channel_size_dict(
                        the_parsed_topology_xml, xml_filename)
                elif entry.tag == "packet":
                    if channel_size_dict is None:
                        raise TlmPacketParseValueError(
                            "%s: Topology import must be before packet definitions"
                            % xml_filename)
                    packet_size = 0
                    packet_name = entry.attrib["name"]
                    # Open a text file for a GDS view
                    vfd = open("%s/%s.txt" % (view_path, packet_name), "w")
                    packet_id = entry.attrib["id"]
                    packet_level = entry.attrib["level"]
                    print("Packetizing %s (%s)" % (packet_name, packet_id))
                    if packet_id in id_list:
                        raise TlmPacketParseValueError(
                            "Duplicate packet id %s" % packet_id)
                    else:
                        id_list.append(packet_id)

                    channel_list = []
                    for channel in entry:
                        channel_name = channel.attrib["name"]
                        if channel_name not in channel_size_dict:
                            raise TlmPacketParseValueError(
                                "Channel %s does not exist" % channel_name)
                        (channel_id,
                         channel_size) = channel_size_dict[channel_name]
                        packet_size += channel_size
                        if self.verbose:
                            print(" -Channel %s ID %d size %d" %
                                  (channel_name, channel_id, channel_size))
                        channel_list.append(
                            (channel_id, channel_size, channel_name))
                        packetized_channel_list.append(channel_name)
                        vfd.write("%s\n" % channel_name)
                    packet_list_container.append(
                        (packet_name, packet_id, packet_level, channel_list))
                    ht.num_packets += 1
                    packet_size += (
                        11 + 2 + 4
                    )  # raw packet size + time tag + packet id + packet descriptor
                    if packet_size > max_size:
                        raise TlmPacketParseValueError(
                            "Packet %s is too large. Size: %d max: %d" %
                            (packet_name, packet_size, max_size))
                    print("Packet %s size %d/%d" %
                          (packet_name, packet_size, max_size))
                    total_packet_size += packet_size

                    size_dict[packet_level] = (
                        size_dict.get(packet_level, 0) + packet_size)

                    if packet_level not in levels:
                        levels.append(packet_level)
                    vfd.close()

                elif entry.tag == "ignore":
                    if channel_size_dict is None:
                        raise TlmPacketParseValueError(
                            "%s: Topology import must be before packet definitions"
                            % xml_filename)
                    for channel in entry:
                        channel_name = channel.attrib["name"]
                        if channel_name not in channel_size_dict:
                            raise TlmPacketParseValueError(
                                "Channel %s does not exist" % channel_name)
                        (channel_id,
                         channel_size) = channel_size_dict[channel_name]
                        it.ignore_list.append((channel_id, channel_name))
                        if self.verbose:
                            print("Channel %s (%d) ignored" %
                                  (channel_name, channel_id))
                        ignore_name_list.append(channel_name)
                else:
                    raise TlmPacketParseValueError("Invalid packet tag %s" %
                                                   entry.tag)

                if self.verbose:
                    print("Entry: %s" % entry.tag)
        else:
            raise TlmPacketParseValueError("Invalid xml type %s" %
                                           element_tree.getroot().tag)

        output_file_base = os.path.splitext(
            os.path.basename(xml_filename))[0].replace("Ai", "")
        file_dir = os.path.dirname(xml_filename).replace(
            get_nearest_build_root(xml_filename) + os.sep, "")

        missing_channels = False

        for channel in channel_size_dict:
            if (channel not in packetized_channel_list
                    and channel not in ignore_name_list):
                (channel_id, channel_size) = channel_size_dict[channel]
                print("Channel %s (%d) not packetized or ignored." %
                      (channel, channel_id))
                missing_channels = True

        if missing_channels:
            raise TlmPacketParseValueError("Channels missing from packets")

        header = "%sAc.hpp" % output_file_base
        source = "%sAc.cpp" % output_file_base
        print("Generating %s and %s" % (header, source))
        levels.sort()
        for level in levels:
            print("Level: %s Bytes: %d bits: %d" %
                  (level, size_dict[level], size_dict[level] * 8))
        print("Number of packets: %d\nTotal packet bytes: %d bits: %d" %
              (ht.num_packets, total_packet_size, total_packet_size * 8))

        it.packet_list = packet_list_container
        it.output_header = "%s/%sAc.hpp" % (file_dir, output_file_base)

        with open(header, "w") as header_fd:
            header_fd.write(str(ht))
        with open(source, "w") as source_fd:
            source_fd.write(str(it))

        target_directory = os.getcwd().replace("\\", os.sep)
        header_target = target_directory + os.sep + header
        source_target = target_directory + os.sep + source

        # write dependency file
        if self.dependency is not None:
            dependency_file_txt = "\n%s %s: %s\n" % (
                source_target,
                header_target,
                top_file,
            )
            with open(self.dependency, "w") as dep_fd:
                dep_fd.write(dependency_file_txt)
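
# --- Illustrative sketch (not part of the original source) ---
# A hypothetical *PacketsAi.xml input of the shape gen_packet_file expects:
# a "packets" root carrying name/namespace/size, an import_topology entry,
# packet entries with channel children, and an optional ignore entry.
# All names, ids, and the topology path below are made up.
from lxml import etree

packets = etree.Element("packets", name="RefPackets", namespace="Ref", size="512")
topology = etree.SubElement(packets, "import_topology")
topology.text = "Ref/Top/RefTopologyAppAi.xml"  # hypothetical topology file
packet = etree.SubElement(packets, "packet", name="Health", id="1", level="0")
etree.SubElement(packet, "channel", name="pingRcvr.PR_NumPings")
ignore = etree.SubElement(packets, "ignore")
etree.SubElement(ignore, "channel", name="cmdDisp.CommandsDispatched")
print(etree.tostring(packets, pretty_print=True).decode())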
Example #15
def __port_loader(port_xml: str) -> Tuple[Port, XmlPortsParser]:
    """ Loads a port from an XML filename"""
    port_file = search_for_file("Port", port_xml)
    parsed_port_xml = XmlPortsParser(port_file)
    port = PortFactory.getInstance().create(parsed_port_xml)
    return port, parsed_port_xml
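
# --- Usage note (not part of the original source) ---
# A hypothetical call, assuming the port XML path below resolves through
# search_for_file; the helper returns both the factory-built port model and
# the raw parser object:
#
#     port_model, port_parser = __port_loader("Fw/Cmd/CmdPortAi.xml")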