Example #1
    def check_for_serial_xml(self):
        serializable_file_list = self.__parsed_xml_dict[
            self.__comp_type].get_serializable_type_files()
        if serializable_file_list is not None:
            for serializable_file in serializable_file_list:
                serializable_file = search_for_file("Serializable",
                                                    serializable_file)
                serializable_model = XmlSerializeParser.XmlSerializeParser(
                    serializable_file)
                if len(serializable_model.get_includes()) != 0:
                    raise Exception(
                        "%s: Can only include one level of serializable for dictionaries"
                        % serializable_file)

                # check for included enum XML in included serializable XML
                if len(serializable_model.get_include_enums()) != 0:
                    enum_file_list = serializable_model.get_include_enums()
                    self.__check_enum_files(enum_file_list)

                serializable_elem = etree.Element("serializable")
                serializable_type = (serializable_model.get_namespace() +
                                     "::" + serializable_model.get_name())
                serializable_elem.attrib["type"] = serializable_type
                members_elem = etree.Element("members")
                for (
                        member_name,
                        member_type,
                        member_size,
                        member_format_specifier,
                        member_comment,
                ) in serializable_model.get_members():
                    member_elem = etree.Element("member")
                    member_elem.attrib["name"] = member_name
                    member_elem.attrib[
                        "format_specifier"] = member_format_specifier
                    if member_comment is not None:
                        member_elem.attrib["description"] = member_comment
                    if isinstance(member_type, tuple):
                        type_name = "{}::{}::{}".format(
                            serializable_type,
                            member_name,
                            member_type[0][1],
                        )
                        enum_elem = self.__extract_enum_elem(
                            type_name, member_type[1])
                        self.__enum_list.append(enum_elem)
                    else:
                        type_name = member_type
                        if member_type == "string":
                            member_elem.attrib["len"] = member_size
                    member_elem.attrib["type"] = type_name
                    members_elem.append(member_elem)
                serializable_elem.append(members_elem)

                dup = False
                for ser in self.__serializable_list:
                    if ser.attrib["type"] == serializable_elem.attrib["type"]:
                        dup = True
                if not dup:
                    self.__serializable_list.append(serializable_elem)
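__extract_enum_elem is called above but its definition is not part of this excerpt. A minimal sketch of what such a helper might look like, based on the inline <enum> construction repeated later in Example #5 (the standalone name and signature here are assumptions):

from lxml import etree

def extract_enum_elem(type_name, enum_members):
    """Build an <enum> element from (name, value, comment) member tuples."""
    enum_elem = etree.Element("enum")
    enum_elem.attrib["type"] = type_name
    enum_value = 0
    for member_name, value, comment in enum_members:
        item = etree.Element("item")
        item.attrib["name"] = member_name
        # An explicit value resets the running counter, as in Example #5
        if value is not None:
            enum_value = int(value)
        item.attrib["value"] = "%d" % enum_value
        enum_value += 1
        if comment is not None:
            item.attrib["description"] = comment
        enum_elem.append(item)
    return enum_elem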
Example #2
def parse_component(the_parsed_component_xml, xml_filename, opt):
    """
    Creates a component meta-model from the parsed component
    XML and returns it.
    """
    global BUILD_ROOT
    #

    parsed_port_xml_list = []

    #
    # Configure the meta-model for the component
    #
    port_type_files_list = the_parsed_component_xml.get_port_type_files()

    for port_file in port_type_files_list:
        port_file = search_for_file("Port", port_file)
        xml_parser_obj = XmlPortsParser.XmlPortsParser(port_file)
        parsed_port_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    parsed_serializable_xml_list = []
    #
    # Configure the meta-model for the component
    #
    serializable_type_files_list = (
        the_parsed_component_xml.get_serializable_type_files()
    )
    for serializable_file in serializable_type_files_list:
        serializable_file = search_for_file("Serializable", serializable_file)
        xml_parser_obj = XmlSerializeParser.XmlSerializeParser(
            serializable_file
        )  # Telemetry/Params can only use generated serializable types
        # check to make sure that the serializables don't have things that channels and parameters can't have
        # can't have external non-xml members
        if len(xml_parser_obj.get_include_header_files()):
            print(
                "ERROR: Component include serializables cannot use user-defined types. file: %s"
                % serializable_file
            )
            sys.exit(-1)

        parsed_serializable_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    model = CompFactory.CompFactory.getInstance()
    component_model = model.create(
        the_parsed_component_xml, parsed_port_xml_list, parsed_serializable_xml_list
    )

    return component_model
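search_for_file is used throughout these examples but its definition is not included. A rough sketch of the lookup it appears to perform, modeled on the BUILD_ROOT scan in Example #9; the helper below is hypothetical, not the tool's real implementation:

import os

def find_xml_file(file_type, file_path, build_roots):
    """Return the first existing copy of file_path under the given build roots."""
    for build_root in build_roots:
        candidate = os.path.join(build_root, file_path)
        if os.path.exists(candidate):
            return candidate
    # The real helper reports the XML kind ("Port", "Serializable", ...) on failure
    print("ERROR: %s XML file %s not found under any build root" % (file_type, file_path))
    return None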
Example #3
    def process_serializable_files(self, serializable_file_list):
        for serializable_file in serializable_file_list:
            serializable_file = search_for_file("Serializable",
                                                serializable_file)
            serializable_model = XmlSerializeParser.XmlSerializeParser(
                serializable_file)
            # process XML includes
            self.process_enum_files(serializable_model.get_include_enums())
            self.process_array_files(serializable_model.get_include_arrays())
            self.process_serializable_files(serializable_model.get_includes())
            serializable_type = (serializable_model.get_namespace() + "::" +
                                 serializable_model.get_name())
            serializable_size = 0
            for (
                    member_name,
                    member_type,
                    member_size,
                    member_format_specifier,
                    member_comment,
                    _,
            ) in serializable_model.get_members():
                # if enumeration
                if isinstance(member_type, tuple):
                    type_size = 4  # Fixme: can we put this in a constant somewhere?
                elif member_type in self.size_dict:  # See if it is a registered type
                    type_size = self.size_dict[member_type]
                else:
                    type_size = self.get_type_size(member_type, member_size)
                if type_size is None:
                    print("Illegal type %s in serializable %s" %
                          (member_type, serializable_type))
                    sys.exit(-1)
                serializable_size += type_size
            self.add_type_size(serializable_type, serializable_size)
            if self.verbose:
                print("Serializable %s size %d" %
                      (serializable_type, serializable_size))
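A self-contained illustration of the sizing rules above: 4 bytes per enumeration, a lookup table for registered primitive types, and (as an assumption for this sketch) the declared length for strings, which the real code resolves via get_type_size:

def serializable_size(members, size_dict):
    """Sum member sizes using the same rules as process_serializable_files."""
    total = 0
    for member_name, member_type, member_size in members:
        if isinstance(member_type, tuple):   # enumeration
            total += 4
        elif member_type in size_dict:       # registered primitive type
            total += size_dict[member_type]
        elif member_type == "string":        # assumed: declared length in bytes
            total += int(member_size)
        else:
            raise ValueError("Illegal type %s" % member_type)
    return total

# A U32, an 8-byte string and an enumeration: 4 + 8 + 4 = 16
print(serializable_size(
    [("count", "U32", None),
     ("name", "string", 8),
     ("mode", (("ENUM", "Mode"), [("ON", None, None)]), None)],
    {"U32": 4},
))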
Example #4
def write_pymods_from_comp(the_parsed_component_xml, opt, topology_model):
    """
    Writes python modules for a component xml
    "the_parsed_component_xml"
    """
    global BUILD_ROOT
    global DEPLOYMENT
    global VERBOSE

    parsed_port_xml_list = []
    parsed_serializable_xml_list = []
    # uses the topology model to process the items
    # checks if the topology model exists
    if topology_model is None:
        PRINT.info(
            "Topology model was not specified. Please also input a topology model when running this command."
        )
        raise IOError

    port_type_files_list = the_parsed_component_xml.get_port_type_files()

    for port_file in port_type_files_list:
        port_file = search_for_file("Port", port_file)
        xml_parser_obj = XmlPortsParser.XmlPortsParser(port_file)
        parsed_port_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    serializable_type_files_list = (
        the_parsed_component_xml.get_serializable_type_files())
    for serializable_file in serializable_type_files_list:
        serializable_file = search_for_file("Serializable", serializable_file)
        xml_parser_obj = XmlSerializeParser.XmlSerializeParser(
            serializable_file
        )  # Telemetry/Params can only use generated serializable types
        # check to make sure that the serializables don't have things that channels and parameters can't have
        # can't have external non-xml members
        if len(xml_parser_obj.get_include_header_files()):
            print(
                "ERROR: Component include serializables cannot use user-defined types. file: %s"
                % serializable_file)
            sys.exit(-1)

        parsed_serializable_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    model = CompFactory.CompFactory.getInstance()
    component_model = model.create(the_parsed_component_xml,
                                   parsed_port_xml_list,
                                   parsed_serializable_xml_list)

    instChannelWriter = InstChannelWriter.InstChannelWriter()
    instCommandWriter = InstCommandWriter.InstCommandWriter()
    instEventWriter = InstEventWriter.InstEventWriter()

    if opt.dict_dir is None:
        if VERBOSE:
            print(
                "Dictionary output directory not specified! Defaulting to cwd."
            )
        opt.dict_dir = os.getcwd()
    os.environ["DICT_DIR"] = opt.dict_dir
    default_dict_generator = GenFactory.GenFactory.getInstance()
    # iterate through command instances
    for command_model in component_model.get_commands():
        if VERBOSE:
            print("Generating command dict %s" % command_model.get_mnemonic())
        instCommandWriter.DictStartWrite(command_model, topology_model)
        instCommandWriter.DictHeaderWrite(command_model, topology_model)
        instCommandWriter.DictBodyWrite(command_model, topology_model)

    for parameter_model in component_model.get_parameters():
        if VERBOSE:
            print("Generating parameter dict %s" % parameter_model.get_name())
        instCommandWriter.DictStartWrite(parameter_model, topology_model)
        instCommandWriter.DictHeaderWrite(parameter_model, topology_model)
        instCommandWriter.DictBodyWrite(parameter_model, topology_model)

    default_dict_generator = GenFactory.GenFactory.getInstance()
    # iterate through event instances
    for event_model in component_model.get_events():
        if VERBOSE:
            print("Generating event dict %s" % event_model.get_name())
        instEventWriter.DictStartWrite(event_model, topology_model)
        instEventWriter.DictHeaderWrite(event_model, topology_model)
        instEventWriter.DictBodyWrite(event_model, topology_model)

    default_dict_generator = GenFactory.GenFactory.getInstance()
    # iterate through channel instances
    for channel_model in component_model.get_channels():
        if VERBOSE:
            print("Generating channel dict %s" % channel_model.get_name())
        instChannelWriter.DictStartWrite(channel_model, topology_model)
        instChannelWriter.DictHeaderWrite(channel_model, topology_model)
        instChannelWriter.DictBodyWrite(channel_model, topology_model)
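The four dictionary loops above differ only in the writer object and the model accessor; a compact sketch of the same flow driven by a dispatch table (an editorial suggestion built on the writer interface shown above, not part of the tool itself):

def write_instance_dicts(component_model, topology_model,
                         command_writer, event_writer, channel_writer,
                         verbose=False):
    """Run DictStartWrite/DictHeaderWrite/DictBodyWrite for every model kind."""
    jobs = [
        ("command", component_model.get_commands(), command_writer),
        ("parameter", component_model.get_parameters(), command_writer),
        ("event", component_model.get_events(), event_writer),
        ("channel", component_model.get_channels(), channel_writer),
    ]
    for kind, models, writer in jobs:
        for model in models:
            if verbose:
                print("Generating %s dict" % kind)
            writer.DictStartWrite(model, topology_model)
            writer.DictHeaderWrite(model, topology_model)
            writer.DictBodyWrite(model, topology_model)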
Example #5
def generate_xml_dict(the_parsed_topology_xml, xml_filename, opt):
    """
    Generates GDS XML dictionary from parsed topology XML
    """
    if VERBOSE:
        print("Topology xml type description file: %s" % xml_filename)
    model = TopoFactory.TopoFactory.getInstance()
    topology_model = model.create(the_parsed_topology_xml)

    generator = GenFactory.GenFactory.getInstance()

    #uses the topology model to process the items
    #create list of used parsed component xmls
    parsed_xml_dict = {}
    for comp in the_parsed_topology_xml.get_instances():
        if comp.get_type() in topology_model.get_base_id_dict():
            parsed_xml_dict[comp.get_type()] = comp.get_comp_xml()
        else:
            PRINT.info(
                "Components with type {} aren't in the topology model.".format(
                    comp.get_type()))

    #
    xml_list = []
    for parsed_xml_type in parsed_xml_dict:
        if parsed_xml_dict[parsed_xml_type] is None:
            print(
                "ERROR: XML of type {} is being used, but has not been parsed correctly. Check if file exists or add xml file with the 'import_component_type' tag to the Topology file."
                .format(parsed_xml_type))
            raise Exception()
        xml_list.append(parsed_xml_dict[parsed_xml_type])

    topology_model.set_instance_xml_list(xml_list)

    topology_dict = etree.Element("dictionary")
    topology_dict.attrib["topology"] = the_parsed_topology_xml.get_name()
    # create a new XML tree for dictionary
    enum_list = etree.Element("enums")
    serializable_list = etree.Element("serializables")
    command_list = etree.Element("commands")
    event_list = etree.Element("events")
    telemetry_list = etree.Element("channels")
    parameter_list = etree.Element("parameters")

    for comp in the_parsed_topology_xml.get_instances():
        comp_type = comp.get_type()
        comp_name = comp.get_name()
        comp_id = int(comp.get_base_id())
        PRINT.debug("Processing %s [%s] (%s)" %
                    (comp_name, comp_type, hex(comp_id)))

        # check for included serializable XML
        if parsed_xml_dict[comp_type].get_serializable_type_files() is not None:
            serializable_file_list = parsed_xml_dict[
                comp_type].get_serializable_type_files()
            for serializable_file in serializable_file_list:
                serializable_file = search_for_file("Serializable",
                                                    serializable_file)
                serializable_model = XmlSerializeParser.XmlSerializeParser(
                    serializable_file)
                if len(serializable_model.get_includes()) != 0:
                    raise Exception(
                        "%s: Can only include one level of serializable for dictionaries"
                        % serializable_file)
                serializable_elem = etree.Element("serializable")
                serializable_type = (serializable_model.get_namespace() +
                                     "::" + serializable_model.get_name())
                serializable_elem.attrib["type"] = serializable_type
                members_elem = etree.Element("members")
                for (member_name, member_type, member_size,
                     member_format_specifier,
                     member_comment) in serializable_model.get_members():
                    member_elem = etree.Element("member")
                    member_elem.attrib["name"] = member_name
                    member_elem.attrib[
                        "format_specifier"] = member_format_specifier
                    if member_comment is not None:
                        member_elem.attrib["description"] = member_comment
                    if isinstance(member_type, tuple):
                        enum_value = 0
                        type_name = "%s::%s::%s" % (
                            serializable_type, member_name, member_type[0][1])
                        # Add enum entry
                        enum_elem = etree.Element("enum")
                        enum_elem.attrib["type"] = type_name
                        # Add enum members
                        for (membername, value, comment) in member_type[1]:
                            enum_mem = etree.Element("item")
                            enum_mem.attrib["name"] = membername
                            # keep track of incrementing enum value
                            if value is not None:
                                enum_value = int(value)

                            enum_mem.attrib["value"] = "%d" % enum_value
                            enum_value = enum_value + 1
                            if comment is not None:
                                enum_mem.attrib["description"] = comment
                            enum_elem.append(enum_mem)
                        enum_list.append(enum_elem)
                    else:
                        type_name = member_type
                        if member_type == "string":
                            member_elem.attrib["len"] = member_size
                    member_elem.attrib["type"] = type_name
                    members_elem.append(member_elem)
                serializable_elem.append(members_elem)

                dup = False
                for ser in serializable_list:
                    if ser.attrib["type"] == serializable_elem.attrib["type"]:
                        dup = True
                if not dup:
                    serializable_list.append(serializable_elem)

        # check for commands
        if parsed_xml_dict[comp_type].get_commands() is not None:
            for command in parsed_xml_dict[comp_type].get_commands():
                if VERBOSE:
                    print("Processing Command %s" % command.get_mnemonic())
                command_elem = etree.Element("command")
                command_elem.attrib["component"] = comp_name
                command_elem.attrib["mnemonic"] = command.get_mnemonic()
                command_elem.attrib["opcode"] = "%s" % (
                    hex(int(command.get_opcodes()[0], base=0) + comp_id))
                if "comment" in command_elem.attrib:
                    command_elem.attrib["description"] = command_elem.attrib[
                        "comment"]
                args_elem = etree.Element("args")
                for arg in command.get_args():
                    arg_elem = etree.Element("arg")
                    arg_elem.attrib["name"] = arg.get_name()
                    arg_type = arg.get_type()
                    if isinstance(arg_type, tuple):
                        enum_value = 0
                        type_name = "%s::%s::%s" % (comp_type, arg.get_name(),
                                                    arg_type[0][1])
                        # Add enum entry
                        enum_elem = etree.Element("enum")
                        enum_elem.attrib["type"] = type_name
                        # Add enum members
                        for (membername, value, comment) in arg_type[1]:
                            enum_mem = etree.Element("item")
                            enum_mem.attrib["name"] = membername
                            # keep track of incrementing enum value
                            if value is not None:
                                enum_value = int(value)

                            enum_mem.attrib["value"] = "%d" % enum_value
                            enum_value = enum_value + 1
                            if comment is not None:
                                enum_mem.attrib["description"] = comment
                            enum_elem.append(enum_mem)
                        enum_list.append(enum_elem)
                    else:
                        type_name = arg_type
                        if arg_type == "string":
                            arg_elem.attrib["len"] = arg.get_size()
                    arg_elem.attrib["type"] = type_name
                    args_elem.append(arg_elem)
                command_elem.append(args_elem)
                command_list.append(command_elem)

        # check for channels
        if parsed_xml_dict[comp_type].get_channels() is not None:
            for chan in parsed_xml_dict[comp_type].get_channels():
                if VERBOSE:
                    print("Processing Channel %s" % chan.get_name())
                channel_elem = etree.Element("channel")
                channel_elem.attrib["component"] = comp_name
                channel_elem.attrib["name"] = chan.get_name()
                channel_elem.attrib["id"] = "%s" % (
                    hex(int(chan.get_ids()[0], base=0) + comp_id))
                if chan.get_format_string() is not None:
                    channel_elem.attrib[
                        "format_string"] = chan.get_format_string()
                if chan.get_comment() is not None:
                    channel_elem.attrib["description"] = chan.get_comment()

                if "comment" in channel_elem.attrib:
                    channel_elem.attrib["description"] = channel_elem.attrib[
                        "comment"]
                channel_type = chan.get_type()
                if isinstance(channel_type, tuple):
                    enum_value = 0
                    type_name = "%s::%s::%s" % (comp_type, chan.get_name(),
                                                channel_type[0][1])
                    # Add enum entry
                    enum_elem = etree.Element("enum")
                    enum_elem.attrib["type"] = type_name
                    # Add enum members
                    for (membername, value, comment) in channel_type[1]:
                        enum_mem = etree.Element("item")
                        enum_mem.attrib["name"] = membername
                        # keep track of incrementing enum value
                        if value is not None:
                            enum_value = int(value)

                        enum_mem.attrib["value"] = "%d" % enum_value
                        enum_value = enum_value + 1
                        if comment is not None:
                            enum_mem.attrib["description"] = comment
                        enum_elem.append(enum_mem)
                    enum_list.append(enum_elem)
                else:
                    type_name = channel_type
                    if channel_type == "string":
                        channel_elem.attrib["len"] = chan.get_size()
                (lr, lo, ly, hy, ho, hr) = chan.get_limits()
                if lr is not None:
                    channel_elem.attrib["low_red"] = lr
                if lo is not None:
                    channel_elem.attrib["low_orange"] = lo
                if ly is not None:
                    channel_elem.attrib["low_yellow"] = ly
                if hy is not None:
                    channel_elem.attrib["high_yellow"] = hy
                if ho is not None:
                    channel_elem.attrib["high_orange"] = ho
                if hr is not None:
                    channel_elem.attrib["high_red"] = hr

                channel_elem.attrib["type"] = type_name
                telemetry_list.append(channel_elem)

        # check for events
        if parsed_xml_dict[comp_type].get_events() is not None:
            for event in parsed_xml_dict[comp_type].get_events():
                if VERBOSE:
                    print("Processing Event %s" % event.get_name())
                event_elem = etree.Element("event")
                event_elem.attrib["component"] = comp_name
                event_elem.attrib["name"] = event.get_name()
                event_elem.attrib["id"] = "%s" % (
                    hex(int(event.get_ids()[0], base=0) + comp_id))
                event_elem.attrib["severity"] = event.get_severity()
                format_string = event.get_format_string()
                if "comment" in event_elem.attrib:
                    event_elem.attrib["description"] = event_elem.attrib[
                        "comment"]
                args_elem = etree.Element("args")
                arg_num = 0
                for arg in event.get_args():
                    arg_elem = etree.Element("arg")
                    arg_elem.attrib["name"] = arg.get_name()
                    arg_type = arg.get_type()
                    if isinstance(arg_type, tuple):
                        enum_value = 0
                        type_name = "%s::%s::%s" % (comp_type, arg.get_name(),
                                                    arg_type[0][1])
                        # Add enum entry
                        enum_elem = etree.Element("enum")
                        enum_elem.attrib["type"] = type_name
                        # Add enum members
                        for (membername, value, comment) in arg_type[1]:
                            enum_mem = etree.Element("item")
                            enum_mem.attrib["name"] = membername
                            # keep track of incrementing enum value
                            if value is not None:
                                enum_value = int(value)

                            enum_mem.attrib["value"] = "%d" % enum_value
                            enum_value = enum_value + 1
                            if comment is not None:
                                enum_mem.attrib["description"] = comment
                            enum_elem.append(enum_mem)
                        enum_list.append(enum_elem)
                        # replace enum format string %d with %s for ground system
                        format_string = DictTypeConverter.DictTypeConverter(
                        ).format_replace(format_string, arg_num, 'd', 's')
                    else:
                        type_name = arg_type
                        if arg_type == "string":
                            arg_elem.attrib["len"] = arg.get_size()
                    arg_elem.attrib["type"] = type_name
                    args_elem.append(arg_elem)
                    arg_num += 1
                event_elem.attrib["format_string"] = format_string
                event_elem.append(args_elem)
                event_list.append(event_elem)

        # check for parameters
        if parsed_xml_dict[comp_type].get_parameters() is not None:
            for parameter in parsed_xml_dict[comp_type].get_parameters():
                if VERBOSE:
                    print("Processing Parameter %s" % parameter.get_name())
                param_default = None
                command_elem_set = etree.Element("command")
                command_elem_set.attrib["component"] = comp_name
                command_elem_set.attrib["mnemonic"] = parameter.get_name(
                ) + "_PRM_SET"
                command_elem_set.attrib["opcode"] = "%s" % (
                    hex(int(parameter.get_set_opcodes()[0], base=0) + comp_id))
                if "comment" in command_elem_set.attrib:
                    command_elem_set.attrib[
                        "description"] = command_elem_set.attrib[
                            "comment"] + " parameter set"
                else:
                    command_elem_set.attrib[
                        "description"] = parameter.get_name(
                        ) + " parameter set"

                args_elem = etree.Element("args")
                arg_elem = etree.Element("arg")
                arg_elem.attrib["name"] = "val"
                arg_type = parameter.get_type()
                if isinstance(arg_type, tuple):
                    enum_value = 0
                    type_name = "%s::%s::%s" % (comp_type,
                                                parameter.get_name(),
                                                arg_type[0][1])
                    # Add enum entry
                    enum_elem = etree.Element("enum")
                    enum_elem.attrib["type"] = type_name
                    # Add enum members
                    for (membername, value, comment) in arg_type[1]:
                        enum_mem = etree.Element("item")
                        enum_mem.attrib["name"] = membername
                        # keep track of incrementing enum value
                        if value is not None:
                            enum_value = int(value)

                        enum_mem.attrib["value"] = "%d" % enum_value
                        enum_value = enum_value + 1
                        if comment is not None:
                            enum_mem.attrib["description"] = comment
                        enum_elem.append(enum_mem)
                        # assign default to be first enum member
                        if param_default is None:
                            param_default = membername
                    enum_list.append(enum_elem)
                else:
                    type_name = arg_type
                    if arg_type == "string":
                        arg_elem.attrib["len"] = parameter.get_size()
                    else:
                        param_default = "0"
                arg_elem.attrib["type"] = type_name
                args_elem.append(arg_elem)
                command_elem_set.append(args_elem)
                command_list.append(command_elem_set)

                command_elem_save = etree.Element("command")
                command_elem_save.attrib["component"] = comp_name
                command_elem_save.attrib["mnemonic"] = parameter.get_name(
                ) + "_PRM_SAVE"
                command_elem_save.attrib["opcode"] = "%s" % (hex(
                    int(parameter.get_save_opcodes()[0], base=0) + comp_id))
                if "comment" in command_elem_save.attrib:
                    command_elem_save.attrib[
                        "description"] = command_elem_save.attrib[
                            "comment"] + " parameter save"
                else:
                    command_elem_save.attrib[
                        "description"] = parameter.get_name(
                        ) + " parameter save"

                command_list.append(command_elem_save)

                param_elem = etree.Element("parameter")
                param_elem.attrib["component"] = comp_name
                param_elem.attrib["name"] = parameter.get_name()
                param_elem.attrib["id"] = "%s" % (
                    hex(int(parameter.get_ids()[0], base=0) + comp_id))
                if parameter.get_default() is not None:
                    param_default = parameter.get_default()
                param_elem.attrib["default"] = param_default

                parameter_list.append(param_elem)

    # Remove duplicates from enum list
    temp_enum_list = []
    for enum_elem in enum_list:
        temp_enum_list.append(enum_elem)
    for enum_elem in temp_enum_list:
        should_remove = False
        for temp_enum in enum_list:
            # Skip over comparisons between same exact element
            if enum_elem is temp_enum:
                continue

            # Check all attributes
            if temp_enum.attrib["type"] == enum_elem.attrib["type"]:
                should_remove = True
            if len(temp_enum) != len(enum_elem) and should_remove:
                should_remove = False
            children1 = list(temp_enum)
            children2 = list(enum_elem)
            if children1 and children2:
                i = 0
                while i < len(children1) and i < len(children2):
                    if not children1[i].attrib["name"] == children2[i].attrib[
                            "name"] and should_remove:
                        should_remove = False
                    i += 1
            if should_remove:
                break
        if should_remove:
            enum_list.remove(enum_elem)

    topology_dict.append(enum_list)
    topology_dict.append(serializable_list)
    topology_dict.append(command_list)
    topology_dict.append(event_list)
    topology_dict.append(telemetry_list)
    topology_dict.append(parameter_list)

    fileName = the_parsed_topology_xml.get_xml_filename().replace(
        "Ai.xml", "Dictionary.xml")
    if VERBOSE:
        print("Generating XML dictionary %s" % fileName)
    with open(fileName, "wb") as fd:
        # Note: binary mode forces the same encoding as the source files
        fd.write(etree.tostring(topology_dict, pretty_print=True))
    if VERBOSE:
        print("Generated XML dictionary %s" % fileName)

    return topology_model
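The duplicate-enum pass at the end compares elements pairwise; an equivalent, simpler pass keyed on the enum type plus its ordered item names (the same criteria the original comparison uses) might look like this:

def dedup_enums(enum_list):
    """Drop later <enum> children whose type and item names repeat an earlier one."""
    seen = set()
    for enum_elem in list(enum_list):
        key = (enum_elem.attrib["type"],
               tuple(item.attrib["name"] for item in enum_elem))
        if key in seen:
            enum_list.remove(enum_elem)
        else:
            seen.add(key)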
Example #6
def generate_component(the_parsed_component_xml,
                       xml_filename,
                       opt,
                       topology_model=None):
    """
    Creates a component meta-model, configures visitors and
    generates the component files.  Nothing is returned.
    """
    parsed_port_xml_list = []
    if opt.gen_report:
        report_file = open(f"{xml_filename.replace('Ai.xml', '')}Report.txt",
                           "w")
        num_input_ports = 0
        num_output_ports = 0

        # Count ports

        for port in the_parsed_component_xml.get_ports():
            if port.get_direction() == "input":
                num_input_ports = num_input_ports + int(port.get_max_number())
            if port.get_direction() == "output":
                num_output_ports = num_output_ports + int(
                    port.get_max_number())
        if len(the_parsed_component_xml.get_ports()):
            if num_input_ports:
                report_file.write(f"Input Ports: {num_input_ports}\n")
            if num_output_ports:
                report_file.write(f"Output Ports: {num_output_ports}\n")

        # Count regular commands
        commands = 0
        idList = ""
        if len(the_parsed_component_xml.get_commands()):
            for command in the_parsed_component_xml.get_commands():
                commands += len(command.get_opcodes())
                for opcode in command.get_opcodes():
                    idList += opcode + ","

        # Count parameter commands
        if len(the_parsed_component_xml.get_parameters()):
            for parameter in the_parsed_component_xml.get_parameters():
                commands += len(parameter.get_set_opcodes())
                for opcode in parameter.get_set_opcodes():
                    idList += opcode + ","
                commands += len(parameter.get_save_opcodes())
                for opcode in parameter.get_save_opcodes():
                    idList += opcode + ","

        if commands > 0:
            report_file.write(
                f"Commands: {commands}\n OpCodes: {idList[:-1]}\n")

        if len(the_parsed_component_xml.get_channels()):
            idList = ""
            channels = 0
            for channel in the_parsed_component_xml.get_channels():
                channels += len(channel.get_ids())
                for id in channel.get_ids():
                    idList += id + ","
            report_file.write(
                f"Channels: {channels}\n ChanIds: {idList[:-1]}\n")

        if len(the_parsed_component_xml.get_events()):
            idList = ""
            events = 0
            for event in the_parsed_component_xml.get_events():
                events += len(event.get_ids())
                for id in event.get_ids():
                    idList += id + ","
            report_file.write(f"Events: {events}\n EventIds: {idList[:-1]}\n")

        if len(the_parsed_component_xml.get_parameters()):
            idList = ""
            parameters = 0
            for parameter in the_parsed_component_xml.get_parameters():
                parameters += len(parameter.get_ids())
                for id in parameter.get_ids():
                    idList += id + ","
            report_file.write(
                f"Parameters: {parameters}\n ParamIds: {idList[:-1]}\n")
    #
    # Configure the meta-model for the component
    #
    port_type_files_list = the_parsed_component_xml.get_port_type_files()

    for port_file in port_type_files_list:
        port_file = search_for_file("Port", port_file)
        xml_parser_obj = XmlPortsParser.XmlPortsParser(port_file)
        # print xml_parser_obj.get_args()
        parsed_port_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    parsed_serializable_xml_list = []
    #
    # Configure the meta-model for the component
    #
    serializable_type_files_list = (
        the_parsed_component_xml.get_serializable_type_files())
    for serializable_file in serializable_type_files_list:
        serializable_file = search_for_file("Serializable", serializable_file)
        xml_parser_obj = XmlSerializeParser.XmlSerializeParser(
            serializable_file
        )  # Telemetry/Params can only use generated serializable types
        # check to make sure that the serializables don't have things that channels and parameters can't have
        # can't have external non-xml members
        if len(xml_parser_obj.get_include_header_files()):
            PRINT.info(
                f"ERROR: Component include serializables cannot use user-defined types. file: {serializable_file}"
            )
            sys.exit(-1)

        # print xml_parser_obj.get_args()
        parsed_serializable_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    #
    # for p in the_parsed_component_xml.get_ports():
    #    print p.get_name(), p.get_type()
    # print parsed_port_xml_list

    # for p in parsed_port_xml_list:
    #    print p.get_interface().get_name(), p.get_interface().get_namespace()
    #    print p.get_args()
    #    print p.get_include_header_files()
    #
    generator = CompFactory.CompFactory.getInstance()
    component_model = generator.create(the_parsed_component_xml,
                                       parsed_port_xml_list,
                                       parsed_serializable_xml_list)

    # tv = [x for x in component_model.get_events()]
    # for event_model in component_model.get_events():
    #    event_model.set_ids([1,2,3])
    #    tv.append(event_model)

    #
    # Configure and create the visitors that will generate the code.
    #
    generator = GenFactory.GenFactory.getInstance()
    #
    # Configure each visitor here.
    #
    if "Ai" in xml_filename:
        base = xml_filename.split("Ai")[0]
        h_instance_name = base + "_H"
        cpp_instance_name = base + "_Cpp"
        h_instance_name_tmpl = base + "_Impl_H"
        cpp_instance_name_tmpl = base + "_Impl_Cpp"
        h_instance_test_name = base + "_Test_H"
        cpp_instance_test_name = base + "_Test_Cpp"
        h_instance_gtest_name = base + "_GTest_H"
        cpp_instance_gtest_name = base + "_GTest_Cpp"
        h_instance_test_impl_name = base + "_TestImpl_H"
        cpp_instance_test_impl_name = base + "_TestImpl_Cpp"
    else:
        PRINT.info("Missing Ai at end of file name...")
        raise OSError

    #
    if opt.impl_flag:
        PRINT.info("Enabled generation of implementation template files...")
        generator.configureVisitor(h_instance_name_tmpl, "ImplHVisitor", True,
                                   True)
        generator.configureVisitor(cpp_instance_name_tmpl, "ImplCppVisitor",
                                   True, True)
    elif opt.unit_test:
        PRINT.info("Enabled generation of unit test component files...")
        generator.configureVisitor(h_instance_test_name,
                                   "ComponentTestHVisitor", True, True)
        generator.configureVisitor(cpp_instance_test_name,
                                   "ComponentTestCppVisitor", True, True)
        generator.configureVisitor(h_instance_gtest_name, "GTestHVisitor",
                                   True, True)
        generator.configureVisitor(cpp_instance_gtest_name, "GTestCppVisitor",
                                   True, True)
        generator.configureVisitor(h_instance_test_impl_name,
                                   "TestImplHVisitor", True, True)
        generator.configureVisitor(cpp_instance_test_impl_name,
                                   "TestImplCppVisitor", True, True)
    else:
        generator.configureVisitor(h_instance_name, "ComponentHVisitor", True,
                                   True)
        generator.configureVisitor(cpp_instance_name, "ComponentCppVisitor",
                                   True, True)

    # for port_file in port_type_files_list:
    #    if "Ai" in port_file:
    #        base = port_file.split("Ai")[0]
    #        h_instance_name = base + "_H"
    #        cpp_instance_name = base + "_Cpp"
    #    else:
    #        PRINT.info("Missing Ai at end of file: %s" % port_file)
    #        raise IOError
    #    generator.configureVisitor(h_instance_name, "PortCppVisitor", True, True)
    #    generator.configureVisitor(cpp_instance_name, "PortHVisitor", True, True)
    #
    # The idea here is that each of these generators is used to create
    # a certain portion of each output file.
    #
    initFiles = generator.create("initFiles")
    startSource = generator.create("startSource")
    includes1 = generator.create("includes1")
    includes2 = generator.create("includes2")
    namespace = generator.create("namespace")
    public = generator.create("public")
    protected = generator.create("protected")
    private = generator.create("private")
    finishSource = generator.create("finishSource")

    #
    # Generate the source code products here.
    #
    # 1. Open all the files
    initFiles(component_model)
    #
    # 2. Produce caltech notice here and other starting stuff.
    startSource(component_model)
    #
    # 3. Generate includes that all files get here.
    includes1(component_model)
    #
    # 4. Generate includes from the model that are specific to this component.
    includes2(component_model)
    #
    # 5. Generate start of namespace here.
    namespace(component_model)
    #
    # 6. Generate public class code here.
    public(component_model)
    #
    # 7. Generate protected class code here.
    protected(component_model)
    #
    # 8. Generate private class code here.
    private(component_model)
    #
    # 9. Generate final code here and close all files.
    finishSource(component_model)
    #

    # if requested, generate ground system dictionary
    if opt.default_dict:
        if opt.dict_dir is None:
            PRINT.info("Dictionary output directory not specified!")
            raise OSError
        os.environ["DICT_DIR"] = opt.dict_dir
        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through command instances
        default_dict_generator.configureVisitor("Commands", "CommandVisitor",
                                                True, True)
        for command_model in component_model.get_commands():
            DEBUG.info(f"Processing command {command_model.get_mnemonic()}")
            defaultStartCmd = default_dict_generator.create("DictStart")
            defaultCmdHeader = default_dict_generator.create("DictHeader")
            defaultCmdBody = default_dict_generator.create("DictBody")

            defaultStartCmd(command_model)
            defaultCmdHeader(command_model)
            defaultCmdBody(command_model)

        for parameter_model in component_model.get_parameters():
            DEBUG.info(f"Processing parameter {parameter_model.get_name()}")
            defaultStartCmd = default_dict_generator.create("DictStart")
            defaultCmdHeader = default_dict_generator.create("DictHeader")
            defaultCmdBody = default_dict_generator.create("DictBody")

            defaultStartCmd(parameter_model)
            defaultCmdHeader(parameter_model)
            defaultCmdBody(parameter_model)

        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through event instances
        default_dict_generator.configureVisitor("Events", "EventVisitor", True,
                                                True)
        for event_model in component_model.get_events():
            DEBUG.info(f"Processing event {event_model.get_name()}")
            defaultStartEvent = default_dict_generator.create("DictStart")
            defaultEventHeader = default_dict_generator.create("DictHeader")
            defaultEventBody = default_dict_generator.create("DictBody")

            defaultStartEvent(event_model)
            defaultEventHeader(event_model)
            defaultEventBody(event_model)

        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through channel instances
        default_dict_generator.configureVisitor("Channels", "ChannelVisitor",
                                                True, True)
        for channel_model in component_model.get_channels():
            DEBUG.info(f"Processing channel {channel_model.get_name()}")
            defaultStartChannel = default_dict_generator.create("DictStart")
            defaultChannelHeader = default_dict_generator.create("DictHeader")
            defaultChannelBody = default_dict_generator.create("DictBody")

            defaultStartChannel(channel_model)
            defaultChannelHeader(channel_model)
            defaultChannelBody(channel_model)

    if opt.html_docs:
        if opt.html_doc_dir is None:
            PRINT.info("HTML documentation output directory not specified!")
            raise OSError

        os.environ["HTML_DOC_SUBDIR"] = opt.html_doc_dir
        html_doc_generator = GenFactory.GenFactory.getInstance()
        html_doc_generator.configureVisitor(base + "_Html", "HtmlDocVisitor",
                                            True, True)
        htmlStart = html_doc_generator.create("HtmlStart")
        htmlDoc = html_doc_generator.create("HtmlDoc")
        finisher = html_doc_generator.create("finishSource")
        htmlStart(component_model)
        htmlDoc(component_model)
        finisher(component_model)

    if opt.md_docs:
        if opt.md_doc_dir is None:
            PRINT.info("MD documentation output directory not specified!")
            raise OSError

        os.environ["MD_DOC_SUBDIR"] = opt.md_doc_dir
        md_doc_generator = GenFactory.GenFactory.getInstance()
        md_doc_generator.configureVisitor(base + "_Md", "MdDocVisitor", True,
                                          True)
        mdStart = md_doc_generator.create("MdStart")
        mdDoc = md_doc_generator.create("MdDoc")
        finisher = md_doc_generator.create("finishSource")
        mdStart(component_model)
        mdDoc(component_model)
        finisher(component_model)
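The report blocks above for commands, channels, events and parameters all count IDs and build a comma-separated list; a small helper capturing that pattern (the helper name is an editorial suggestion, not part of the tool):

def count_ids(items, get_ids):
    """Return (total id count, comma-separated id string) for a report line."""
    ids = [an_id for item in items for an_id in get_ids(item)]
    return len(ids), ",".join(ids)

# e.g. channels, chan_ids = count_ids(the_parsed_component_xml.get_channels(),
#                                     lambda chan: chan.get_ids())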
Example #7
def generate_component_instance_dictionary(the_parsed_component_xml, opt,
                                           topology_model):
    global DEPLOYMENT

    #
    parsed_port_xml_list = []
    parsed_serializable_xml_list = []
    # uses the topology model to process the items
    # checks if the topology model exists
    if topology_model is None:
        PRINT.info(
            "Topology model was not specified. Please also input a topology model when running this command."
        )
        raise OSError

    port_type_files_list = the_parsed_component_xml.get_port_type_files()

    for port_file in port_type_files_list:
        port_file = search_for_file("Port", port_file)
        xml_parser_obj = XmlPortsParser.XmlPortsParser(port_file)
        # print xml_parser_obj.get_args()
        parsed_port_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    serializable_type_files_list = (
        the_parsed_component_xml.get_serializable_type_files())
    for serializable_file in serializable_type_files_list:
        serializable_file = search_for_file("Serializable", serializable_file)
        xml_parser_obj = XmlSerializeParser.XmlSerializeParser(
            serializable_file
        )  # Telemetry/Params can only use generated serializable types
        # check to make sure that the serializables don't have things that channels and parameters can't have
        # can't have external non-xml members
        if len(xml_parser_obj.get_include_header_files()):
            PRINT.info(
                f"ERROR: Component include serializables cannot use user-defined types. file: {serializable_file}"
            )
            sys.exit(-1)

        # print xml_parser_obj.get_args()
        parsed_serializable_xml_list.append(xml_parser_obj)
        del xml_parser_obj

    generator = CompFactory.CompFactory.getInstance()
    component_model = generator.create(the_parsed_component_xml,
                                       parsed_port_xml_list,
                                       parsed_serializable_xml_list)

    if opt.default_topology_dict:
        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through command instances
        default_dict_generator.configureVisitor("Commands",
                                                "InstanceCommandVisitor", True,
                                                True)
        for command_model in component_model.get_commands():
            DEBUG.info(f"Processing command {command_model.get_mnemonic()}")
            defaultStartCmd = default_dict_generator.create(
                "InstanceDictStart")
            defaultCmdHeader = default_dict_generator.create(
                "InstanceDictHeader")
            defaultCmdBody = default_dict_generator.create("InstanceDictBody")

            defaultStartCmd(command_model, topology_model)
            defaultCmdHeader(command_model, topology_model)
            defaultCmdBody(command_model, topology_model)

        for parameter_model in component_model.get_parameters():
            DEBUG.info(f"Processing parameter {parameter_model.get_name()}")
            defaultStartCmd = default_dict_generator.create(
                "InstanceDictStart")
            defaultCmdHeader = default_dict_generator.create(
                "InstanceDictHeader")
            defaultCmdBody = default_dict_generator.create("InstanceDictBody")

            defaultStartCmd(parameter_model, topology_model)
            defaultCmdHeader(parameter_model, topology_model)
            defaultCmdBody(parameter_model, topology_model)

        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through event instances
        default_dict_generator.configureVisitor("Events",
                                                "InstanceEventVisitor", True,
                                                True)
        for event_model in component_model.get_events():
            DEBUG.info(f"Processing event {event_model.get_name()}")
            defaultStartEvent = default_dict_generator.create(
                "InstanceDictStart")
            defaultEventHeader = default_dict_generator.create(
                "InstanceDictHeader")
            defaultEventBody = default_dict_generator.create(
                "InstanceDictBody")

            defaultStartEvent(event_model, topology_model)
            defaultEventHeader(event_model, topology_model)
            defaultEventBody(event_model, topology_model)

        default_dict_generator = GenFactory.GenFactory.getInstance()
        # iterate through channel instances
        default_dict_generator.configureVisitor("Channels",
                                                "InstanceChannelVisitor", True,
                                                True)
        for channel_model in component_model.get_channels():
            DEBUG.info(f"Processing channel {channel_model.get_name()}")
            defaultStartChannel = default_dict_generator.create(
                "InstanceDictStart")
            defaultChannelHeader = default_dict_generator.create(
                "InstanceDictHeader")
            defaultChannelBody = default_dict_generator.create(
                "InstanceDictBody")

            defaultStartChannel(channel_model, topology_model)
            defaultChannelHeader(channel_model, topology_model)
            defaultChannelBody(channel_model, topology_model)
Example #8
def main():
    """
    Main program.
    """
    global ERROR  # prevent local creation of variable
    global VERBOSE  # prevent local creation of variable
    global GEN_TEST_CODE  # indicate if test code should be generated
    global DEPLOYMENT  # deployment set in topology xml only and used to install new instance dicts

    ERROR = False
    # Sets up the initial (singleton) instance
    ConfigManager.ConfigManager.getInstance()
    Parser = pinit()
    (opt, args) = Parser.parse_args()
    VERBOSE = opt.verbose_flag

    # Check that the specified working directory exists. Remember, the
    # default working directory is the current working directory which
    # always exists. We are basically only checking for when the user
    # specifies an alternate working directory.

    if not os.path.exists(opt.work_path):
        Parser.error(f"Specified path does not exist ({opt.work_path})!")

    working_dir = opt.work_path

    # Get the current working directory so that we can return to it when
    # the program completes. We always want to return to the place where
    # we started.

    starting_directory = os.getcwd()
    os.chdir(working_dir)
    # print working_dir
    # print os.getcwd()

    # Configure the logging.
    log_level = opt.logger.upper()
    log_level_dict = dict()

    log_level_dict["QUIET"] = None
    log_level_dict["DEBUG"] = logging.DEBUG
    log_level_dict["INFO"] = logging.INFO
    log_level_dict["WARNING"] = logging.WARN
    log_level_dict["ERROR"] = logging.ERROR
    log_level_dict["CRITICAL"] = logging.CRITICAL

    if log_level_dict[log_level] is None:
        stdout_enable = False
    else:
        stdout_enable = True

    log_fd = opt.logger_output
    # For now no log file

    Logger.connectDebugLogger(log_level_dict[log_level], log_fd, stdout_enable)
    Logger.connectOutputLogger(log_fd)
    #
    #  Parse the input Component XML file and create internal meta-model
    #
    if len(args) == 0:
        PRINT.info(f"Usage: {sys.argv[0]} [options] xml_filename")
        return
    else:
        xml_filenames = args[0:]
    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opt.build_root_flag:
        # Check for BUILD_ROOT env. variable
        if "BUILD_ROOT" not in os.environ:
            PRINT.info(
                "ERROR: The -b command option requires that BUILD_ROOT environmental variable be set to root build path..."
            )
            sys.exit(-1)
        else:
            set_build_roots(os.environ.get("BUILD_ROOT"))

    for xml_filename in xml_filenames:

        xml_type = XmlParser.XmlParser(xml_filename)()

        if xml_type == "component":
            DEBUG.info(
                "Detected Component XML so Generating Component C++ Files...")
            the_parsed_component_xml = XmlComponentParser.XmlComponentParser(
                xml_filename)
            generate_component(the_parsed_component_xml,
                               os.path.basename(xml_filename), opt)
            dependency_parser = the_parsed_component_xml
        elif xml_type == "interface":
            DEBUG.info(
                "Detected Port type XML so Generating Port type C++ Files...")
            the_parsed_port_xml = XmlPortsParser.XmlPortsParser(xml_filename)
            generate_port(the_parsed_port_xml, os.path.basename(xml_filename))
            dependency_parser = the_parsed_port_xml
        elif xml_type == "serializable":
            DEBUG.info(
                "Detected Serializable XML so Generating Serializable C++ Files..."
            )
            the_serial_xml = XmlSerializeParser.XmlSerializeParser(
                xml_filename)
            generate_serializable(the_serial_xml, opt)
            dependency_parser = the_serial_xml
        elif xml_type == "assembly" or xml_type == "deployment":
            DEBUG.info(
                "Detected Topology XML so Generating Topology C++ Files...")
            the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(
                xml_filename)
            DEPLOYMENT = the_parsed_topology_xml.get_deployment()
            print("Found assembly or deployment named: %s\n" % DEPLOYMENT)
            generate_topology(the_parsed_topology_xml,
                              os.path.basename(xml_filename), opt)
            dependency_parser = the_parsed_topology_xml
        elif xml_type == "enum":
            DEBUG.info(
                "Detected Enum XML so Generating hpp, cpp, and py files...")
            curdir = os.getcwd()
            if EnumGenerator.generate_enum(xml_filename):
                ERROR = False
                PRINT.info(
                    f"Completed generating files for {xml_filename} Enum XML...."
                )
            else:
                ERROR = True
            os.chdir(curdir)
        elif xml_type == "array":
            DEBUG.info(
                "Detected Array XML so Generating hpp, cpp, and py files...")
            curdir = os.getcwd()
            if ArrayGenerator.generate_array(xml_filename):
                ERROR = False
                PRINT.info(
                    f"Completed generating files for {xml_filename} Array XML..."
                )
            else:
                ERROR = True
            os.chdir(curdir)
        else:
            PRINT.info("Invalid XML found...this format not supported")
            ERROR = True

        if opt.dependency_file is not None:
            if opt.build_root_flag:
                generate_dependency_file(
                    opt.dependency_file,
                    os.path.basename(xml_filename),
                    list(get_build_roots())[0],
                    dependency_parser,
                    xml_type,
                )

    # Always return to directory where we started.
    os.chdir(starting_directory)

    if ERROR:
        sys.exit(-1)
    else:
        sys.exit(0)
Example #9
def main():

    parser = pinit()
    (opts, args) = parser.parse_args()

    Logger.connectOutputLogger(opts.logger_output)

    # Global logger init. below.
    PRINT = logging.getLogger("output")

    #
    #  Parse the input Component XML file and create internal meta-model
    #
    if len(args) == 0:
        PRINT.info("\nUsage: %s [options] xml_filename" % sys.argv[0])
        PRINT.info("ERROR: Cannot create dictionary\n")
        sys.exit(-1)
    else:
        xmlFilename = args[0]

    #
    # Check for BUILD_ROOT variable for XML port searches
    #
    if opts.build_root_overwrite is not None:
        set_build_roots(opts.build_root_overwrite)
    else:
        if ("BUILD_ROOT" in os.environ.keys()) == False:
            print("ERROR: Build root not set to root build path...")
            sys.exit(-1)
        set_build_roots(os.environ["BUILD_ROOT"])

    parsedTopology = XmlTopologyParser.XmlTopologyParser(xmlFilename)
    deployment = parsedTopology.get_deployment()
    outFilename = deployment + "Dictionary.json"
    outFilepath = "/".join([opts.work_path, outFilename])
    descriptionFilename = "/".join([opts.work_path, "/dictPath.txt"])

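    # Dictionary layout: one top-level entry per deployment, with events, channels, commands, and serializables sections.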
    dictionary = {}
    dictionary[deployment] = {
        "events": {},
        "channels": {},
        "commands": {},
        "serializables": {},
    }

    events = dictionary[deployment]["events"]
    channels = dictionary[deployment]["channels"]
    commands = dictionary[deployment]["commands"]
    serializables = dictionary[deployment]["serializables"]
    limitLabels = [
        "low_red",
        "low_orange",
        "low_yellow",
        "high_yellow",
        "high_orange",
        "high_red",
    ]
    unitLabels = ["label", "gain", "offset"]

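    # Track every ID registered under each name so duplicates across instances can be disambiguated later.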
    instanceIDs = {"events": {}, "channels": {}, "commands": {}}

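    # Walk every component instance in the topology, collecting its serializables, commands, events, and channels.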
    for inst in parsedTopology.get_instances():
        serializableFilenames = inst.get_comp_xml().get_serializable_type_files()
        for filename in serializableFilenames:
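            # Locate the serializable XML under one of the build roots; the for/else raises if no root contains it.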
            for build_root in get_build_roots():
                if os.path.exists(os.path.join(build_root, filename)):
                    break
            else:
                raise FileNotFoundError(os.path.join(build_root, filename))

            parsedSerializable = XmlSerializeParser.XmlSerializeParser(
                os.path.join(build_root, filename)
            )
            name = parsedSerializable.get_name()
            namespace = parsedSerializable.get_namespace()
            members = []
            membersRaw = parsedSerializable.get_members()
            for member in membersRaw:
                members.append(
                    {
                        "name": member[0],
                        "type": format_type_item(member[1]),
                        "size": member[2],
                        "format_string": member[3],
                        "comment": member[4],
                    }
                )

            metadata = {
                "name": name,
                "namespace": namespace,
                "members": members,
            }

            serializables[namespace + "::" + name] = metadata
        comp_name = inst.get_name()
        comp_type = inst.get_type()
        comp_namespace = inst.get_namespace()
        component = "::".join([comp_namespace, comp_type])
        base_id = inst.get_base_id()
        base_id = int(base_id, 16) if ("0x" in base_id) else int(base_id)
        comp_parser = inst.get_comp_xml()
        comp_dir = dir(comp_parser)

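        # dir() is used to probe which getters (commands, events, channels) this component parser provides.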
        if "get_commands" in comp_dir:
            for command in comp_parser.get_commands():
                opcode = command.get_opcodes()[0]
                opcode = int(opcode, 16) if ("0x" in opcode) else int(opcode)
                opcode += base_id

                name = command.get_mnemonic()
                if name in instanceIDs["commands"]:
                    instanceIDs["commands"][name].append(opcode)
                else:
                    instanceIDs["commands"][name] = [opcode]

                arguments = []
                for arg in command.get_args():
                    typeItem = arg.get_type()
                    typeObj = format_type_item(typeItem)
                    arguments.append(typeObj)

                metadata = {
                    "id": opcode,
                    "name": name,
                    "instance": comp_name,
                    "description": command.get_comment(),
                    "component": component,
                    "arguments": arguments,
                }

                commands[opcode] = metadata

        if "get_events" in comp_dir:
            for event in comp_parser.get_events():
                ev_id = event.get_ids()[0]
                ev_id = int(ev_id, 16) if ("0x" in ev_id) else int(ev_id)
                ev_id += base_id

                name = event.get_name()
                if name in instanceIDs["events"]:
                    instanceIDs["events"][name].append(ev_id)
                else:
                    instanceIDs["events"][name] = [ev_id]

                arguments = []
                for arg in event.get_args():
                    typeItem = arg.get_type()
                    typeObj = format_type_item(typeItem)
                    arguments.append(typeObj)

                metadata = {
                    "id": ev_id,
                    "description": event.get_comment(),
                    "name": name,
                    "instance": comp_name,
                    "component": component,
                    "format_string": event.get_format_string(),
                    "severity": event.get_severity(),
                    "telem_type": "event",
                    "arguments": arguments,
                }

                events[ev_id] = metadata

        if "get_channels" in comp_dir:
            for channel in comp_parser.get_channels():
                ch_id = channel.get_ids()[0]
                ch_id = int(ch_id, 16) if ("0x" in ch_id) else int(ch_id)
                ch_id += base_id

                name = channel.get_name()
                if name in instanceIDs["channels"]:
                    instanceIDs["channels"][name].append(ch_id)
                else:
                    instanceIDs["channels"][name] = [ch_id]

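                # Pair each unit tuple with its labels (label, gain, offset).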
                units = []
                for unit in channel.get_units():
                    units.append(dict(list(zip(unitLabels, unit))))

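                # A channel type is either a primitive type name or an enum definition; for enums, build an ordinal-to-name map.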
                typeObj = channel.get_type()
                type_name = ""
                if isinstance(typeObj, str):
                    type_name = typeObj
                else:
                    type_name = "Enum"
                    enum_dict = {}
                    for (i, enum) in enumerate(typeObj[1]):
                        enum_dict[str(i)] = enum[0]

                metadata = {
                    "id": ch_id,
                    "name": name,
                    "instance": comp_name,
                    "description": channel.get_comment(),
                    "telem_type": "channel",
                    "component": component,
                    "format_string": channel.get_format_string(),
                    "limits": dict(list(zip(limitLabels, channel.get_limits()))),
                    "type": type_name,
                    "units": units,
                }

                if type_name == "Enum":
                    metadata["enum_dict"] = enum_dict
                    metadata["format_string"] = "%s"

                channels[ch_id] = metadata

    # Prepend instance name to commands, events, and channels with duplicate component types
    # PRINT.info(json.dumps(instanceIDs, indent=4))
    for telemetryType, idDict in instanceIDs.items():
        for name, ids in idDict.items():
            if len(ids) > 1:
                for telem_id in ids:
                    telem = dictionary[deployment][telemetryType][telem_id]
                    telem["name"] = "_".join([telem["instance"], telem["name"]])

    # Stringify JSON -- indent option makes it readable, can be removed if file
    # size is an issue
    jsonStr = json.dumps(dictionary, indent=4)

    # Create output directory if it doesn't exist
    directory = os.path.dirname(outFilepath)
    os.makedirs(directory, exist_ok=True)

    # Write JSON to file
    with open(outFilepath, "w") as outFile:
        outFile.write(jsonStr)
    # Record where the dictionary was written (dictPath.txt)
    with open(descriptionFilename, "w") as descriptionFile:
        descriptionFile.write(outFilepath)
    PRINT.info("\nJSON output written to %s" % outFilepath)