Example #1
    def convert_next_token(self, parsed_object, parsed_object_iter, pnl,
                           synthesized_pnls):
        """
        Takes individual parsed objects from the parsed line object and
        populates the ParsedNetlistLine with all the information necessary
        to create a Statement.

        Many hacks are contained here.
        """

        if parsed_object.types[
                0] == SpiritCommon.data_model_type.BLOCK_DELIMITER:

            if parsed_object.value == "{":

                self._delimited_block = True

            else:
                self._delimited_block = False

                if self._if_statement:

                    self._if_statement = False
                    self._comment_end_of_if_statement = True

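        # While inside a Spectre "if" block, emit the line as a comment, since
        # the conditional block itself is not translated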
        elif self._if_statement:

            pnl.type = "COMMENT"

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME or parsed_object.types[
                    0] == SpiritCommon.data_model_type.DEVICE_TYPE:

            if spectre_to_adm_model_type_map.get(parsed_object.value):

                pnl.type = spectre_to_adm_model_type_map[parsed_object.value]
                pnl.local_type = parsed_object.value

            else:

                logging.warning(
                    "Possible error. Spectre type not recognized: " +
                    str(parsed_object.value))

            # If the directive is .GLOBAL, discard the first listed node for now,
            # since it is considered a ground node.
            if pnl.type == ".GLOBAL":

                next(parsed_object_iter)

            if pnl.type == ".IF":

                pnl.type = "COMMENT"
                pnl.add_comment(parsed_object.value)

                self._if_statement = True

            elif pnl.type == ".ELSE":

                pnl.type = "COMMENT"
                pnl.add_comment(parsed_object.value)

                self._if_statement = True
                self._comment_end_of_if_statement = False

            elif pnl.type == ".ELSEIF":

                pnl.type = "COMMENT"
                pnl.add_comment(parsed_object.value)

                self._if_statement = True
                self._comment_end_of_if_statement = False

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.MODEL_NAME and not pnl.type == ".MODEL":

            if spectre_to_adm_model_type_map.get(parsed_object.value):

                pnl.type = spectre_to_adm_model_type_map[parsed_object.value]
                pnl.local_type = parsed_object.value

            else:

                pnl.add_known_object(parsed_object.value, Types.modelName)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.MODEL_TYPE and pnl.type == ".MODEL":

            adm_type = spectre_to_adm_model_type_map.get(parsed_object.value)

            # For Spectre, different models aren't distinguished by a "LEVEL" parameter.
            # Instead, the model name itself indicates which model is being used (e.g.,
            # bsimsoi instead of LEVEL=10, or vbic instead of LEVEL=10).
            if adm_type == "M" or adm_type == "Q" or adm_type == "J":

                pnl.add_param_value_pair("LEVEL", parsed_object.value)

            if not adm_type:

                adm_type = parsed_object.value

            # Default to NMOS for type
            if adm_type == "M":

                pnl.add_known_object("NMOS", Types.modelType)
                pnl.add_param_value_pair("type", "N")

            elif adm_type == "J":

                pnl.add_known_object("NJF", Types.modelType)
                pnl.add_param_value_pair("type", "N")

            else:

                pnl.add_known_object(adm_type, Types.modelType)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DEVICE_NAME or (
                    parsed_object.types[0]
                    == SpiritCommon.data_model_type.MODEL_NAME
                    and pnl.type == ".MODEL"):

            pnl.name = parsed_object.value

        elif pnl.type == ".DC":

            # .DC and .AC directives need four PARAM_NAME/PARAM_VALUE pairs - a sweep variable name,
            # a start value, a stop value, and a step value
            if not pnl.sweep_param_list:

                pnl.add_unused_sweep_params("dc")
                sweep_list = ["", "", "", ""]

                for sweep_item in sweep_list:

                    pnl.add_sweep_param_value(sweep_item)

            if parsed_object.types[
                    0] == SpiritCommon.data_model_type.DC_SWEEP_DEV:

                pnl.add_unused_sweep_params("dev=" + parsed_object.value)

                # Only save if dc analysis does not involve a param
                if not pnl.sweep_param_list[0]:
                    pnl.sweep_param_list[0] = parsed_object.value
                    pnl.flag_unresolved_device = True

            elif parsed_object.types[
                    0] == SpiritCommon.data_model_type.DC_SWEEP_PARAM:

                pnl.add_unused_sweep_params("param=" + parsed_object.value)

                if not parsed_object.value == "dc":

                    # Overwrite any device stored earlier with the param name, and
                    # reset the unresolved device flag to False
                    pnl.sweep_param_list[0] = parsed_object.value
                    pnl.flag_unresolved_device = False

            elif parsed_object.types[
                    0] == SpiritCommon.data_model_type.DC_SWEEP_START:

                pnl.sweep_param_list[1] = parsed_object.value

            elif parsed_object.types[
                    0] == SpiritCommon.data_model_type.DC_SWEEP_STOP:

                pnl.sweep_param_list[2] = parsed_object.value

            elif parsed_object.types[
                    0] == SpiritCommon.data_model_type.DC_SWEEP_STEP:

                pnl.sweep_param_list[3] = parsed_object.value

            elif parsed_object.types[
                    0] == SpiritCommon.data_model_type.PARAM_NAME:

                sweep_param_name = parsed_object.value
                sweep_parsed_object = next(parsed_object_iter)

                if not sweep_parsed_object.types[
                        0] == SpiritCommon.data_model_type.PARAM_VALUE:

                    logging.error(
                        "Line(s):" + str(pnl.linenum) +
                        ". Parser passed wrong token.  Expected PARAM_VALUE.  Got "
                        + str(sweep_parsed_object.types[0]))
                    raise Exception(
                        "Next Token is not a PARAM_VALUE.  Something went wrong!"
                    )

                sweep_param_value = sweep_parsed_object.value
                pnl.add_unused_sweep_params(sweep_param_name + "=" +
                                            sweep_param_value)

        # For translation of port instance parameters to names recognized internally by XDM
        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.PARAM_NAME and pnl.type == "P":

            param_value_parsed_object = next(parsed_object_iter)

            if parsed_object.value == "num":

                pnl.add_param_value_pair("PORT",
                                         param_value_parsed_object.value)

            elif parsed_object.value == "r":

                pnl.add_param_value_pair("Z0", param_value_parsed_object.value)

            elif parsed_object.value == "mag":

                pnl.add_param_value_pair("AC", param_value_parsed_object.value)

            elif parsed_object.value == "type":

                pass

            else:

                pnl.add_param_value_pair(parsed_object.value.upper(),
                                         param_value_parsed_object.value)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.GENERALNODE and not pnl.type in [
                    ".IC", ".DCVOLT", ".NODESET"
                ]:

            output_node = parsed_object.value

            if BoostParserInterface.boost_xdm_map_dict[parsed_object.types[
                    0]] in pnl.known_objects and pnl.type == ".GLOBAL":

                pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum)
                pnl_synth.type = ".GLOBAL"
                pnl_synth.local_type = ".GLOBAL"
                pnl_synth.add_known_object(
                    output_node, BoostParserInterface.boost_xdm_map_dict[
                        parsed_object.types[0]])
                synthesized_pnls.append(pnl_synth)

            else:

                pnl.add_known_object(
                    output_node, BoostParserInterface.boost_xdm_map_dict[
                        parsed_object.types[0]])

        elif parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME:

            # For Spectre, the polarity of the device (e.g., NMOS or PMOS, or NPN or PNP)
            # isn't declared as a separate identifier in the .MODEL statement. Instead,
            # it is saved as a model parameter called "type". The polarity needs to be
            # extracted and saved in the data model consistent with SPICE parsing
            if pnl.type == ".MODEL" and parsed_object.value.upper() == "TYPE":

                param_value_parsed_object = next(parsed_object_iter)

                if pnl.known_objects.get(Types.modelType).endswith("MOS"):

                    pnl.add_known_object(
                        param_value_parsed_object.value.upper() + "MOS",
                        Types.modelType)

                elif pnl.known_objects.get(Types.modelType).endswith("JF"):

                    pnl.add_known_object(
                        param_value_parsed_object.value.upper() + "JF",
                        Types.modelType)

                else:

                    pnl.add_known_object(param_value_parsed_object.value,
                                         Types.modelType)

                pnl.add_param_value_pair(parsed_object.value,
                                         param_value_parsed_object.value)

            elif pnl.type == ".MODEL" and parsed_object.value.upper(
            ) == "VERSION":

                param_value_parsed_object = next(parsed_object_iter)
                pnl.add_param_value_pair(parsed_object.value.upper(),
                                         param_value_parsed_object.value)

            elif not parsed_object.value == "wave":

                param_value_parsed_object = next(parsed_object_iter)

                if pnl.type and pnl.type == ".TRAN":

                    self.set_tran_param(pnl, parsed_object.value,
                                        param_value_parsed_object.value)

                elif pnl.type in ("V", "I"):

                    processed_value = param_value_parsed_object.value

                    # Some source parameters don't need curly braces, such as:
                    # The "type" parameter indicates source type, such as PULSE or PWL.
                    # The "file" parameter indicates the file to be opened.
                    if not parsed_object.value == "type" and not parsed_object.value == "file":

                        processed_value, msg = convert_to_xyce(processed_value)

                    processed_value = self.hack_ternary_operator(
                        processed_value)
                    pnl.source_params[parsed_object.value] = processed_value

                else:

                    if param_value_parsed_object.types[
                            0] != SpiritCommon.data_model_type.PARAM_VALUE:

                        raise Exception(
                            "Next Token is not a PARAM_VALUE.  Something went wrong!"
                        )

                    if (parsed_object.value.upper()
                            == "M") and pnl.type not in ['R', 'L', 'C']:

                        pnl.m_param = param_value_parsed_object.value

                    msg = None
                    # expression = None
                    if param_value_parsed_object.value.startswith(
                            '[') and param_value_parsed_object.value.endswith(
                                ']'):

                        expression = param_value_parsed_object.value

                    elif is_a_number(param_value_parsed_object.value):

                        processed_value = param_value_parsed_object.value
                        expression = convert_si_unit_prefix(processed_value)

                    else:

                        # For parameters that refer to control devices, skip convert_to_xyce
                        # In the future, this will include cccs, etc.
                        processed_value, msg = convert_to_xyce(
                            param_value_parsed_object.value)
                        expression = self.hack_ternary_operator(
                            processed_value)

                    if expression:

                        pnl.add_param_value_pair(parsed_object.value,
                                                 expression)

                    else:

                        pnl.add_param_value_pair(
                            parsed_object.value,
                            param_value_parsed_object.value)

                    if msg:

                        logging.warning("Error in expression: " + msg +
                                        str(parsed_object.value))

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DC_VALUE_VALUE:

            processed_value, msg = convert_to_xyce(parsed_object.value)
            processed_value = self.hack_ternary_operator(processed_value)

            pnl.add_lazy_statement(
                processed_value, BoostParserInterface.boost_xdm_map_dict[
                    parsed_object.types[0]])

        elif parsed_object.types[0] in [
                SpiritCommon.data_model_type.AC_MAG_VALUE,
                SpiritCommon.data_model_type.AC_PHASE_VALUE
        ]:

            processed_value, msg = convert_to_xyce(parsed_object.value)
            processed_value = self.hack_ternary_operator(processed_value)

            if parsed_object.types[
                    0] == SpiritCommon.data_model_type.AC_MAG_VALUE:
                pnl.add_known_object("AC", Types.acValue)

            pnl.add_known_object(
                processed_value, BoostParserInterface.boost_xdm_map_dict[
                    parsed_object.types[0]])

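        # A CONTROL_DEVICE token must be followed immediately by its
        # CONTROL_DEVICE_NAME token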
        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.CONTROL_DEVICE:

            control_dev_name_obj = next(parsed_object_iter)

            if control_dev_name_obj.types[
                    0] != SpiritCommon.data_model_type.CONTROL_DEVICE_NAME:
                logging.error(
                    "Line(s):" + str(pnl.linenum) +
                    ". Parser passed wrong token.  Expected CONTROL_DEVICE_NAME.  Got "
                    + str(control_dev_name_obj.types[0]))
                raise Exception(
                    "Next Token is not a CONTROL_DEVICE_NAME.  Something went wrong!"
                )

            pnl.add_control_param_value(control_dev_name_obj.value)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE:

            formatted_output_variable = format_output_variable(
                parsed_object.value)
            pnl.add_output_variable_value(formatted_output_variable)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.UNKNOWN_NODE:

            pnl.add_unknown_node(parsed_object.value)

        elif parsed_object.types[0] == SpiritCommon.data_model_type.COMMENT:

            # If a comment comes in the middle of a delimited block, synthesize a PNL
            # object for the comment and leave the original PNL unmolested
            if self._delimited_block:
                pnl_synth = ParsedNetlistLine(pnl.filename, [pnl.linenum[-1]])
                pnl_synth.type = "COMMENT"
                pnl_synth.name = parsed_object.value
                pnl_synth.add_comment(parsed_object.value)
                synthesized_pnls.append(pnl_synth)

            else:
                pnl.type = "COMMENT"
                pnl.name = parsed_object.value
                pnl.add_comment(parsed_object.value)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.LIB_ENTRY and pnl.type and not pnl.type == ".ENDL":

            # convert to .lib from .include
            pnl.type = ".LIB"
            pnl.add_known_object(
                parsed_object.value, BoostParserInterface.boost_xdm_map_dict[
                    parsed_object.types[0]])

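        # Function expressions are converted to Xyce syntax and wrapped in
        # curly braces if they aren't already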
        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.FUNC_EXPRESSION:

            processed_value, msg = convert_to_xyce(parsed_object.value)
            processed_value = self.hack_ternary_operator(processed_value)

            if not processed_value.startswith("{"):
                processed_value = "{" + processed_value + "}"

            pnl.add_known_object(
                processed_value, BoostParserInterface.boost_xdm_map_dict[
                    parsed_object.types[0]])

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.CONDITIONAL_STATEMENT:

            comment = pnl.params_dict[Types.comment] + parsed_object.value
            pnl.add_comment(comment)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.BINNED_MODEL_NAME:

            # if "." already in model name, need to create synthesized pnl for next
            # binned model
            if "." in pnl.name:

                model_name = pnl.name.split(".")[0]
                pnl_synth = ParsedNetlistLine(pnl.filename, [pnl.linenum[-1]])
                pnl_synth.type = ".MODEL"
                pnl_synth.local_type = "model"
                pnl_synth.name = model_name + "." + parsed_object.value
                pnl_synth.add_param_value_pair("LEVEL",
                                               pnl.params_dict["LEVEL"])
                pnl_synth.add_known_object(pnl.known_objects["MODEL_TYPE"],
                                           Types.modelType)
                synthesized_pnls.append(pnl_synth)
                self._modify_synth_pnl = True

            else:

                pnl.name = pnl.name + "." + parsed_object.value

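        # Controlled sources: a VOLTAGE or CURRENT token is followed by an
        # EXPRESSION token, which is converted and stored as the controlling
        # expression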
        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.VOLTAGE or parsed_object.types[
                    0] == SpiritCommon.data_model_type.CURRENT:

            expression_obj = next(parsed_object_iter)

            if expression_obj.types[
                    0] != SpiritCommon.data_model_type.EXPRESSION:

                logging.error(
                    "Line(s):" + str(pnl.linenum) +
                    ". Parser passed wrong token.  Expected EXPRESSION.  Got "
                    + str(expression_obj.types[0]))
                raise Exception(
                    "Next Token is not a EXPRESSION.  Something went wrong!")

            processed_value, msg = convert_to_xyce(expression_obj.value)
            processed_value = self.hack_ternary_operator(processed_value)
            pnl.add_known_object(processed_value, Types.expression)

            if parsed_object.types[0] == SpiritCommon.data_model_type.VOLTAGE:

                pnl.add_known_object(processed_value, Types.voltage)

            if parsed_object.types[0] == SpiritCommon.data_model_type.CURRENT:

                pnl.add_known_object(processed_value, Types.current)

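        # Anything not handled above falls through to the generic Xyce token
        # handling, after converting SI unit prefixes on numeric values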
        else:

            if is_a_number(parsed_object.value):

                parsed_object.value = convert_si_unit_prefix(
                    parsed_object.value)

            XyceNetlistBoostParserInterface.convert_next_token(
                parsed_object, parsed_object_iter, pnl, synthesized_pnls)
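
A minimal driver sketch (the convert_line helper and its argument names are hypothetical; only the convert_next_token and ParsedNetlistLine signatures are taken from the code above): each token of a parsed line is passed to convert_next_token, which fills in a single ParsedNetlistLine and may append extra synthesized lines.

    # Hypothetical driver loop; `interface` is assumed to be a
    # SpectreNetlistBoostParserInterface instance and `parsed_objects` the
    # token list produced by the Boost parser for one netlist line.
    def convert_line(interface, parsed_objects, filename, linenum):
        pnl = ParsedNetlistLine(filename, [linenum])
        synthesized_pnls = []
        parsed_object_iter = iter(parsed_objects)
        for parsed_object in parsed_object_iter:
            # A single call may consume additional tokens from parsed_object_iter
            # (e.g. the PARAM_VALUE that follows a PARAM_NAME).
            interface.convert_next_token(parsed_object, parsed_object_iter,
                                         pnl, synthesized_pnls)
        return pnl, synthesized_pnls
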
Example #2
    def read(self):
        """
        .. _reader_read:

        Iterates over the grammar's component statements
        and registers the relevant components.

        """
        import sys
        inc_files_and_scopes = []
        lib_files = []  # tuple list (file name, lib name)
        control_device_handling_list = []
        debug_incfiles = False
        platform = sys.platform

        grammar_iter = iter(self._grammar)
        # iterates through each grammar "line"
        for parsed_netlist_line in grammar_iter:
            self._last_line = self.read_line(parsed_netlist_line, self._reader_state, self._top_reader_state,
                                             self._language_definition, control_device_handling_list, inc_files_and_scopes, lib_files)
            self._statement_count += 1
            if self._language_changed:
                break

        # Add in default TNOM value, if not the same as Xyce's 27C
        if self._is_top_level_file and not self._grammar.tnom_defined and self._grammar.tnom_value != "27":
            parsed_netlist_line = ParsedNetlistLine(self._file, [0])
            parsed_netlist_line.type = ".OPTIONS"
            parsed_netlist_line.local_type = ".OPTIONS"
            parsed_netlist_line.add_known_object("DEVICE", Types.optionPkgTypeValue)
            parsed_netlist_line.add_param_value_pair("TNOM", self._grammar.tnom_value)
            self._last_line = self.read_line(parsed_netlist_line, self._reader_state, self._top_reader_state,
                                             self._language_definition, control_device_handling_list, inc_files_and_scopes, lib_files)

        # only allows for one simulator statement
        if self._language_changed:

            self._language_changed = False
            self._grammar = self._grammar_type(self._file, self._language_definition, self._is_top_level_file)
            grammar_iter = iter(self._grammar)

            # skip all lines until past simulator statement
            for i in range(self._statement_count):
                next(grammar_iter)

            for parsed_netlist_line in grammar_iter:
                self._last_line = self.read_line(parsed_netlist_line, self._reader_state, self._top_reader_state,
                                                 self._language_definition, control_device_handling_list, inc_files_and_scopes, lib_files)
                self._statement_count += 1

        logging.debug("Completed parsing file \t\"" + self._file + "\"")

        if self._auto_translate:
            # remove duplicates and re-order to favor translations involving the top
            # scope first
            inc_files_and_scopes = list(set(inc_files_and_scopes))
            if self._reader_state.scope_index.is_top_parent():
                top_inc_files_and_scopes = []
                child_inc_files_and_scopes = []
                for filename, scope in inc_files_and_scopes:
                    if scope.is_top_parent():
                        top_inc_files_and_scopes.append((filename, scope))
                    else:
                        child_inc_files_and_scopes.append((filename, scope))

                inc_files_and_scopes = []
                inc_files_and_scopes = top_inc_files_and_scopes + child_inc_files_and_scopes

            for incfile_pair in inc_files_and_scopes:
                incfile = incfile_pair[0]
                incfile_scope = incfile_pair[1]

                if debug_incfiles is True:
                    print("self._file = '%s'\n" % self._file, file=sys.stderr)
                    print("os.path.dirname(self._file) = '%s'\n" % (os.path.dirname(self._file)), file=sys.stderr)
                    print("os.path.dirname(os.path.abspath(self._file)) = '%s'\n" % (os.path.dirname(os.path.abspath(self._file))), file=sys.stderr)
                    print("incfile = '%s'\n" % incfile, file=sys.stderr)

                # incfile_resolved  = os.path.normpath(os.path.normcase(incfile)).replace('"','')
                # incfile_case_resolved  = os.path.normcase(incfile)
                incfile_case_resolved = incfile
                incfile_quote_resolved = incfile_case_resolved.replace('"', '')
                incfile_quote_resolved = incfile_quote_resolved.replace("'", '')
                incfile_path_resolved = os.path.normpath(incfile_quote_resolved)
                incfile_resolved = incfile_path_resolved
                if debug_incfiles is True:
                    print("incfile_case_resolved = '%s'\n" % incfile_case_resolved, file=sys.stderr)
                    print("incfile_quote_resolved = '%s'\n" % incfile_quote_resolved, file=sys.stderr)
                    print("incfile_path_resolved = '%s'\n" % incfile_path_resolved, file=sys.stderr)

                dirname_resolved = os.path.normpath(os.path.dirname(os.path.abspath(self._file))).replace('"', '')

                if debug_incfiles:
                    print("incfile_resolved = '%s'\n" % incfile_resolved, file=sys.stderr)
                    print("dirname_resolved = '%s'\n" % dirname_resolved, file=sys.stderr)

                if platform.startswith("win"):
                    if debug_incfiles:
                        print("On Windows - pre fixed file string = '%s'\n" % incfile_resolved, file=sys.stderr)
                    incfile_resolved_slashes = incfile_resolved.replace('/', '\\')
                    if debug_incfiles:
                        print("On Windows - post fixed file string = '%s'\n" % incfile_resolved_slashes, file=sys.stderr)
                else:
                    if debug_incfiles:
                        print("On Linux or OS X - pre fixed file string = '%s'\n" % incfile_resolved, file=sys.stderr)
                    incfile_resolved_slashes = incfile_resolved.replace('\\', '/')
                    if debug_incfiles:
                        print("On Linux or OS X - post fixed file string = '%s'\n" % incfile_resolved_slashes, file=sys.stderr)

                if debug_incfiles:
                    print("incfile_resolved_slashes = '%s'\n" % incfile_resolved_slashes, file=sys.stderr)

                inc_path, incfile_resolved2 = os.path.split(incfile_resolved_slashes)
                incfile_resolved3 = os.path.join(inc_path, incfile_resolved2)

                if debug_incfiles is True:
                    print("incfile2_resolved = '%s'\n" % incfile_resolved2, file=sys.stderr)
                    print("incfile3_resolved = '%s'\n" % incfile_resolved3, file=sys.stderr)

                filename = os.path.join(dirname_resolved, incfile_resolved3).replace('"', '')

                if debug_incfiles is True:
                    print("filename = '%s'\n" % filename, file=sys.stderr)

                logging.debug("Loading include file \t\t\"" + str(filename) + "\"")

                curr_scope = self._reader_state.scope_index
                self._reader_state.scope_index = incfile_scope
                include_file_reader = GenericReader(filename, self._grammar_type, self._language_definition,
                                                    reader_state=self._reader_state, top_reader_state=self._top_reader_state, 
                                                    is_top_level_file=False, tspice_xml=self._tspice_xml, pspice_xml=self._pspice_xml,
                                                    hspice_xml=self._hspice_xml, spectre_xml=self._spectre_xml, auto_translate=self._auto_translate)
                include_file_reader.read()
                self._reader_state.scope_index = curr_scope

            # re-arranges the list of library filenames/sections to be parsed. Originally
            # stored as a list of tuples, i.e. [(filename, sect), ... ]. Transferred
            # into an OrderedDict, with the filenames as keys and lists of library
            # sections as the dictionary entries
            lib_files_aggregated_sects = OrderedDict()
            for libfile in lib_files:
                if not libfile[0] in lib_files_aggregated_sects:
                    lib_files_aggregated_sects[libfile[0]] = []

                lib_files_aggregated_sects[libfile[0]].append(libfile[1])
             
            # read each library file/section list
            for libfile in lib_files_aggregated_sects:
                lib_file_name = libfile
                lib_names = deepcopy(lib_files_aggregated_sects[libfile])
                logging.info("Parsing Lib File: " + lib_file_name + " sections: " + ",".join(lib_names))

                filename = lib_file_name.replace("'", '').replace('"', '')

                if not os.path.isfile(filename):
                    filename = os.path.join(os.path.dirname(self._file), filename)

                library_file_reader = GenericReader(filename, self._grammar_type, self._language_definition,
                                                    reader_state=self._reader_state, top_reader_state=self._top_reader_state,
                                                    is_top_level_file=False, tspice_xml=self._tspice_xml, pspice_xml=self._pspice_xml,
                                                    hspice_xml=self._hspice_xml, spectre_xml=self._spectre_xml, auto_translate=self._auto_translate, 
                                                    lib_sect_list=lib_names)
                library_file_reader.read()

            # translate .lib files that are in child scope
            if self._is_top_level_file:
                count = 0 

                # list of .lib files in child scope may be growing ...
                while count < len(self._reader_state.lib_files_not_in_scope):
                    filename = self._reader_state.lib_files_not_in_scope[count]
                    logging.info("Parsing Lib File: " + filename)

                    library_file_reader = GenericReader(filename, self._grammar_type, self._language_definition,
                                                        reader_state=self._reader_state, top_reader_state=self._top_reader_state,
                                                        is_top_level_file=False, tspice_xml=self._tspice_xml, pspice_xml=self._pspice_xml,
                                                        hspice_xml=self._hspice_xml, spectre_xml=self._spectre_xml, auto_translate=self._auto_translate, 
                                                        lib_sect_list=[])
                    library_file_reader.read()
                    count += 1

                # library_file_reader.read_library(lib_name, control_device_handling_list, inc_files_and_scopes, lib_files)

        # unknown_pnl is a netlist line that doesn't yet know its device type
        # (e.g. spectre line that references a model of some type)
        # We loop through all the unknown pnls after the models have been read
        # and set the type and pass it back to read_line to build the device
        parent_scope = self._reader_state.scope_index
        if self._is_top_level_file:
            resolved_pnl_list = []
            for unknown_pnl, scope in self._reader_state.unknown_pnls.items():
                duplicate_pnl_flag = False
                self._reader_state.scope_index = scope
                # If MODEL_NAME is in known_objects, the unresolved device is an instantiation of
                # a user-declared model, which will be resolved in the first branch. The second branch
                # is for any PNLs that must be written to the top-level file. The last branch is
                # for Spectre, where the possible unresolved name is the name of a source in a directive
                language_definition = self._language_definition

                if "MODEL_NAME" in unknown_pnl.known_objects: 
                    resolved_pnl = self._reader_state.resolve_unknown_pnl(unknown_pnl, self._language_definition)

                    # If there was a language change in the file with the model card,
                    # use that language in resolving the PNL.
                    if "ST_LANG" in resolved_pnl.params_dict:
                        st_lang = resolved_pnl.params_dict.pop("ST_LANG")

                        if st_lang == self._language_definition.language:
                            pass

                        elif st_lang == "hspice":
                            xml_factory = XmlFactory(self._hspice_xml)

                        elif st_lang == "pspice":
                            xml_factory = XmlFactory(self._pspice_xml)

                        elif st_lang == "spectre":
                            xml_factory = XmlFactory(self._spectre_xml)

                        elif st_lang == "tspice":
                            xml_factory = XmlFactory(self._tspice_xml)

                        elif st_lang == "xyce":
                            xml_factory = XmlFactory(self._xyce_xml)

                        if st_lang != self._language_definition.language:
                            xml_factory.read()
                            language_definition = xml_factory.language_definition

                        if (language_definition.is_case_insensitive() and 
                            not self._reader_state.is_case_insensitive()):
                            resolved_pnl.params_dict = OrderedDict((k.upper(), v) 
                                                                   for k, v in resolved_pnl.params_dict.items())
                            resolved_pnl.known_objects = {k.upper():v 
                                                          for k, v in resolved_pnl.known_objects.items()} 

                elif unknown_pnl.flag_top_pnl:
                    resolved_pnl = unknown_pnl
                    resolved_pnl.filename = self._file
                    resolved_pnl.flag_top_pnl = False

                    # check if the top level file pnl is a duplicate. skip if it is
                    if resolved_pnl_list:
                        for prev_resolved_pnl in resolved_pnl_list:
                            match_found_flag = True

                            for key in resolved_pnl.params_dict:
                                if not key in prev_resolved_pnl.params_dict:
                                    match_found_flag = False
                                    break

                                if resolved_pnl.params_dict[key] != prev_resolved_pnl.params_dict[key]:
                                    match_found_flag = False
                                    break

                            if not match_found_flag:
                                continue

                            for key in resolved_pnl.known_objects:
                                if not key in prev_resolved_pnl.known_objects:
                                    match_found_flag = False
                                    break

                                if resolved_pnl.known_objects[key] != prev_resolved_pnl.known_objects[key]:
                                    match_found_flag = False 
                                    break

                            if match_found_flag:
                                duplicate_pnl_flag = True
                                break

                    if duplicate_pnl_flag:
                        continue

                    resolved_pnl_list.append(deepcopy(resolved_pnl))

                else:
                    resolved_pnl = self._reader_state.resolve_unknown_source(unknown_pnl, self._language_definition)

                self.read_line(resolved_pnl, self._reader_state, self._top_reader_state, language_definition,
                               control_device_handling_list, inc_files_and_scopes, lib_files)
            self._reader_state.scope_index = parent_scope

        # resolve lazy objects
        for lazy_statement_tuple in self._reader_state.scope_index.lazy_statement_index:
            for lazy_statement in lazy_statement_tuple[1]:
                for listener in lazy_statement.listener:
                    listener.resolve_lazy_bind(lazy_statement_tuple[0], lazy_statement.scope,
                                               self._reader_state.is_case_insensitive())

        # resolve control Devices
        for device, index in control_device_handling_list:
            device.resolve_control_device_list(index, self._reader_state)

        # set analysis type value of all print statements
        analysis_type = None

        if self._reader_state.scope_index.commands_index.get_statements(""):
            for statement in self._reader_state.scope_index.commands_index.get_statements(""):
                if statement.command_type in analysis_to_print_type:
                    analysis_type = analysis_to_print_type[statement.command_type]
                    break

            if analysis_type:
                for statement in self._reader_state.scope_index.commands_index.get_statements(""):
                    if statement.command_type == ".PRINT":
                        statement.set_prop(Types.analysisTypeValue, analysis_type)

            # iterate all print statements and check output variables
            if self._append_prefix:
                for statement in self._reader_state.scope_index.commands_index.get_statements(""):
                    if statement.command_type == ".PRINT":
                        variable_list = statement.get_prop(Types.outputVariableList)
                        new_list = []
                        if variable_list:
                            for variable in variable_list:
                                stripped_variable = variable[variable.find("(") + 1:variable.find(")")]
                                this_scope = parent_scope
                                colon_divided_list = stripped_variable.split(":")
                                new_colon_divided_list = []
                                for individual_variable in colon_divided_list:
                                    if this_scope.get_statement_by_name(individual_variable):
                                        this_statement = this_scope.get_statement_by_name(individual_variable)
                                        new_name = this_statement.device_type + individual_variable
                                        new_colon_divided_list.append(new_name)
                                        if this_statement.get_prop(Types.subcircuitNameValue):
                                            new_scope = this_scope.get_child_scope(
                                                this_statement.get_prop(Types.subcircuitNameValue).name)
                                            if new_scope:
                                                this_scope = new_scope
                                    else:
                                        new_colon_divided_list.append(individual_variable)
                                new_list.append(
                                    variable[:variable.find("(") + 1] + ":".join(new_colon_divided_list) + variable[
                                        variable.find(")")])
                            statement.set_prop(Types.outputVariableList, new_list)

        if self._is_top_level_file:
            parent_scope.warn_case_sensitivity()
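
A hedged sketch of how a top-level read might be started (the file paths and the pre-built reader_state are placeholders, and reusing the same state object as top_reader_state is an assumption; the keyword arguments mirror the recursive GenericReader calls above):

    # Hypothetical top-level invocation, mirroring the recursive GenericReader(...)
    # calls made above for include and library files.
    xml_factory = XmlFactory(spectre_xml_path)    # placeholder path to the Spectre XML
    xml_factory.read()

    reader = GenericReader("input.scs", SpectreNetlistBoostParserInterface,
                           xml_factory.language_definition,
                           reader_state=reader_state,        # assumed pre-built reader state
                           top_reader_state=reader_state,    # assumption: same state at top level
                           is_top_level_file=True,
                           tspice_xml=tspice_xml_path, pspice_xml=pspice_xml_path,
                           hspice_xml=hspice_xml_path, spectre_xml=spectre_xml_path,
                           auto_translate=True)
    reader.read()
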
Example #3
    def read_line(self, parsed_netlist_line, reader_state, top_reader_state, language_definition, control_device_handling_list,
                  inc_files_and_scopes, lib_files):
        """
        Reads a netlist line and calls the appropriate XDMFactory method to insert the statement into the data model.
        """
        if parsed_netlist_line.flag_top_pnl:
            top_reader_state.add_unknown_pnl(parsed_netlist_line)
            
            return parsed_netlist_line.linenum[-1]

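        # Devices of a type recognized by XDMFactory are built directly into
        # the data model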
        if XDMFactory.is_supported_device(parsed_netlist_line):
            device = XDMFactory.build_device(parsed_netlist_line, reader_state, language_definition)

            if device is None:
                return parsed_netlist_line.linenum[-1]

            # handle case of preprocess directive for xyce
            if parsed_netlist_line.preprocess_keyword_value and "hspice" in language_definition.language:
                # create parsed netlist line object for the preprocess directive
                preprocess_pnl = ParsedNetlistLine(parsed_netlist_line.filename, [0])
                preprocess_pnl.type = ".PREPROCESS"
                preprocess_pnl.local_type = ".PREPROCESS"
                preprocess_pnl.add_known_object(parsed_netlist_line.preprocess_keyword_value[0].split()[0], "PREPROCESS_KEYWORD_VALUE")
                preprocess_pnl.add_value_to_value_list(parsed_netlist_line.preprocess_keyword_value[0].split()[1])

                # check if preprocess directive already in index
                preprocess_uid = -1
                for fl, objs in reader_state.scope_index.source_line_index:
                    for obj in objs:
                        last_uid = obj.uid
                        if isinstance(obj, Command):
                            if obj.command_type == ".PREPROCESS":
                                preprocess_uid = obj.uid

                # if preprocess directive not in index, add in as second index after TITLE object
                if preprocess_uid < 0:
                    XDMFactory.build_directive(preprocess_pnl, reader_state, language_definition, self._lib_sect_list)

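            # Defer control-device resolution until the whole netlist has been read;
            # subcircuit instances (X devices) are registered with their scope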
            if device.resolve_control_devices:
                control_device_handling_list.append((device, reader_state.scope_index))
            if device.device_type == "X":
                reader_state.add_subcircuit_device(device, reader_state.scope_index)
        elif XDMFactory.is_unknown_device(parsed_netlist_line):
            reader_state.add_unknown_pnl(parsed_netlist_line)
        elif XDMFactory.is_supported_directive(parsed_netlist_line):

            # for case of standalone .PARAM statements with no actual parameters!
            if parsed_netlist_line.type == ".PARAM" and not parsed_netlist_line.params_dict:
                parsed_netlist_line.type = "COMMENT"
                parsed_netlist_line.local_type = ""
                parsed_netlist_line.name = ".PARAM"
                parsed_netlist_line.params_dict["COMMENT"] = ".PARAM"
                XDMFactory.build_comment(parsed_netlist_line, reader_state)

            # BEWARE: hack for Spectre -- takes .PARAM params from inside subckt and moves them to paramsList for subckt
            # UPDATE: 2019-06-20 -- not sure if the hack is actually needed for Spectre translation, but it seems to 
            #                       cause problems with HSPICE translation (possibly PSPICE as well). But to err on the 
            #                       side of caution, will keep code but will check input language being Spectre 
            #                       before moving into the hacked code block
            elif parsed_netlist_line.type == ".PARAM" and not reader_state.scope_index.is_top_parent() and language_definition._language.upper() == "SPECTRE":
                reader_state.scope_index.subckt_command.set_prop(Types.subcircuitParamsList,
                                                                 parsed_netlist_line.params_dict)
            else:
                directive = XDMFactory.build_directive(parsed_netlist_line, reader_state, language_definition, self._lib_sect_list)
                if parsed_netlist_line.type == ".END":
                    reader_state.end_directive = directive
        elif parsed_netlist_line.type == ".MODEL":
            XDMFactory.build_model(parsed_netlist_line, reader_state, language_definition)
        elif parsed_netlist_line.type == ".INC" or parsed_netlist_line.type == ".INCLUDE":
            if not parsed_netlist_line.known_objects[Types.fileNameValue] in self._reader_state.master_inc_list:
                self._reader_state.add_master_inc_list(parsed_netlist_line.known_objects[Types.fileNameValue])
                self._reader_state.add_master_inc_list_scopes(reader_state.scope_index)
                inc_files_and_scopes.append((parsed_netlist_line.known_objects[Types.fileNameValue], reader_state.scope_index))
            elif parsed_netlist_line.known_objects[Types.fileNameValue] in self._reader_state.master_inc_list and reader_state.scope_index.is_top_parent():
                for ind, file_and_scope in enumerate(inc_files_and_scopes):
                    (filename, scope) = file_and_scope
                    if parsed_netlist_line.known_objects[Types.fileNameValue] == filename:
                        inc_files_and_scopes[ind] = (parsed_netlist_line.known_objects[Types.fileNameValue], reader_state.scope_index)

                inc_ind = self._reader_state.master_inc_list.index(parsed_netlist_line.known_objects[Types.fileNameValue])
                self._reader_state.master_inc_list_scopes[inc_ind] = reader_state.scope_index
                # For filenames enclosed in single quotes, ntpath doesn't seem to strip the trailing
                # single quote. Therefore, the single quotes will be stripped before passing
                # to ntpath.
            parsed_netlist_line.known_objects[Types.fileNameValue] = \
                ntpath.split(parsed_netlist_line.known_objects[Types.fileNameValue].replace("'", "").replace("\"", ""))[1]
            XDMFactory.build_directive(parsed_netlist_line, reader_state, language_definition, self._lib_sect_list)
        elif parsed_netlist_line.type == ".LIB":
            # Prepare lib_file name
            if parsed_netlist_line.known_objects.get(Types.fileNameValue):
                lib_file = parsed_netlist_line.known_objects[Types.fileNameValue].replace("'", '').replace('"', '')

                if not os.path.isfile(lib_file):
                    lib_file = os.path.join(os.path.dirname(self._file), lib_file)

            # Only parse .LIB statements if they are on the parent scope (the scope that 
            # includes the stuff that actually needs to be simulated). Other .LIB sections
            # are unused sections that aren't called by current simulation. This avoids
            # multiple parsing/writing of same files.
            if parsed_netlist_line.known_objects.get(Types.fileNameValue) and reader_state.scope_index.is_top_parent():

                # if .lib command calls a section within the same file, add it to list of sections to parse 
                # for the current file. otherwise, add file/section to list of libraries to be parsed later
                if os.path.normpath(self._file) == os.path.normpath(lib_file):
                    self._lib_sect_list.append(parsed_netlist_line.known_objects[Types.libEntry])
                    child = self._reader_state.scope_index.get_child_scope(parsed_netlist_line.known_objects[Types.libEntry])
                    if not child is None:
                        reader_state.scope_index.retroactive_add_statement(child)

                elif parsed_netlist_line.known_objects.get(Types.libEntry):
                    lib_files.append((parsed_netlist_line.known_objects[Types.fileNameValue],
                                      parsed_netlist_line.known_objects[Types.libEntry]))

                else:
                    lib_files.append((parsed_netlist_line.known_objects[Types.fileNameValue],
                                      parsed_netlist_line.known_objects[Types.fileNameValue]))

                if not lib_file in self._reader_state.lib_files_in_scope:
                    self._reader_state.add_lib_files_in_scope(lib_file)

                if lib_file in self._reader_state.lib_files_not_in_scope:
                    self._reader_state.remove_lib_files_not_in_scope(lib_file)

            elif parsed_netlist_line.known_objects.get(Types.fileNameValue) and not reader_state.scope_index.is_top_parent():
                # for the case of a .lib file that isn't used by the top scope, and is included
                # by a scope outside of the top scope, the file will need to be translated.
                # unique files (each file is saved at most once) are saved to a tracking list
                # (self._lib_files_not_in_scope) to be parsed at the very end.

                # in case a library section may be added in to top scope by a .lib statement later in the file. save
                # other .lib sects included, for retroactive processing
                if os.path.normpath(self._file) == os.path.normpath(lib_file):
                    self._reader_state.scope_index.add_child_scope_lib_sects(parsed_netlist_line.known_objects[Types.libEntry])

                # for libraries added in child scope in different files
                elif os.path.normpath(self._file) != os.path.normpath(lib_file):
                    if not lib_file in self._reader_state.lib_files_not_in_scope and not lib_file in self._reader_state.lib_files_in_scope:
                        # only file name needs to be saved - the whole file is outside the top scope,
                        # so the library section doesn't matter
                        self._reader_state.add_lib_files_not_in_scope(lib_file)
             
            if parsed_netlist_line.known_objects.get(Types.fileNameValue):
                parsed_netlist_line.known_objects[Types.fileNameValue] = \
                    ntpath.split(parsed_netlist_line.known_objects[Types.fileNameValue].replace("'", "").replace("\"", ""))[1]
            XDMFactory.build_directive(parsed_netlist_line, reader_state, language_definition, self._lib_sect_list)
        elif parsed_netlist_line.type == "DATA":
            XDMFactory.build_data(parsed_netlist_line, reader_state)
        elif parsed_netlist_line.type == "TITLE":
            XDMFactory.build_title(parsed_netlist_line, reader_state)
        elif parsed_netlist_line.type == "COMMENT":
            XDMFactory.build_comment(parsed_netlist_line, reader_state)
        # spectre simulator command.  defines language type
        elif parsed_netlist_line.type == "simulator":
            lang_type = parsed_netlist_line.params_dict.get('lang')
            # for x in lang_type :
            #    # print (x)
            #    # for y in lang_type[x]:
            #        # print (y, ":", lang_type[x][y])
            # print (lang_type['lang'])
            if 'spice' in lang_type:
                logging.info("Spectre Simulator Command Found.  Switching parse mode to spice.")
                xml_factory = XmlFactory(self._hspice_xml)
                xml_factory.read()
                self._language_definition = xml_factory.language_definition
                self._grammar_type = HSPICENetlistBoostParserInterface
                self._language_changed = True
            elif 'spectre' in lang_type:
                logging.info("Spectre Simulator Command Found.  Switching parse mode to spectre.")
                xml_factory = XmlFactory(self._spectre_xml)
                xml_factory.read()
                self._language_definition = xml_factory.language_definition
                self._grammar_type = SpectreNetlistBoostParserInterface
                self._language_changed = True
        else:
            logging.error("Unable to parse line: " + str(parsed_netlist_line.linenum))
            raise InvalidTypeException()

        return parsed_netlist_line.linenum[-1]

Example #4
    def convert_next_token(self, parsed_object, parsed_object_iter, pnl,
                           synthesized_pnls, pkg_dict):
        """
        Takes individual parsed objects from the parsed line object and
        populates the ParsedNetlistLine with all the information necessary
        to create a Statement.

        Many hacks are contained here.
        """
        temper_bool = False

        if (parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME
                or parsed_object.types[0]
                == SpiritCommon.data_model_type.DEFAULT_PARAM_NAME) and (
                    pnl.type == ".OPTION" or pnl.local_type == ".OPTIONS"):
            pnl.type = ".OPTIONS"
            pnl.local_type = ".OPTIONS"

            # GITLAB ISSUE #252: all options will now appear only once, in the top-level netlist
            if not self._top_level_file:
                pnl.flag_top_pnl = True

            # find the adm option name
            orig_param_name = parsed_object.value.upper()
            param_name = orig_param_name

            # find all adm packages that use this parameter
            pkgs = pkg_dict.get(param_name.upper())

            # TODO: Hack Bugzilla 2020, ITL1 => NONLIN MAXSTEP (default 200)
            # TODO: Hack Bugzilla 2020, ITL4 => NONLIN-TRAN MAXSTEP (default 20)

            # TODO: Hack Bugzilla 2020, VNTOL => ABSTOL

            param_name, pkgs = self.hack_packages_bugzilla_2020(
                param_name.upper(), pkgs)

            if pkgs and param_name.upper() in ["TNOM", "SCALE"]:
                if param_name.upper() == "TNOM":
                    self._tnom_defined = True

                pnl.name = ""
                if parsed_object.types[
                        0] == SpiritCommon.data_model_type.PARAM_NAME:
                    param_value_parsed_object = next(parsed_object_iter)
                    param_value = param_value_parsed_object.value
                else:
                    param_value = self.get_default(orig_param_name)

                pnl.add_known_object(pkgs[0], Types.optionPkgTypeValue)

                # converting .OPTIONS METHOD=DEFAULT to .OPTIONS TIMEINT METHOD=TRAP
                if param_name.upper() == "METHOD" and param_value.upper(
                ) == "DEFAULT":
                    param_value = "TRAP"

                pnl.add_param_value_pair(param_name.upper(), param_value)
                if "COMMENT" in pnl.params_dict:
                    pnl.add_inline_comment(pnl.params_dict["COMMENT"])
                    pnl.params_dict.pop("COMMENT")

                for otherPkg in pkgs[1:]:
                    pnl_synth = ParsedNetlistLine(
                        pnl.filename,
                        pnl.linenum)  # what to do with line numbers?
                    pnl_synth.type = ".OPTIONS"
                    pnl_synth.add_known_object(otherPkg,
                                               Types.optionPkgTypeValue)
                    pnl_synth.add_param_value_pair(param_name.upper(),
                                                   param_value)
                    synthesized_pnls.append(pnl_synth)

            else:
                logging.warning("In file:\"" +
                                str(os.path.basename(pnl.filename)) +
                                "\" at line:" + str(pnl.linenum) +
                                ". Could not accept .OPTIONS \"" +
                                orig_param_name.upper() +
                                "\". Retained (as a comment). Continuing.")
                param_value_parsed_object = next(parsed_object_iter)
                if pnl.known_objects:
                    pnl.type = ".OPTIONS"
                    pnl.name = ""
                    if pnl.comment:
                        pnl.add_inline_comment(pnl.comment + " " +
                                               ".OPTIONS " + orig_param_name +
                                               " " +
                                               param_value_parsed_object.value)
                    else:
                        pnl.add_inline_comment(".OPTIONS " + orig_param_name +
                                               " " +
                                               param_value_parsed_object.value)
                else:
                    pnl.type = "COMMENT"
                    pnl.name = ".OPTIONS " + orig_param_name
                    if "COMMENT" in pnl.params_dict:
                        pnl.add_comment(pnl.params_dict["COMMENT"] + " " +
                                        ".OPTIONS " + orig_param_name + " " +
                                        param_value_parsed_object.value)
                    else:
                        pnl.add_comment(".OPTIONS " + orig_param_name + " " +
                                        param_value_parsed_object.value)

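        # HSPICE .IF/.ELSEIF/.ELSE/.ENDIF blocks are not translated; each
        # directive is emitted as a comment and the nesting depth is tracked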
        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper(
                ) == ".IF":

            pnl.type = "COMMENT"
            pnl.add_comment(parsed_object.value)

            self._if_statement = True
            self._nested_if_statement_count += 1

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper(
                ) == ".ELSEIF":

            pnl.type = "COMMENT"
            pnl.add_comment(parsed_object.value)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper(
                ) == ".ELSE":

            pnl.type = "COMMENT"
            pnl.add_comment(parsed_object.value)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper(
                ) == ".ENDIF":

            pnl.type = "COMMENT"
            pnl.add_comment(parsed_object.value)

            self._nested_if_statement_count -= 1
            if self._nested_if_statement_count == 0:
                self._if_statement = False
                self._comment_end_of_if_statement = True
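            # Illustration: for nested conditionals such as
            # ".IF (a) ... .IF (b) ... .ENDIF ... .ENDIF", the counter climbs to 2 and
            # every enclosed line is emitted as a COMMENT; only the outermost .ENDIF,
            # which brings the counter back to 0, ends the commented-out region.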

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper(
                ) == ".MACRO":
            pnl.type = ".SUBCKT"

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper(
                ) == ".EOM":
            pnl.type = ".ENDS"

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper(
                ) == ".MEAS":
            pnl.type = ".MEASURE"

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and (
                    parsed_object.value.upper() == ".PROBE"
                    or parsed_object.value.upper() == ".PROBE64"):
            pnl.type = ".PRINT"
            pnl.add_known_object("TRAN",
                                 Types.analysisTypeValue)  # default tran type

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE:

            # remove [] from HSPICE print variables -- eventually this will be replaced in the writer
            output_variable_clean = self.clean_hspice_output_variable(
                parsed_object.value)

            pnl.add_output_variable_value(output_variable_clean)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.MODEL_TYPE and pnl.type == ".MODEL":

            # convert hspice type into the general type supported by the ADM
            adm_type = hspice_to_adm_model_type_map.get(
                parsed_object.value.upper())

            # if not mapped, then use current value
            if not adm_type:
                adm_type = parsed_object.value.upper()

            pnl.add_known_object(adm_type, Types.modelType)

            # create a pnl for model binning option
            if "." in pnl.name:
                pnl_synth = ParsedNetlistLine(
                    pnl.filename, [0])  # what to do with line numbers?
                pnl_synth.type = ".OPTIONS"
                pnl_synth.local_type = ".OPTIONS"
                pnl_synth.add_known_object("PARSER", Types.optionPkgTypeValue)
                pnl_synth.add_param_value_pair("model_binning", "true")
                pnl_synth.flag_top_pnl = True
                synthesized_pnls.append(pnl_synth)
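                # Illustration (hypothetical model card): a binned model name such as
                # "nch.1" contains a ".", so an ".OPTIONS PARSER model_binning=true"
                # pnl flagged for the top circuit level is synthesized alongside the
                # model itself.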

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.GENERALNODE and pnl.type not in [
                    ".IC", ".DCVOLT", ".NODESET"
                ]:
            output_node = parsed_object.value.replace(".", ":")

            if BoostParserInterface.boost_xdm_map_dict[parsed_object.types[
                    0]] in pnl.known_objects and pnl.type == ".GLOBAL":
                pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum)
                pnl_synth.type = ".GLOBAL"
                pnl_synth.local_type = ".GLOBAL"
                pnl_synth.add_known_object(
                    output_node, BoostParserInterface.boost_xdm_map_dict[
                        parsed_object.types[0]])
                synthesized_pnls.append(pnl_synth)
            else:
                pnl.add_known_object(
                    output_node, BoostParserInterface.boost_xdm_map_dict[
                        parsed_object.types[0]])

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.FUNC_NAME_VALUE:
            # For lines with mixed parameter and function statements in HSPICE, separate them out
            # into different ParsedNetlistLine objects and store them in synthesized pnls
            if pnl.params_dict or "FUNC_EXPRESSION" in pnl.known_objects:
                pnl_synth = ParsedNetlistLine(
                    pnl.filename, pnl.linenum)  # what to do with line numbers?
                pnl_synth.type = ".FUNC"
                pnl_synth.local_type = ".FUNC"
                pnl_synth.add_known_object(
                    parsed_object.value,
                    BoostParserInterface.boost_xdm_map_dict[
                        parsed_object.types[0]])
                func_arg_parsed_object = next(parsed_object_iter)
                while func_arg_parsed_object.types[
                        0] == SpiritCommon.data_model_type.FUNC_ARG_VALUE:
                    pnl_synth.add_func_arg_value(func_arg_parsed_object.value)
                    func_arg_parsed_object = next(parsed_object_iter)
                func_expression_parsed_object = func_arg_parsed_object

                temper_bool = self.hack_detect_temper(
                    func_expression_parsed_object.value)
                processed_value = self.hack_ternary_operator(
                    func_expression_parsed_object.value)
                processed_value = self.hack_exponentiation_symbol(
                    processed_value)

                pnl_synth.add_known_object(
                    processed_value, BoostParserInterface.boost_xdm_map_dict[
                        func_expression_parsed_object.types[0]])
                synthesized_pnls.append(pnl_synth)
            else:
                pnl.type = ".FUNC"
                pnl.local_type = ".FUNC"
                pnl.add_known_object(
                    parsed_object.value,
                    BoostParserInterface.boost_xdm_map_dict[
                        parsed_object.types[0]])
                func_arg_parsed_object = next(parsed_object_iter)
                while func_arg_parsed_object.types[
                        0] == SpiritCommon.data_model_type.FUNC_ARG_VALUE:
                    pnl.add_func_arg_value(func_arg_parsed_object.value)
                    func_arg_parsed_object = next(parsed_object_iter)
                func_expression_parsed_object = func_arg_parsed_object

                temper_bool = self.hack_detect_temper(
                    func_expression_parsed_object.value)
                processed_value = self.hack_ternary_operator(
                    func_expression_parsed_object.value)
                processed_value = self.hack_exponentiation_symbol(
                    processed_value)

                pnl.add_known_object(
                    processed_value, BoostParserInterface.boost_xdm_map_dict[
                        func_expression_parsed_object.types[0]])
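            # Illustration (hypothetical line): on ".PARAM p1=1 f(x)='x*2'", the
            # function definition is split into a synthesized .FUNC pnl because a
            # parameter is already on the original pnl; if the function had appeared
            # first, the original pnl itself would become the .FUNC.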

        elif parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME:

            param_value_parsed_object = next(parsed_object_iter)

            if param_value_parsed_object.types[
                    0] != SpiritCommon.data_model_type.PARAM_VALUE:
                logging.error(
                    "Line(s):" + str(pnl.linenum) +
                    ". Parser passed wrong token.  Expected PARAM_VALUE.  Got "
                    + str(param_value_parsed_object.types[0]))
                raise Exception(
                    "Next Token is not a PARAM_VALUE.  Something went wrong!")

            if pnl.type == ".FUNC":
                # Same as above, for lines with mixed parameter and function statements in HSPICE, separate them out
                # into different ParsedNetlistLine objects and store them in synthesized pnls
                if synthesized_pnls:
                    temper_bool = self.hack_detect_temper(
                        param_value_parsed_object.value)
                    processed_value = self.hack_ternary_operator(
                        param_value_parsed_object.value)
                    processed_value = self.hack_exponentiation_symbol(
                        processed_value)

                    synthesized_pnls[-1].add_param_value_pair(
                        parsed_object.value.upper(), processed_value)
                else:
                    pnl_synth = ParsedNetlistLine(
                        pnl.filename,
                        pnl.linenum)  # what to do with line numbers?
                    pnl_synth.type = ".PARAM"
                    pnl_synth.local_type = ".PARAM"

                    temper_bool = self.hack_detect_temper(
                        param_value_parsed_object.value)
                    processed_value = self.hack_ternary_operator(
                        param_value_parsed_object.value)
                    processed_value = self.hack_exponentiation_symbol(
                        processed_value)

                    pnl_synth.add_param_value_pair(parsed_object.value.upper(),
                                                   processed_value)
                    synthesized_pnls.append(pnl_synth)
            else:
                temper_bool = self.hack_detect_temper(
                    param_value_parsed_object.value)
                processed_value = self.hack_ternary_operator(
                    param_value_parsed_object.value)
                if pnl.type in [
                        ".PARAM", ".SUBCKT", ".MODEL", ".MACRO",
                        ".GLOBAL_PARAM"
                ] or pnl.type in supported_devices:
                    processed_value = self.curly_braces_for_expressions(
                        processed_value)
                processed_value = self.hack_exponentiation_symbol(
                    processed_value)

                pnl.add_param_value_pair(parsed_object.value.upper(),
                                         processed_value)
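                # Illustration (assumed behavior of curly_braces_for_expressions): on a
                # .PARAM line, a hypothetical value such as 'w*2' would be stored as
                # "{w*2}", matching the curly-brace expression syntax Xyce expects.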

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.PARAM_VALUE and pnl.params_dict:
            # Same as above, for lines with mixed parameter and function statements in HSPICE, separate them out
            # into different ParsedNetlistLine objects and store them in synthesized pnls
            if pnl.type == ".FUNC":
                last_key = list(synthesized_pnls[-1].params_dict.keys())[-1]
                prev_param_value = synthesized_pnls[-1].params_dict[last_key]

                temper_bool = self.hack_detect_temper(parsed_object.value)
                processed_value = self.hack_ternary_operator(
                    parsed_object.value)
                processed_value = self.hack_exponentiation_symbol(
                    processed_value)

                synthesized_pnls[-1].params_dict[
                    last_key] = prev_param_value + " " + processed_value
            else:
                last_key = list(pnl.params_dict.keys())[-1]
                prev_param_value = pnl.params_dict[last_key]

                temper_bool = self.hack_detect_temper(parsed_object.value)
                processed_value = self.hack_ternary_operator(
                    parsed_object.value)
                if pnl.type in [
                        ".PARAM", ".SUBCKT", ".MODEL", ".MACRO",
                        ".GLOBAL_PARAM"
                ] or pnl.type in supported_devices:
                    processed_value = self.curly_braces_for_expressions(
                        processed_value)
                processed_value = self.hack_exponentiation_symbol(
                    processed_value)

                pnl.params_dict[
                    last_key] = prev_param_value + " " + processed_value

        elif parsed_object.types[0] == SpiritCommon.data_model_type.COMMENT:
            pnl.type = "COMMENT"
            if parsed_object.value.startswith("//"):
                pnl.name = parsed_object.value[2:]
                pnl.add_comment(parsed_object.value[2:])
            else:
                pnl.name = parsed_object.value[1:]
                pnl.add_comment(parsed_object.value[1:])

        elif parsed_object.types == [
                SpiritCommon.data_model_type.MODEL_NAME,
                SpiritCommon.data_model_type.VALUE
        ]:
            lst = []
            for typ in parsed_object.types:
                lst.append(BoostParserInterface.boost_xdm_map_dict[typ])

            temper_bool = self.hack_detect_temper(parsed_object.value)
            processed_value = self.hack_ternary_operator(parsed_object.value)
            processed_value = self.hack_exponentiation_symbol(processed_value)

            pnl.add_lazy_statement(processed_value, lst)

            # for resistors, check if the resistance is an ABM expression involving a voltage
            # at a node or between two nodes. Change into a B-element if it is.
            if pnl.type == "R" and self.hack_detect_abm(parsed_object.value):
                pnl.type = "B"
                pnl.local_type = "B"
                processed_value = self.hack_exponentiation_symbol(
                    parsed_object.value.strip("'"))
                pnl.add_known_object(
                    "{V(%s,%s)/(%s)}" %
                    (pnl.known_objects["POS_NODE_NAME"],
                     pnl.known_objects["NEG_NODE_NAME"], processed_value),
                    Types.expression)
                pnl.add_known_object(
                    "{V(%s,%s)/(%s)}" %
                    (pnl.known_objects["POS_NODE_NAME"],
                     pnl.known_objects["NEG_NODE_NAME"], processed_value),
                    Types.current)
                pnl.lazy_statement = {}
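                # Illustration (hypothetical device line): for "R1 a b 'V(a,b)/1k'",
                # the resistance is an ABM expression, so the pnl is rewritten as a
                # B-element whose expression/current divide V(POS_NODE, NEG_NODE) by
                # the original resistance expression, and the lazy statement is cleared.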

        elif parsed_object.types[0] in [
                SpiritCommon.data_model_type.DC_VALUE_VALUE,
                SpiritCommon.data_model_type.AC_MAG_VALUE,
                SpiritCommon.data_model_type.AC_PHASE_VALUE
        ]:
            processed_value = self.curly_braces_for_expressions(
                parsed_object.value)

            pnl.add_known_object(
                processed_value, BoostParserInterface.boost_xdm_map_dict[
                    parsed_object.types[0]])

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DATA_TABLE_NAME:

            if pnl.type == ".DATA":
                pnl.add_known_object(
                    parsed_object.value,
                    BoostParserInterface.boost_xdm_map_dict[
                        parsed_object.types[0]])

            elif pnl.type == ".TRAN":
                pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum)
                pnl_synth.type = ".STEP"
                pnl_synth.local_type = ".STEP"

                pnl_synth.add_sweep_param_value("DATA")
                pnl_synth.add_sweep_param_value(parsed_object.value)
                synthesized_pnls.append(pnl_synth)

            elif pnl.type == ".DC" or pnl.type == ".AC":
                pnl.add_sweep_param_value("DATA")
                pnl.add_sweep_param_value(parsed_object.value)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.TRANS_REF_NAME:
            processed_value = self.curly_braces_for_expressions(
                parsed_object.value)
            pnl.add_transient_value(processed_value)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.CONDITIONAL_STATEMENT:

            comment = pnl.params_dict[Types.comment] + parsed_object.value
            pnl.add_comment(comment)

        else:
            XyceNetlistBoostParserInterface.convert_next_token(
                parsed_object, parsed_object_iter, pnl, synthesized_pnls)

        # if "TEMPER" special variable detected, a .GLOBAL_PARAM statement pnl will be synthesized and flagged
        # to indicate it belongs at the top circuit level
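        # Illustration (hypothetical value): a parameter or function expression such as
        # 'TEMPER+5' trips hack_detect_temper, so a ".GLOBAL_PARAM XYCE_TEMPER=25" pnl
        # is synthesized and flagged for the top circuit level.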
        if temper_bool:
            hack_pnl_synth = ParsedNetlistLine(pnl.filename, [0])
            hack_pnl_synth.type = ".GLOBAL_PARAM"
            hack_pnl_synth.local_type = ".GLOBAL_PARAM"
            hack_pnl_synth.add_param_value_pair("XYCE_TEMPER", "25")
            hack_pnl_synth.flag_top_pnl = True
            synthesized_pnls.append(hack_pnl_synth)

    def convert_next_token(self, parsed_object, parsed_object_iter, pnl, synthesized_pnls, pkg_dict):

        if parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME and pnl.type == ".OPTIONS":

            # find the adm option name
            mapped_name = tspice_to_adm_opt_name_map.get(parsed_object.value.upper(), parsed_object.value.upper())

            # find all adm packages that use this parameter
            pkgs = pkg_dict.get(mapped_name.upper())

            if pkgs:
                param_value_parsed_object = next(parsed_object_iter)

                if param_value_parsed_object.types[0] != SpiritCommon.data_model_type.PARAM_VALUE:
                    logging.error("In file:\"" + pnl.filename + "\" at line:" + str(pnl.linenum) + ". Parser passed wrong token.  Expected PARAM_VALUE.  Got " + str(param_value_parsed_object.types[0]))
                    raise Exception("Next Token is not a PARAM_VALUE.  Something went wrong!")

                pnl.add_known_object(pkgs[0], Types.optionPkgTypeValue)

                param_value = param_value_parsed_object.value

                # converting .OPTIONS METHOD=DEFAULT to .OPTIONS TIMEINT METHOD=TRAP
                if mapped_name.upper() == "METHOD" and param_value.upper() == "DEFAULT":
                    param_value = "TRAP"

                pnl.add_param_value_pair(mapped_name.upper(), param_value)

                for otherPkg in pkgs[1:]:
                    pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum)  # what to do with line numbers?
                    pnl_synth.type = ".OPTIONS"
                    pnl_synth.add_known_object(otherPkg, Types.optionPkgTypeValue)
                    pnl_synth.add_param_value_pair(mapped_name.upper(), param_value)
                    synthesized_pnls.append(pnl_synth)
            else:
                logging.warning("In file:\"" + pnl.filename + "\" at line:" + str(pnl.linenum) + ". Could not accept .OPTIONS \"" + mapped_name.upper() + "\". Retained (as a comment). Continuing.")
                pnl.type = "COMMENT"
                pnl.name = ".OPTIONS " + mapped_name
                pnl.add_comment(".OPTIONS " + mapped_name)

        elif parsed_object.types[0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper() == ".LIB":
            pnl.type = ".INC"

        elif parsed_object.types[0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value == ".PARAM":
            pnl.type = ".GLOBAL_PARAM"

        elif parsed_object.types[0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE:

            # remove [] from TSPICE print variables -- eventually this will be replaced in the writer
            output_variable_clean = parsed_object.value
            output_variable_clean = output_variable_clean.replace("[", "")
            output_variable_clean = output_variable_clean.replace("]", "")
            output_variable_clean = output_variable_clean.replace("N(", "V(")
            output_variable_clean = output_variable_clean.replace("N(", "V(")

            pnl.add_output_variable_value(output_variable_clean)

        elif parsed_object.types[0] == SpiritCommon.data_model_type.MODEL_TYPE and pnl.type == ".MODEL":

            # convert tspice type into the general type supported by the ADM
            adm_type = tspice_to_adm_model_type_map.get(parsed_object.value.upper())

            # if not mapped, then use current value
            if adm_type is None:
                adm_type = parsed_object.value.upper()

            pnl.add_known_object(adm_type, Types.modelType)
        else:
            XyceNetlistBoostParserInterface.convert_next_token(parsed_object, parsed_object_iter, pnl, synthesized_pnls)
    def convert_next_token(self, parsed_object, parsed_object_iter, pnl,
                           synthesized_pnls, pkg_dict):
        """
        Takes individual parsed objects from the parsed line object

        Populate ParsedNetlistLine class with all information necessary to create a Statement

        Many hacks contained here
        """

        if (parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME
                or parsed_object.types[0]
                == SpiritCommon.data_model_type.DEFAULT_PARAM_NAME
            ) and pnl.type == ".OPTIONS":

            # find the adm option name
            orig_param_name = parsed_object.value.upper()
            param_name = orig_param_name

            # find all adm packages that use this parameter
            pkgs = pkg_dict.get(param_name.upper())

            # TODO: Hack Bugzilla 2020, ITL1 => NONLIN MAXSTEP (default 200)
            # TODO: Hack Bugzilla 2020, ITL4 => NONLIN-TRAN MAXSTEP (default 20)

            # TODO: Hack Bugzilla 2020, VNTOL => ABSTOL

            param_name, pkgs = self.hack_packages_bugzilla_2020(
                param_name.upper(), pkgs)

            if pkgs:
                if parsed_object.types[
                        0] == SpiritCommon.data_model_type.PARAM_NAME:
                    param_value_parsed_object = next(parsed_object_iter)
                    param_value = param_value_parsed_object.value
                else:
                    param_value = self.get_default(orig_param_name)

                pnl.add_known_object(pkgs[0], Types.optionPkgTypeValue)

                # converting .OPTIONS METHOD=DEFAULT to .OPTIONS TIMEINT METHOD=TRAP
                if param_name.upper() == "METHOD" and param_value.upper(
                ) == "DEFAULT":
                    param_value = "TRAP"

                pnl.add_param_value_pair(param_name.upper(), param_value)

                for otherPkg in pkgs[1:]:
                    pnl_synth = ParsedNetlistLine(
                        pnl.filename,
                        pnl.linenum)  # what to do with line numbers?
                    pnl_synth.type = ".OPTIONS"
                    pnl_synth.add_known_object(otherPkg,
                                               Types.optionPkgTypeValue)
                    pnl_synth.add_param_value_pair(param_name.upper(),
                                                   param_value)
                    synthesized_pnls.append(pnl_synth)

            else:
                logging.warning("In file:\"" +
                                str(os.path.basename(pnl.filename)) +
                                "\" at line:" + str(pnl.linenum) +
                                ". Could not accept .OPTIONS \"" +
                                orig_param_name.upper() +
                                "\". Retained (as a comment). Continuing.")
                pnl.type = "COMMENT"
                pnl.name = ".OPTIONS " + orig_param_name
                pnl.add_comment(".OPTIONS " + orig_param_name)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper(
                ) == ".LIB":
            pnl.type = ".INC"

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value == ".PARAM":
            pnl.type = ".GLOBAL_PARAM"

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and (
                    parsed_object.value.upper() == ".PROBE"
                    or parsed_object.value.upper() == ".PROBE64"):
            pnl.type = ".PRINT"
            pnl.add_known_object("TRAN",
                                 Types.analysisTypeValue)  # default tran type

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE:

            # remove [] from PSPICE print variables -- eventually this will be replaced in the writer
            output_variable_clean = self.clean_pspice_output_variable(
                parsed_object.value)

            pnl.add_output_variable_value(output_variable_clean)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.MODEL_TYPE and pnl.type == ".MODEL":

            # convert pspice type into the general type supported by the ADM
            adm_type = pspice_to_adm_model_type_map.get(
                parsed_object.value.upper())

            # if not mapped, then use current value
            if not adm_type:
                adm_type = parsed_object.value.upper()

            pnl.add_known_object(adm_type, Types.modelType)

        elif parsed_object.types[
                0] == SpiritCommon.data_model_type.GENERALNODE and pnl.type not in [
                    ".IC", ".DCVOLT", ".NODESET"
                ]:
            output_node = parsed_object.value.replace(".", ":")
            pnl.add_known_object(
                output_node, BoostParserInterface.boost_xdm_map_dict[
                    parsed_object.types[0]])

        else:
            XyceNetlistBoostParserInterface.convert_next_token(
                parsed_object, parsed_object_iter, pnl, synthesized_pnls)
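
# A minimal driver sketch (not part of the source; `interface`, `parsed_line`,
# `pkg_dict`, `filename`, and `linenum` are assumptions for illustration): each
# convert_next_token variant is called once per token of a single netlist line,
# and the token iterator is passed along because some branches consume extra
# tokens with next().
#
#     pnl = ParsedNetlistLine(filename, linenum)
#     synthesized_pnls = []
#     parsed_object_iter = iter(parsed_line)
#     for parsed_object in parsed_object_iter:
#         interface.convert_next_token(parsed_object, parsed_object_iter,
#                                      pnl, synthesized_pnls, pkg_dict)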