def convert_next_token(parsed_object, parsed_object_iter, pnl, synthesized_pnls): """ Takes individual parsed objects from the parsed line object Populate ParsedNetlistLine class with all information necessary to create a Statement Many hacks contained here """ if parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME or parsed_object.types[ 0] == SpiritCommon.data_model_type.DEVICE_TYPE: pnl.local_type = parsed_object.value.upper() if parsed_object.value.upper() == ".TR": pnl.type = ".TRAN" elif parsed_object.value.upper() == ".INITCOND": pnl.type = ".IC" else: pnl.type = parsed_object.value.upper() elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DEVICE_NAME or ( parsed_object.types[0] == SpiritCommon.data_model_type.MODEL_NAME and pnl.type == ".MODEL"): pnl.name = parsed_object.value elif parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME: param_value_parsed_object = next(parsed_object_iter) if param_value_parsed_object.types[ 0] != SpiritCommon.data_model_type.PARAM_VALUE: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected PARAM_VALUE. Got " + str(param_value_parsed_object.types[0])) raise Exception( "Next Token is not a PARAM_VALUE. Something went wrong!") pnl.add_param_value_pair(parsed_object.value.upper(), param_value_parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.STANDALONE_PARAM: pnl.add_param_value_pair(parsed_object.value.upper(), "1") elif parsed_object.types[ 0] == SpiritCommon.data_model_type.MEASURE_PARAM_NAME: param_value_parsed_object = next(parsed_object_iter) if param_value_parsed_object.types[ 0] != SpiritCommon.data_model_type.MEASURE_PARAM_VALUE: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected MEASURE_PARAM_VALUE. Got " + str(param_value_parsed_object.types[0])) raise Exception( "Next Token is not a MEASURE_PARAM_VALUE. Something went wrong!" ) pnl.add_meas_param_value_pair( list(pnl.meas_dict.items())[-1][0], parsed_object.value.upper(), param_value_parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.VARIABLE_EXPR_OR_VALUE: sentinel = object() param_value_parsed_object = next(parsed_object_iter, sentinel) hasNext = param_value_parsed_object is not sentinel if not hasNext: pnl.add_meas_param_value_pair( list(pnl.meas_dict.items())[-1][0], parsed_object.value.upper(), "") else: if param_value_parsed_object.types[ 0] == SpiritCommon.data_model_type.VARIABLE_EXPR_OR_VALUE: pnl.add_meas_param_value_pair( list(pnl.meas_dict.items())[-1][0], parsed_object.value.upper(), param_value_parsed_object.value) elif param_value_parsed_object.types[ 0] == SpiritCommon.data_model_type.MEASURE_PARAM_NAME: pnl.add_meas_param_value_pair( list(pnl.meas_dict.items())[-1][0], parsed_object.value.upper(), "") param_value_parsed_object_2 = next(parsed_object_iter) if param_value_parsed_object_2.types[ 0] != SpiritCommon.data_model_type.MEASURE_PARAM_VALUE: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected MEASURE_PARAM_VALUE. Got " + str(param_value_parsed_object_2.types[0])) raise Exception( "Next Token is not a MEASURE_PARAM_VALUE. Something went wrong!" ) pnl.add_meas_param_value_pair( list(pnl.meas_dict.items())[-1][0], param_value_parsed_object.value.upper(), param_value_parsed_object_2.value) else: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected VARIABLE_EXPR_OR_VALUE or MEASURE_PARAM_VALUE. 
Got " + str(param_value_parsed_object.types[0])) raise Exception( "Next Token is not a VARIABLE_EXPR_OR_VALUE or MEASURE_PARAM_VALUE. Something went wrong!" ) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.PARAM_VALUE and pnl.params_dict: last_key = list(pnl.params_dict.keys())[-1] prev_param_value = pnl.params_dict[last_key] pnl.params_dict[ last_key] = prev_param_value + " " + parsed_object.value elif parsed_object.types[ 0] == SpiritCommon.data_model_type.CONTROL_DEVICE: control_dev_name_obj = next(parsed_object_iter) if control_dev_name_obj.types[ 0] != SpiritCommon.data_model_type.CONTROL_DEVICE_NAME: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected CONTROL_DEVICE_NAME. Got " + str(control_dev_name_obj.types[0])) raise Exception( "Next Token is not a CONTROL_DEVICE_NAME. Something went wrong!" ) pnl.add_control_param_value(parsed_object.value + control_dev_name_obj.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.SWEEP_PARAM_VALUE: pnl.add_sweep_param_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.SCHEDULE_PARAM_VALUE: pnl.add_schedule_param_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.LIST_PARAM_VALUE: pnl.add_value_to_value_list(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.TABLE_PARAM_VALUE: pnl.add_table_param_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.POLY_PARAM_VALUE: pnl.add_poly_param_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DATA_PARAM_NAME: pnl.add_value_to_value_list(parsed_object.value) pnl_synth = ParsedNetlistLine(pnl.filename, [pnl.linenum[0] - 1]) pnl_synth.type = ".GLOBAL_PARAM" pnl_synth.local_type = ".GLOBAL_PARAM" pnl_synth.add_param_value_pair(parsed_object.value.upper(), "0") synthesized_pnls.append(pnl_synth) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DATA_PARAM_VALUE: if not pnl.type: pnl.type = "DATA" pnl.name = parsed_object.value else: pnl.name += " " + parsed_object.value pnl.add_value_to_value_list(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.MEASURE_TYPE or parsed_object.types[ 0] == SpiritCommon.data_model_type.MEASURE_QUALIFIER: pnl.add_meas_analysis_condition(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.CONTROL_PARAM_VALUE: pnl.add_control_param_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.SUBCKT_DIRECTIVE_PARAM_VALUE: pnl.add_subckt_directive_param_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.SUBCKT_DEVICE_PARAM_VALUE: pnl.add_subckt_device_param_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.TRANS_REF_NAME: pnl.add_transient_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE: pnl.add_output_variable_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.FUNC_ARG_VALUE: pnl.add_func_arg_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.INLINE_COMMENT: pnl.add_inline_comment(parsed_object.value) elif parsed_object.types[0] == SpiritCommon.data_model_type.TITLE: pnl.type = "TITLE" pnl.name = parsed_object.value pnl.add_comment(parsed_object.value) elif parsed_object.types[0] == SpiritCommon.data_model_type.COMMENT: pnl.type = "COMMENT" try: 
            pnl.name = parsed_object.value[1:]
            pnl.add_comment(parsed_object.value[1:])
        except UnicodeDecodeError:
            logging.warning("Non-ASCII character detected in the comment within file '" +
                            str(os.path.basename(pnl.filename)) + "' " +
                            "at line number(s) " + str(pnl.linenum))
            warning_msg = " Non-ascii character encountered on line " + str(pnl.linenum) + ". Omitting... "
            pnl.name = warning_msg
            pnl.add_comment(warning_msg)
    elif (parsed_object.types[0] in [SpiritCommon.data_model_type.VOLTAGE,
                                     SpiritCommon.data_model_type.CURRENT]
          and pnl.type in [".IC", ".DCVOLT", ".NODESET"]):
        initial_condition_dict = {}
        initial_condition_dict[Types.voltageOrCurrent] = parsed_object.value
        pnl.initial_conditions_list.append(initial_condition_dict)
    elif (parsed_object.types[0] == SpiritCommon.data_model_type.GENERALNODE
          and pnl.type in [".IC", ".DCVOLT", ".NODESET"]):
        output_node = parsed_object.value.replace(".", ":")
        if not pnl.initial_conditions_list:
            initial_condition_dict = {}
            initial_condition_dict[Types.voltageOrCurrent] = "V"
            initial_condition_dict[Types.generalNodeName] = output_node
            pnl.initial_conditions_list.append(initial_condition_dict)
        elif Types.generalNodeName in pnl.initial_conditions_list[-1]:
            initial_condition_dict = {}
            initial_condition_dict[Types.voltageOrCurrent] = "V"
            initial_condition_dict[Types.generalNodeName] = output_node
            pnl.initial_conditions_list.append(initial_condition_dict)
        else:
            pnl.initial_conditions_list[-1][Types.generalNodeName] = output_node
    elif (parsed_object.types[0] == SpiritCommon.data_model_type.GENERAL_VALUE
          and pnl.type in [".IC", ".DCVOLT", ".NODESET"]):
        pnl.initial_conditions_list[-1][Types.generalValue] = parsed_object.value
    elif len(parsed_object.types) == 1:
        pnl.add_known_object(parsed_object.value,
                             BoostParserInterface.boost_xdm_map_dict[parsed_object.types[0]])
        if parsed_object.types[0] == SpiritCommon.data_model_type.TEMPERATURENODE:
            pnl.add_param_value_pair("TNODEOUT", "1")
    else:
        lst = []
        for typ in parsed_object.types:
            lst.append(BoostParserInterface.boost_xdm_map_dict[typ])
        pnl.add_lazy_statement(parsed_object.value, lst)
    return
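
# Illustrative sketch (not part of xdm): the PARAM_NAME branch above relies on the
# parser emitting a PARAM_VALUE token immediately after each PARAM_NAME token.
# The _Tok class and _pair_params_sketch helper below are hypothetical stand-ins;
# they only demonstrate that pairing contract with plain Python objects.
class _Tok(object):
    """Hypothetical stand-in for a parser token exposing .types and .value."""

    def __init__(self, types, value):
        self.types = types
        self.value = value


def _pair_params_sketch(tokens):
    """Collect NAME=VALUE pairs from a token stream, failing if a value is missing."""
    pairs = {}
    token_iter = iter(tokens)
    for token in token_iter:
        if token.types[0] == "PARAM_NAME":
            value_token = next(token_iter, None)
            if value_token is None or value_token.types[0] != "PARAM_VALUE":
                raise Exception("Next Token is not a PARAM_VALUE. Something went wrong!")
            pairs[token.value.upper()] = value_token.value
    return pairs


# Example: _pair_params_sketch([_Tok(["PARAM_NAME"], "temp"), _Tok(["PARAM_VALUE"], "27")])
# returns {"TEMP": "27"}.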
def convert_next_token(self, parsed_object, parsed_object_iter, pnl, synthesized_pnls): """ Takes individual parsed objects from the parsed line object Populate ParsedNetlistLine class with all information necessary to create a Statement Many hacks contained here """ if parsed_object.types[ 0] == SpiritCommon.data_model_type.BLOCK_DELIMITER: if parsed_object.value == "{": self._delimited_block = True else: self._delimited_block = False if self._if_statement: self._if_statement = False self._comment_end_of_if_statement = True elif self._if_statement: pnl.type = "COMMENT" elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME or parsed_object.types[ 0] == SpiritCommon.data_model_type.DEVICE_TYPE: if spectre_to_adm_model_type_map.get(parsed_object.value): pnl.type = spectre_to_adm_model_type_map[parsed_object.value] pnl.local_type = parsed_object.value else: logging.warning( "Possible error. Spectre type not recognized: " + str(parsed_object.value)) # If directive is .GLOBAL, for now get rid of first listed node. This first node is # considered a ground node. if pnl.type == ".GLOBAL": next(parsed_object_iter) if pnl.type == ".IF": pnl.type = "COMMENT" pnl.add_comment(parsed_object.value) self._if_statement = True elif pnl.type == ".ELSE": pnl.type = "COMMENT" pnl.add_comment(parsed_object.value) self._if_statement = True self._comment_end_of_if_statement = False elif pnl.type == ".ELSEIF": pnl.type = "COMMENT" pnl.add_comment(parsed_object.value) self._if_statement = True self._comment_end_of_if_statement = False elif parsed_object.types[ 0] == SpiritCommon.data_model_type.MODEL_NAME and not pnl.type == ".MODEL": if spectre_to_adm_model_type_map.get(parsed_object.value): pnl.type = spectre_to_adm_model_type_map[parsed_object.value] pnl.local_type = parsed_object.value else: pnl.add_known_object(parsed_object.value, Types.modelName) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.MODEL_TYPE and pnl.type == ".MODEL": adm_type = spectre_to_adm_model_type_map.get(parsed_object.value) # For Spectre, different models aren't distinguished by a "LEVEL" parameter. Instead, # it uses a name to distinguish what model is being used (ex., bsimsoi instead of # LEVEL=10, or vbic instead of LEVEL=10). 
if adm_type == "M" or adm_type == "Q" or adm_type == "J": pnl.add_param_value_pair("LEVEL", parsed_object.value) if not adm_type: adm_type = parsed_object.value # Default to NMOS for type if adm_type == "M": pnl.add_known_object("NMOS", Types.modelType) pnl.add_param_value_pair("type", "N") elif adm_type == "J": pnl.add_known_object("NJF", Types.modelType) pnl.add_param_value_pair("type", "N") else: pnl.add_known_object(adm_type, Types.modelType) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DEVICE_NAME or ( parsed_object.types[0] == SpiritCommon.data_model_type.MODEL_NAME and pnl.type == ".MODEL"): pnl.name = parsed_object.value elif pnl.type == ".DC": # .DC and .AC directives need four PARAM_NAME/PARAM_VALUE pairs - a sweep variable name, # a start value, a stop value, and a step value if not pnl.sweep_param_list: pnl.add_unused_sweep_params("dc") sweep_list = ["", "", "", ""] for sweep_item in sweep_list: pnl.add_sweep_param_value(sweep_item) if parsed_object.types[ 0] == SpiritCommon.data_model_type.DC_SWEEP_DEV: pnl.add_unused_sweep_params("dev=" + parsed_object.value) # Only save if dc analysis does not involve a param if not pnl.sweep_param_list[0]: pnl.sweep_param_list[0] = parsed_object.value pnl.flag_unresolved_device = True elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DC_SWEEP_PARAM: pnl.add_unused_sweep_params("param=" + parsed_object.value) if not parsed_object.value == "dc": # Overwrite dc analysis with dev if it exists, reset unresolved # device flag to False pnl.sweep_param_list[0] = parsed_object.value pnl.flag_unresolved_device = False elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DC_SWEEP_START: pnl.sweep_param_list[1] = parsed_object.value elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DC_SWEEP_STOP: pnl.sweep_param_list[2] = parsed_object.value elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DC_SWEEP_STEP: pnl.sweep_param_list[3] = parsed_object.value elif parsed_object.types[ 0] == SpiritCommon.data_model_type.PARAM_NAME: sweep_param_name = parsed_object.value sweep_parsed_object = next(parsed_object_iter) if not sweep_parsed_object.types[ 0] == SpiritCommon.data_model_type.PARAM_VALUE: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected PARAM_VALUE. Got " + str(sweep_parsed_object.types[0])) raise Exception( "Next Token is not a PARAM_VALUE. Something went wrong!" 
) sweep_param_value = sweep_parsed_object.value pnl.add_unused_sweep_params(sweep_param_name + "=" + sweep_param_value) # For translation of port instance parameters to names recognized internally by XDM elif parsed_object.types[ 0] == SpiritCommon.data_model_type.PARAM_NAME and pnl.type == "P": param_value_parsed_object = next(parsed_object_iter) if parsed_object.value == "num": pnl.add_param_value_pair("PORT", param_value_parsed_object.value) elif parsed_object.value == "r": pnl.add_param_value_pair("Z0", param_value_parsed_object.value) elif parsed_object.value == "mag": pnl.add_param_value_pair("AC", param_value_parsed_object.value) elif parsed_object.value == "type": pass else: pnl.add_param_value_pair(parsed_object.value.upper(), param_value_parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.GENERALNODE and not pnl.type in [ ".IC", ".DCVOLT", ".NODESET" ]: output_node = parsed_object.value if BoostParserInterface.boost_xdm_map_dict[parsed_object.types[ 0]] in pnl.known_objects and pnl.type == ".GLOBAL": pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum) pnl_synth.type = ".GLOBAL" pnl_synth.local_type = ".GLOBAL" pnl_synth.add_known_object( output_node, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) synthesized_pnls.append(pnl_synth) else: pnl.add_known_object( output_node, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) elif parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME: # For Spectre, the polarity of the device (ex. NMOS or PMOS, or NPN or PNP) # isn't declared as a separate identifier in the .MODEL statement. Instead, # it is saved as a model parameter called "type". The polarity needs to be # extracted and saved in the data model consistent with SPICE parsing if pnl.type == ".MODEL" and parsed_object.value.upper() == "TYPE": param_value_parsed_object = next(parsed_object_iter) if pnl.known_objects.get(Types.modelType).endswith("MOS"): pnl.add_known_object( param_value_parsed_object.value.upper() + "MOS", Types.modelType) elif pnl.known_objects.get(Types.modelType).endswith("JF"): pnl.add_known_object( param_value_parsed_object.value.upper() + "JF", Types.modelType) else: pnl.add_known_object(param_value_parsed_object.value, Types.modelType) pnl.add_param_value_pair(parsed_object.value, param_value_parsed_object.value) elif pnl.type == ".MODEL" and parsed_object.value.upper( ) == "VERSION": param_value_parsed_object = next(parsed_object_iter) pnl.add_param_value_pair(parsed_object.value.upper(), param_value_parsed_object.value) elif not parsed_object.value == "wave": param_value_parsed_object = next(parsed_object_iter) if pnl.type and pnl.type == ".TRAN": self.set_tran_param(pnl, parsed_object.value, param_value_parsed_object.value) elif pnl.type and pnl.type == "V" or pnl.type == "I": processed_value = param_value_parsed_object.value # Some source paramters don't need curly braces, such as: # The "type" parameter indicates source type, such as PULSE or PWL. # The "file" parameter indicates the file to be opened. if not parsed_object.value == "type" and not parsed_object.value == "file": processed_value, msg = convert_to_xyce(processed_value) processed_value = self.hack_ternary_operator( processed_value) pnl.source_params[parsed_object.value] = processed_value else: if param_value_parsed_object.types[ 0] != SpiritCommon.data_model_type.PARAM_VALUE: raise Exception( "Next Token is not a PARAM_VALUE. Something went wrong!" 
) if (parsed_object.value.upper() == "M") and pnl.type not in ['R', 'L', 'C']: pnl.m_param = param_value_parsed_object.value msg = None # expression = None if param_value_parsed_object.value.startswith( '[') and param_value_parsed_object.value.endswith( ']'): expression = param_value_parsed_object.value elif is_a_number(param_value_parsed_object.value): processed_value = param_value_parsed_object.value expression = convert_si_unit_prefix(processed_value) else: # For parameters that refer to control devices, skip convert_to_xyce # In the future, this will include cccs, etc. processed_value, msg = convert_to_xyce( param_value_parsed_object.value) expression = self.hack_ternary_operator( processed_value) if expression: pnl.add_param_value_pair(parsed_object.value, expression) else: pnl.add_param_value_pair( parsed_object.value, param_value_parsed_object.value) if msg: logging.warning("Error in expression: " + msg + str(parsed_object.value)) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DC_VALUE_VALUE: processed_value, msg = convert_to_xyce(parsed_object.value) processed_value = self.hack_ternary_operator(processed_value) pnl.add_lazy_statement( processed_value, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) elif parsed_object.types[0] in [ SpiritCommon.data_model_type.AC_MAG_VALUE, SpiritCommon.data_model_type.AC_PHASE_VALUE ]: processed_value, msg = convert_to_xyce(parsed_object.value) processed_value = self.hack_ternary_operator(processed_value) if parsed_object.types[ 0] == SpiritCommon.data_model_type.AC_MAG_VALUE: pnl.add_known_object("AC", Types.acValue) pnl.add_known_object( processed_value, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.CONTROL_DEVICE: control_dev_name_obj = next(parsed_object_iter) if control_dev_name_obj.types[ 0] != SpiritCommon.data_model_type.CONTROL_DEVICE_NAME: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected CONTROL_DEVICE_NAME. Got " + str(control_dev_name_obj.types[0])) raise Exception( "Next Token is not a CONTROL_DEVICE_NAME. Something went wrong!" 
) pnl.add_control_param_value(control_dev_name_obj.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE: formatted_output_variable = format_output_variable( parsed_object.value) pnl.add_output_variable_value(formatted_output_variable) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.UNKNOWN_NODE: pnl.add_unknown_node(parsed_object.value) elif parsed_object.types[0] == SpiritCommon.data_model_type.COMMENT: # If a comment comes in the middle of a delimited block, synthesize a PNL # object for the comment and leave the original PNL unmolested if self._delimited_block: pnl_synth = ParsedNetlistLine(pnl.filename, [pnl.linenum[-1]]) pnl_synth.type = "COMMENT" pnl_synth.name = parsed_object.value pnl_synth.add_comment(parsed_object.value) synthesized_pnls.append(pnl_synth) else: pnl.type = "COMMENT" pnl.name = parsed_object.value pnl.add_comment(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.LIB_ENTRY and pnl.type and not pnl.type == ".ENDL": # convert to .lib from .include pnl.type = ".LIB" pnl.add_known_object( parsed_object.value, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.FUNC_EXPRESSION: processed_value, msg = convert_to_xyce(parsed_object.value) processed_value = self.hack_ternary_operator(processed_value) if not processed_value.startswith("{"): processed_value = "{" + processed_value + "}" pnl.add_known_object( processed_value, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.CONDITIONAL_STATEMENT: comment = pnl.params_dict[Types.comment] + parsed_object.value pnl.add_comment(comment) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.BINNED_MODEL_NAME: # if "." already in model name, need to create synthesized pnl for next # binned model if "." in pnl.name: model_name = pnl.name.split(".")[0] pnl_synth = ParsedNetlistLine(pnl.filename, [pnl.linenum[-1]]) pnl_synth.type = ".MODEL" pnl_synth.local_type = "model" pnl_synth.name = model_name + "." + parsed_object.value pnl_synth.add_param_value_pair("LEVEL", pnl.params_dict["LEVEL"]) pnl_synth.add_known_object(pnl.known_objects["MODEL_TYPE"], Types.modelType) synthesized_pnls.append(pnl_synth) self._modify_synth_pnl = True else: pnl.name = pnl.name + "." + parsed_object.value elif parsed_object.types[ 0] == SpiritCommon.data_model_type.VOLTAGE or parsed_object.types[ 0] == SpiritCommon.data_model_type.CURRENT: expression_obj = next(parsed_object_iter) if expression_obj.types[ 0] != SpiritCommon.data_model_type.EXPRESSION: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected EXPRESSION. Got " + str(expression_obj.types[0])) raise Exception( "Next Token is not a EXPRESSION. Something went wrong!") processed_value, msg = convert_to_xyce(expression_obj.value) processed_value = self.hack_ternary_operator(processed_value) pnl.add_known_object(processed_value, Types.expression) if parsed_object.types[0] == SpiritCommon.data_model_type.VOLTAGE: pnl.add_known_object(processed_value, Types.voltage) if parsed_object.types[0] == SpiritCommon.data_model_type.CURRENT: pnl.add_known_object(processed_value, Types.current) else: if is_a_number(parsed_object.value): parsed_object.value = convert_si_unit_prefix( parsed_object.value) XyceNetlistBoostParserInterface.convert_next_token( parsed_object, parsed_object_iter, pnl, synthesized_pnls)
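
# Illustrative sketch (not part of xdm): as the comments above note, Spectre selects a
# device model by name (e.g. "vbic" or "bsimsoi") where SPICE-style netlists use a
# LEVEL parameter. The table and helper below are hypothetical stand-ins, not the real
# spectre_to_adm_model_type_map; they only show the shape of that
# name -> (device letter, LEVEL) translation, with LEVEL values taken from the
# comment above rather than verified against Xyce.
_SPECTRE_MODEL_LEVEL_SKETCH = {
    "bsimsoi": ("M", "10"),
    "vbic": ("Q", "10"),
}


def _translate_spectre_model_sketch(model_type_name):
    """Return a (device letter, LEVEL value) pair, or None if the name is unknown."""
    return _SPECTRE_MODEL_LEVEL_SKETCH.get(model_type_name)


# Example: _translate_spectre_model_sketch("vbic") -> ("Q", "10"), which would surface
# as a LEVEL=10 parameter on the translated .MODEL card.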
def read(self): """ .. _reader_read: Iterates over an iterable grammar's component statements, and registers relevant components """ import sys inc_files_and_scopes = [] lib_files = [] # tuple list (file name, lib name) control_device_handling_list = [] debug_incfiles = False platform = sys.platform grammar_iter = iter(self._grammar) # iterates through each grammar "line" for parsed_netlist_line in grammar_iter: self._last_line = self.read_line(parsed_netlist_line, self._reader_state, self._top_reader_state, self._language_definition, control_device_handling_list, inc_files_and_scopes, lib_files) self._statement_count += 1 if self._language_changed: break # Add in default TNOM value, if not the same as Xyce's 27C if self._is_top_level_file and not self._grammar.tnom_defined and self._grammar.tnom_value != "27": parsed_netlist_line = ParsedNetlistLine(self._file, [0]) parsed_netlist_line.type = ".OPTIONS" parsed_netlist_line.local_type = ".OPTIONS" parsed_netlist_line.add_known_object("DEVICE", Types.optionPkgTypeValue) parsed_netlist_line.add_param_value_pair("TNOM", self._grammar.tnom_value) self._last_line = self.read_line(parsed_netlist_line, self._reader_state, self._top_reader_state, self._language_definition, control_device_handling_list, inc_files_and_scopes, lib_files) # only allows for one simulator statement if self._language_changed: self._language_changed = False self._grammar = self._grammar_type(self._file, self._language_definition, self._is_top_level_file) grammar_iter = iter(self._grammar) # skip all lines until past simulator statement for i in range(self._statement_count): next(grammar_iter) for parsed_netlist_line in grammar_iter: self._last_line = self.read_line(parsed_netlist_line, self._reader_state, self._top_reader_state, self._language_definition, control_device_handling_list, inc_files_and_scopes, lib_files) self._statement_count += 1 logging.debug("Completed parsing file \t\"" + self._file + "\"") if self._auto_translate: # remove duplicates and re-order to favor translations involving the top # scope first inc_files_and_scopes = list(set(inc_files_and_scopes)) if self._reader_state.scope_index.is_top_parent(): top_inc_files_and_scopes = [] child_inc_files_and_scopes = [] for filename, scope in inc_files_and_scopes: if scope.is_top_parent(): top_inc_files_and_scopes.append((filename, scope)) else: child_inc_files_and_scopes.append((filename, scope)) inc_files_and_scopes = [] inc_files_and_scopes = top_inc_files_and_scopes + child_inc_files_and_scopes for incfile_pair in inc_files_and_scopes: incfile = incfile_pair[0] incfile_scope = incfile_pair[1] if debug_incfiles is True: print("self._file = '%s'\n" % self._file, file=sys.stderr) print("os.path.dirname(self._file) = '%s'\n" % (os.path.dirname(self._file)), file=sys.stderr) print("os.path.dirname(os.path.abspath(self._file)) = '%s'\n" % (os.path.dirname(os.path.abspath(self._file))), file=sys.stderr) print("incfile = '%s'\n" % incfile, file=sys.stderr) # incfile_resolved = os.path.normpath(os.path.normcase(incfile)).replace('"','') # incfile_case_resolved = os.path.normcase(incfile) incfile_case_resolved = incfile incfile_quote_resolved = incfile_case_resolved.replace('"', '') incfile_quote_resolved = incfile_quote_resolved.replace("'", '') incfile_path_resolved = os.path.normpath(incfile_quote_resolved) incfile_resolved = incfile_path_resolved if debug_incfiles is True: print("incfile_case_resolved = '%s'\n" % incfile_case_resolved, file=sys.stderr) print("incfile_quote_resolved = '%s'\n" % 
incfile_quote_resolved, file=sys.stderr) print("incfile_path_resolved = '%s'\n" % incfile_path_resolved, file=sys.stderr) dirname_resolved = os.path.normpath(os.path.dirname(os.path.abspath(self._file))).replace('"', '') if debug_incfiles: print("incfile_resolved = '%s'\n" % incfile_resolved, file=sys.stderr) print("dirname_resolved = '%s'\n" % dirname_resolved, file=sys.stderr) if platform == "Windows": if debug_incfiles: print("On Windows - pre fixed file string = '%s'\n" % incfile_resolved, file=sys.stderr) incfile_resolved_slashes = incfile_resolved.replace('//', '\\') if debug_incfiles: print("On Windows - post fixed file string = '%s'\n" % incfile_resolved, file=sys.stderr) else: if debug_incfiles: print("On Linux or OS X - pre fixed file string = '%s'\n" % incfile_resolved, file=sys.stderr) incfile_resolved_slashes = incfile_resolved.replace('\\', '/') if debug_incfiles: print("On Linux or OS X - post fixed file string = '%s'\n" % incfile_resolved, file=sys.stderr) if debug_incfiles: print("incfile_resolved_slashes = '%s'\n" % incfile_resolved_slashes, file=sys.stderr) inc_path, incfile_resolved2 = os.path.split(incfile_resolved_slashes) incfile_resolved3 = os.path.join(inc_path, incfile_resolved2) if debug_incfiles is True: print("incfile2_resolved = '%s'\n" % incfile_resolved2, file=sys.stderr) print("incfile3_resolved = '%s'\n" % incfile_resolved3, file=sys.stderr) filename = os.path.join(dirname_resolved, incfile_resolved3).replace('"', '') if debug_incfiles is True: print("filename = '%s'\n" % filename, file=sys.stderr) logging.debug("Loading include file \t\t\"" + str(filename) + "\"") curr_scope = self._reader_state.scope_index self._reader_state.scope_index = incfile_scope include_file_reader = GenericReader(filename, self._grammar_type, self._language_definition, reader_state=self._reader_state, top_reader_state=self._top_reader_state, is_top_level_file=False, tspice_xml=self._tspice_xml, pspice_xml=self._pspice_xml, hspice_xml=self._hspice_xml, spectre_xml=self._spectre_xml, auto_translate=self._auto_translate) include_file_reader.read() self._reader_state.scope_index = curr_scope # re-arranges list of library filename/sections to be parsed. Originally # stored as a list of tuples; i.e. [(filname, sect), ... ]. Transfers # into an OrderedDict, with keys be the filenames and a list of library # sections being the dictionary entry lib_files_aggregated_sects = OrderedDict() for libfile in lib_files: if not libfile[0] in lib_files_aggregated_sects: lib_files_aggregated_sects[libfile[0]] = [] lib_files_aggregated_sects[libfile[0]].append(libfile[1]) #read each library file/section list for libfile in lib_files_aggregated_sects: lib_file_name = libfile lib_names = deepcopy(lib_files_aggregated_sects[libfile]) logging.info("Parsing Lib File: " + lib_file_name + " sections: " + ",".join(lib_names)) filename = lib_file_name.replace("'", '').replace('"', '') if not os.path.isfile(filename): filename = os.path.join(os.path.dirname(self._file), filename) library_file_reader = GenericReader(filename, self._grammar_type, self._language_definition, reader_state=self._reader_state, top_reader_state=self._top_reader_state, is_top_level_file=False, tspice_xml=self._tspice_xml, pspice_xml=self._pspice_xml, hspice_xml=self._hspice_xml, spectre_xml=self._spectre_xml, auto_translate=self._auto_translate, lib_sect_list=lib_names) library_file_reader.read() # translate .lib files that are in child scope if self._is_top_level_file: count = 0 # list of .lib files in child scope may be growing ... 
while count < len(self._reader_state.lib_files_not_in_scope): filename = self._reader_state.lib_files_not_in_scope[count] logging.info("Parsing Lib File: " + filename) library_file_reader = GenericReader(filename, self._grammar_type, self._language_definition, reader_state=self._reader_state, top_reader_state=self._top_reader_state, is_top_level_file=False, tspice_xml=self._tspice_xml, pspice_xml=self._pspice_xml, hspice_xml=self._hspice_xml, spectre_xml=self._spectre_xml, auto_translate=self._auto_translate, lib_sect_list=[]) library_file_reader.read() count += 1 # library_file_reader.read_library(lib_name, control_device_handling_list, inc_files_and_scopes, lib_files) # unknown_pnl is a netlist line that doesn't yet know its device type # (e.g. spectre line that references a model of some type) # We loop through all the unknown pnls after the models have been read # and set the type and pass it back to read_line to build the device parent_scope = self._reader_state.scope_index if self._is_top_level_file: resolved_pnl_list = [] for unknown_pnl, scope in self._reader_state.unknown_pnls.items(): duplicate_pnl_flag = False self._reader_state.scope_index = scope # If MODEL_NAME is in known_objects, the unresolved device is an instantiation of # a user declared model, which will be resolved in first branch. The second branch is # for any pnl's that must be written to the top level file. The last branch is # for Spectre, where the possible unresolved name is a name of a source in a directive language_definition = self._language_definition if "MODEL_NAME" in unknown_pnl.known_objects: resolved_pnl = self._reader_state.resolve_unknown_pnl(unknown_pnl, self._language_definition) # If there was a language change in the file with the model card, # use that language in resolving the PNL. if "ST_LANG" in resolved_pnl.params_dict: st_lang = resolved_pnl.params_dict.pop("ST_LANG") if st_lang == self._language_definition.language: pass elif st_lang == "hspice": xml_factory = XmlFactory(self._hspice_xml) elif st_lang == "pspice": xml_factory = XmlFactory(self._pspice_xml) elif st_lang == "spectre": xml_factory = XmlFactory(self._spectre_xml) elif st_lang == "tspice": xml_factory = XmlFactory(self._tspice_xml) elif st_lang == "xyce": xml_factory = XmlFactory(self._xyce_xml) if st_lang != self._language_definition.language: xml_factory.read() language_definition = xml_factory.language_definition if (language_definition.is_case_insensitive() and not self._reader_state.is_case_insensitive()): resolved_pnl.params_dict = OrderedDict((k.upper(), v) for k, v in resolved_pnl.params_dict.items()) resolved_pnl.known_objects = {k.upper():v for k, v in resolved_pnl.known_objects.items()} elif unknown_pnl.flag_top_pnl: resolved_pnl = unknown_pnl resolved_pnl.filename = self._file resolved_pnl.flag_top_pnl = False # check if the top level file pnl is a duplicate. 
skip if it is if resolved_pnl_list: for prev_resolved_pnl in resolved_pnl_list: match_found_flag = True for key in resolved_pnl.params_dict: if not key in prev_resolved_pnl.params_dict: match_found_flag = False break if resolved_pnl.params_dict[key] != prev_resolved_pnl.params_dict[key]: match_found_flag = False break if not match_found_flag: continue for key in resolved_pnl.known_objects: if not key in prev_resolved_pnl.known_objects: match_found_flag = False break if resolved_pnl.known_objects[key] != prev_resolved_pnl.known_objects[key]: match_found_flag = False break if match_found_flag: duplicate_pnl_flag = True break if duplicate_pnl_flag: continue resolved_pnl_list.append(deepcopy(resolved_pnl)) else: resolved_pnl = self._reader_state.resolve_unknown_source(unknown_pnl, self._language_definition) self.read_line(resolved_pnl, self._reader_state, self._top_reader_state, language_definition, control_device_handling_list, inc_files_and_scopes, lib_files) self._reader_state.scope_index = parent_scope # resolve lazy objects for lazy_statement_tuple in self._reader_state.scope_index.lazy_statement_index: for lazy_statement in lazy_statement_tuple[1]: for listener in lazy_statement.listener: listener.resolve_lazy_bind(lazy_statement_tuple[0], lazy_statement.scope, self._reader_state.is_case_insensitive()) # resolve control Devices for device, index in control_device_handling_list: device.resolve_control_device_list(index, self._reader_state) # set analysis type value of all print statements analysis_type = None if self._reader_state.scope_index.commands_index.get_statements(""): for statement in self._reader_state.scope_index.commands_index.get_statements(""): if statement.command_type in analysis_to_print_type: analysis_type = analysis_to_print_type[statement.command_type] break if analysis_type: for statement in self._reader_state.scope_index.commands_index.get_statements(""): if statement.command_type == ".PRINT": statement.set_prop(Types.analysisTypeValue, analysis_type) # iterate all print statements and check output variables if self._append_prefix: for statement in self._reader_state.scope_index.commands_index.get_statements(""): if statement.command_type == ".PRINT": variable_list = statement.get_prop(Types.outputVariableList) new_list = [] if variable_list: for variable in variable_list: stripped_variable = variable[variable.find("(") + 1:variable.find(")")] this_scope = parent_scope colon_divided_list = stripped_variable.split(":") new_colon_divided_list = [] for individual_variable in colon_divided_list: if this_scope.get_statement_by_name(individual_variable): this_statement = this_scope.get_statement_by_name(individual_variable) new_name = this_statement.device_type + individual_variable new_colon_divided_list.append(new_name) if this_statement.get_prop(Types.subcircuitNameValue): new_scope = this_scope.get_child_scope( this_statement.get_prop(Types.subcircuitNameValue).name) if new_scope: this_scope = new_scope else: new_colon_divided_list.append(individual_variable) new_list.append( variable[:variable.find("(") + 1] + ":".join(new_colon_divided_list) + variable[ variable.find(")")]) statement.set_prop(Types.outputVariableList, new_list) if self._is_top_level_file: parent_scope.warn_case_sensitivity()
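
# Illustrative sketch (not part of xdm): the include-file handling in read() above
# reduces to stripping quotes, normalizing path separators, and resolving the result
# against the directory of the including file. _resolve_include_sketch is a
# hypothetical stand-alone distillation of that logic, not the code path xdm runs.
def _resolve_include_sketch(parent_file, incfile):
    """Return the include path resolved relative to parent_file's directory."""
    import os

    cleaned = incfile.replace('"', '').replace("'", '')
    cleaned = os.path.normpath(cleaned)
    parent_dir = os.path.normpath(os.path.dirname(os.path.abspath(parent_file)))
    return os.path.join(parent_dir, cleaned)


# Example (on a POSIX system):
# _resolve_include_sketch("/home/user/top.cir", '"models/nmos.lib"')
# -> "/home/user/models/nmos.lib"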
def convert_next_token(self, parsed_object, parsed_object_iter, pnl, synthesized_pnls, pkg_dict): """ Takes individual parsed objects from the parsed line object Populate ParsedNetlistLine class with all information necessary to create a Statement Many hacks contained here """ temper_bool = False if (parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME or parsed_object.types[0] == SpiritCommon.data_model_type.DEFAULT_PARAM_NAME) and ( pnl.type == ".OPTION" or pnl.local_type == ".OPTIONS"): pnl.type = ".OPTIONS" pnl.local_type = ".OPTIONS" # GITLAB ISSUE #252: all options now will only appear once, at top netlist if not self._top_level_file: pnl.flag_top_pnl = True # find the adm option name orig_param_name = parsed_object.value.upper() param_name = orig_param_name # find all adm packages that use this parameter pkgs = pkg_dict.get(param_name.upper()) # TODO: Hack Bugzilla 2020, ITL1 => NONLIN MAXSTEP (default 200) # TODO: Hack Bugzilla 2020, ITL4 => NONLIN-TRAN MAXSTEP (default 20) # TODO: Hack Bugzilla 2020, VNTOL => ABSTOL param_name, pkgs = self.hack_packages_bugzilla_2020( param_name.upper(), pkgs) if pkgs and param_name.upper() in ["TNOM", "SCALE"]: if param_name.upper() == "TNOM": self._tnom_defined = True pnl.name = "" if parsed_object.types[ 0] == SpiritCommon.data_model_type.PARAM_NAME: param_value_parsed_object = next(parsed_object_iter) param_value = param_value_parsed_object.value else: param_value = self.get_default(orig_param_name) pnl.add_known_object(pkgs[0], Types.optionPkgTypeValue) # converting .OPTIONS METHOD=DEFAULT to .OPTIONS TIMEINT METHOD=TRAP if param_name.upper() == "METHOD" and param_value.upper( ) == "DEFAULT": param_value = "TRAP" pnl.add_param_value_pair(param_name.upper(), param_value) if "COMMENT" in pnl.params_dict: pnl.add_inline_comment(pnl.params_dict["COMMENT"]) pnl.params_dict.pop("COMMENT") for otherPkg in pkgs[1:]: pnl_synth = ParsedNetlistLine( pnl.filename, pnl.linenum) # what to do with line numbers? pnl_synth.type = ".OPTIONS" pnl_synth.add_known_object(otherPkg, Types.optionPkgTypeValue) pnl_synth.add_param_value_pair(param_name.upper(), param_value) synthesized_pnls.append(pnl_synth) else: logging.warning("In file:\"" + str(os.path.basename(pnl.filename)) + "\" at line:" + str(pnl.linenum) + ". Could not accept .OPTIONS \"" + orig_param_name.upper() + "\". Retained (as a comment). 
Continuing.") param_value_parsed_object = next(parsed_object_iter) if pnl.known_objects: pnl.type = ".OPTIONS" pnl.name = "" if pnl.comment: pnl.add_inline_comment(pnl.comment + " " + ".OPTIONS " + orig_param_name + " " + param_value_parsed_object.value) else: pnl.add_inline_comment(".OPTIONS " + orig_param_name + " " + param_value_parsed_object.value) else: pnl.type = "COMMENT" pnl.name = ".OPTIONS " + orig_param_name if "COMMENT" in pnl.params_dict: pnl.add_comment(pnl.params_dict["COMMENT"] + " " + ".OPTIONS " + orig_param_name + " " + param_value_parsed_object.value) else: pnl.add_comment(".OPTIONS " + orig_param_name + " " + param_value_parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper( ) == ".IF": pnl.type = "COMMENT" pnl.add_comment(parsed_object.value) self._if_statement = True self._nested_if_statement_count += 1 elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper( ) == ".ELSEIF": pnl.type = "COMMENT" pnl.add_comment(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper( ) == ".ELSE": pnl.type = "COMMENT" pnl.add_comment(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper( ) == ".ENDIF": pnl.type = "COMMENT" pnl.add_comment(parsed_object.value) self._nested_if_statement_count -= 1 if self._nested_if_statement_count == 0: self._if_statement = False self._comment_end_of_if_statement = True elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper( ) == ".MACRO": pnl.type = ".SUBCKT" elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper( ) == ".EOM": pnl.type = ".ENDS" elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper( ) == ".MEAS": pnl.type = ".MEASURE" elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper( ) == ".PROBE" or parsed_object.value.upper() == ".PROBE64": pnl.type = ".PRINT" pnl.add_known_object("TRAN", Types.analysisTypeValue) # default tran type elif parsed_object.types[ 0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE: #remove [] from HSPICE print variables -- eventually this will be replaced in the writer output_variable_clean = self.clean_hspice_output_variable( parsed_object.value) pnl.add_output_variable_value(output_variable_clean) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.MODEL_TYPE and pnl.type == ".MODEL": # convert hspice type into the general type supported by the ADM adm_type = hspice_to_adm_model_type_map.get( parsed_object.value.upper()) # if not mapped, then use current value if not adm_type: adm_type = parsed_object.value.upper() pnl.add_known_object(adm_type, Types.modelType) # create a pnl for model binning option if "." in pnl.name: pnl_synth = ParsedNetlistLine( pnl.filename, [0]) # what to do with line numbers? 
pnl_synth.type = ".OPTIONS" pnl_synth.local_type = ".OPTIONS" pnl_synth.add_known_object("PARSER", Types.optionPkgTypeValue) pnl_synth.add_param_value_pair("model_binning", "true") pnl_synth.flag_top_pnl = True synthesized_pnls.append(pnl_synth) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.GENERALNODE and not pnl.type in [ ".IC", ".DCVOLT", ".NODESET" ]: output_node = parsed_object.value.replace(".", ":") if BoostParserInterface.boost_xdm_map_dict[parsed_object.types[ 0]] in pnl.known_objects and pnl.type == ".GLOBAL": pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum) pnl_synth.type = ".GLOBAL" pnl_synth.local_type = ".GLOBAL" pnl_synth.add_known_object( output_node, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) synthesized_pnls.append(pnl_synth) else: pnl.add_known_object( output_node, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.FUNC_NAME_VALUE: # For lines with mixed parameter and function statements in HSPICE, separate them out # into different ParsedNetlistLine objects and store it in synthesized pnl if pnl.params_dict or "FUNC_EXPRESSION" in pnl.known_objects: pnl_synth = ParsedNetlistLine( pnl.filename, pnl.linenum) # what to do with line numbers? pnl_synth.type = ".FUNC" pnl_synth.local_type = ".FUNC" pnl_synth.add_known_object( parsed_object.value, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) func_arg_parsed_object = next(parsed_object_iter) while func_arg_parsed_object.types[ 0] == SpiritCommon.data_model_type.FUNC_ARG_VALUE: pnl_synth.add_func_arg_value(func_arg_parsed_object.value) func_arg_parsed_object = next(parsed_object_iter) func_expression_parsed_object = func_arg_parsed_object temper_bool = self.hack_detect_temper( func_expression_parsed_object.value) processed_value = self.hack_ternary_operator( func_expression_parsed_object.value) processed_value = self.hack_exponentiation_symbol( processed_value) pnl_synth.add_known_object( processed_value, BoostParserInterface.boost_xdm_map_dict[ func_expression_parsed_object.types[0]]) synthesized_pnls.append(pnl_synth) else: pnl.type = ".FUNC" pnl.local_type = ".FUNC" pnl.add_known_object( parsed_object.value, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) func_arg_parsed_object = next(parsed_object_iter) while func_arg_parsed_object.types[ 0] == SpiritCommon.data_model_type.FUNC_ARG_VALUE: pnl.add_func_arg_value(func_arg_parsed_object.value) func_arg_parsed_object = next(parsed_object_iter) func_expression_parsed_object = func_arg_parsed_object temper_bool = self.hack_detect_temper( func_expression_parsed_object.value) processed_value = self.hack_ternary_operator( func_expression_parsed_object.value) processed_value = self.hack_exponentiation_symbol( processed_value) pnl.add_known_object( processed_value, BoostParserInterface.boost_xdm_map_dict[ func_expression_parsed_object.types[0]]) elif parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME: param_value_parsed_object = next(parsed_object_iter) if param_value_parsed_object.types[ 0] != SpiritCommon.data_model_type.PARAM_VALUE: logging.error( "Line(s):" + str(pnl.linenum) + ". Parser passed wrong token. Expected PARAM_VALUE. Got " + str(param_value_parsed_object.types[0])) raise Exception( "Next Token is not a PARAM_VALUE. 
Something went wrong!") if pnl.type == ".FUNC": # Same as above, for lines with mixed parameter and function statements in HSPICE, separate them out # into different ParsedNetlistLine objects and store it in synthesized pnl if synthesized_pnls: temper_bool = self.hack_detect_temper( param_value_parsed_object.value) processed_value = self.hack_ternary_operator( param_value_parsed_object.value) processed_value = self.hack_exponentiation_symbol( processed_value) synthesized_pnls[-1].add_param_value_pair( parsed_object.value.upper(), processed_value) else: pnl_synth = ParsedNetlistLine( pnl.filename, pnl.linenum) # what to do with line numbers? pnl_synth.type = ".PARAM" pnl_synth.local_type = ".PARAM" temper_bool = self.hack_detect_temper( param_value_parsed_object.value) processed_value = self.hack_ternary_operator( param_value_parsed_object.value) processed_value = self.hack_exponentiation_symbol( processed_value) pnl_synth.add_param_value_pair(parsed_object.value.upper(), processed_value) synthesized_pnls.append(pnl_synth) else: temper_bool = self.hack_detect_temper( param_value_parsed_object.value) processed_value = self.hack_ternary_operator( param_value_parsed_object.value) if pnl.type in [ ".PARAM", ".SUBCKT", ".MODEL", ".MACRO", ".GLOBAL_PARAM" ] or pnl.type in supported_devices: processed_value = self.curly_braces_for_expressions( processed_value) processed_value = self.hack_exponentiation_symbol( processed_value) pnl.add_param_value_pair(parsed_object.value.upper(), processed_value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.PARAM_VALUE and pnl.params_dict: # Same as above, for lines with mixed parameter and function statements in HSPICE, separate them out # into different ParsedNetlistLine objects and store it in synthesized pnl if pnl.type == ".FUNC": last_key = synthesized_pnls[-1].params_dict.keys()[-1] prev_param_value = synthesized_pnls[-1].params_dict[last_key] temper_bool = self.hack_detect_temper(parsed_object.value) processed_value = self.hack_ternary_operator( parsed_object.value) processed_value = self.hack_exponentiation_symbol( processed_value) synthesized_pnls[-1].params_dict[ last_key] = prev_param_value + " " + processed_value else: last_key = pnl.params_dict.keys()[-1] prev_param_value = pnl.params_dict[last_key] temper_bool = self.hack_detect_temper(parsed_object.value) processed_value = self.hack_ternary_operator( parsed_object.value) if pnl.type in [ ".PARAM", ".SUBCKT", ".MODEL", ".MACRO", ".GLOBAL_PARAM" ] or pnl.type in supported_devices: processed_value = self.curly_braces_for_expressions( processed_value) processed_value = self.hack_exponentiation_symbol( processed_value) pnl.params_dict[ last_key] = prev_param_value + " " + processed_value elif parsed_object.types[0] == SpiritCommon.data_model_type.COMMENT: pnl.type = "COMMENT" if parsed_object.value.startswith("//"): pnl.name = parsed_object.value[2:] pnl.add_comment(parsed_object.value[2:]) else: pnl.name = parsed_object.value[1:] pnl.add_comment(parsed_object.value[1:]) elif parsed_object.types == [ SpiritCommon.data_model_type.MODEL_NAME, SpiritCommon.data_model_type.VALUE ]: lst = [] for typ in parsed_object.types: lst.append(BoostParserInterface.boost_xdm_map_dict[typ]) temper_bool = self.hack_detect_temper(parsed_object.value) processed_value = self.hack_ternary_operator(parsed_object.value) processed_value = self.hack_exponentiation_symbol(processed_value) pnl.add_lazy_statement(processed_value, lst) # for resistors, check if the resistance is an ABM expression involving a voltage 
# at a node or between two nodes. Change into a B-element if it is. if pnl.type == "R" and self.hack_detect_abm(parsed_object.value): pnl.type = "B" pnl.local_type = "B" processed_value = self.hack_exponentiation_symbol( parsed_object.value.strip("'")) pnl.add_known_object( "{V(%s,%s)/(%s)}" % (pnl.known_objects["POS_NODE_NAME"], pnl.known_objects["NEG_NODE_NAME"], processed_value), Types.expression) pnl.add_known_object( "{V(%s,%s)/(%s)}" % (pnl.known_objects["POS_NODE_NAME"], pnl.known_objects["NEG_NODE_NAME"], processed_value), Types.current) pnl.lazy_statement = {} elif parsed_object.types[0] in [ SpiritCommon.data_model_type.DC_VALUE_VALUE, SpiritCommon.data_model_type.AC_MAG_VALUE, SpiritCommon.data_model_type.AC_PHASE_VALUE ]: processed_value = self.curly_braces_for_expressions( parsed_object.value) pnl.add_known_object( processed_value, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.DATA_TABLE_NAME: if pnl.type == ".DATA": pnl.add_known_object( parsed_object.value, BoostParserInterface.boost_xdm_map_dict[ parsed_object.types[0]]) elif pnl.type == ".TRAN": pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum) pnl_synth.type = ".STEP" pnl_synth.local_type = ".STEP" pnl_synth.add_sweep_param_value("DATA") pnl_synth.add_sweep_param_value(parsed_object.value) synthesized_pnls.append(pnl_synth) elif pnl.type == ".DC" or pnl.type == ".AC": pnl.add_sweep_param_value("DATA") pnl.add_sweep_param_value(parsed_object.value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.TRANS_REF_NAME: processed_value = self.curly_braces_for_expressions( parsed_object.value) pnl.add_transient_value(processed_value) elif parsed_object.types[ 0] == SpiritCommon.data_model_type.CONDITIONAL_STATEMENT: comment = pnl.params_dict[Types.comment] + parsed_object.value pnl.add_comment(comment) else: XyceNetlistBoostParserInterface.convert_next_token( parsed_object, parsed_object_iter, pnl, synthesized_pnls) # if "TEMPER" special variable detected, a .GLOBAL_PARAM statement pnl will be synthesized and flagged # to indicate it belongs at the top circuit level if temper_bool: hack_pnl_synth = ParsedNetlistLine(pnl.filename, [0]) hack_pnl_synth.type = ".GLOBAL_PARAM" hack_pnl_synth.local_type = ".GLOBAL_PARAM" hack_pnl_synth.add_param_value_pair("XYCE_TEMPER", "25") hack_pnl_synth.flag_top_pnl = True synthesized_pnls.append(hack_pnl_synth)
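
# Illustrative sketch (not part of xdm): when hack_detect_temper() flags the HSPICE
# TEMPER special variable above, a top-level ".GLOBAL_PARAM XYCE_TEMPER=25" line is
# synthesized and marked for the top circuit level. The helper below is a hypothetical
# distillation of that pattern, reusing the ParsedNetlistLine calls already shown.
def _synthesize_temper_param_sketch(filename, synthesized_pnls):
    """Append a flagged .GLOBAL_PARAM pnl that defines XYCE_TEMPER at the top level."""
    hack_pnl_synth = ParsedNetlistLine(filename, [0])
    hack_pnl_synth.type = ".GLOBAL_PARAM"
    hack_pnl_synth.local_type = ".GLOBAL_PARAM"
    hack_pnl_synth.add_param_value_pair("XYCE_TEMPER", "25")
    hack_pnl_synth.flag_top_pnl = True
    synthesized_pnls.append(hack_pnl_synth)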
def convert_next_token(parsed_object, parsed_object_iter, pnl, synthesized_pnls, pkg_dict):
    if parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME and pnl.type == ".OPTIONS":
        # find the adm option name
        mapped_name = tspice_to_adm_opt_name_map.get(parsed_object.value.upper(), parsed_object.value.upper())
        # find all adm packages that use this parameter
        pkgs = pkg_dict.get(mapped_name.upper())
        if pkgs:
            param_value_parsed_object = next(parsed_object_iter)
            if param_value_parsed_object.types[0] != SpiritCommon.data_model_type.PARAM_VALUE:
                logging.error("In file:\"" + pnl.filename + "\" at line:" + str(pnl.linenum) +
                              ". Parser passed wrong token. Expected PARAM_VALUE. Got " +
                              str(param_value_parsed_object.types[0]))
                raise Exception("Next Token is not a PARAM_VALUE. Something went wrong!")
            pnl.add_known_object(pkgs[0], Types.optionPkgTypeValue)
            param_value = param_value_parsed_object.value
            # converting .OPTIONS METHOD=DEFAULT to .OPTIONS TIMEINT METHOD=TRAP
            if mapped_name.upper() == "METHOD" and param_value.upper() == "DEFAULT":
                param_value = "TRAP"
            pnl.add_param_value_pair(mapped_name.upper(), param_value)
            for otherPkg in pkgs[1:]:
                pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum)  # what to do with line numbers?
                pnl_synth.type = ".OPTIONS"
                pnl_synth.add_known_object(otherPkg, Types.optionPkgTypeValue)
                pnl_synth.add_param_value_pair(mapped_name.upper(), param_value)
                synthesized_pnls.append(pnl_synth)
        else:
            logging.warning("In file:\"" + pnl.filename + "\" at line:" + str(pnl.linenum) +
                            ". Could not accept .OPTIONS \"" + mapped_name.upper() +
                            "\". Retained (as a comment). Continuing.")
            pnl.type = "COMMENT"
            pnl.name = ".OPTIONS " + mapped_name
            pnl.add_comment(".OPTIONS " + mapped_name)
    elif parsed_object.types[0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper() == ".LIB":
        pnl.type = ".INC"
    elif parsed_object.types[0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value == ".PARAM":
        pnl.type = ".GLOBAL_PARAM"
    elif parsed_object.types[0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE:
        # remove [] from TSPICE print variables -- eventually this will be replaced in the writer
        output_variable_clean = parsed_object.value
        output_variable_clean = output_variable_clean.replace("[", "")
        output_variable_clean = output_variable_clean.replace("]", "")
        output_variable_clean = output_variable_clean.replace("N(", "V(")
        pnl.add_output_variable_value(output_variable_clean)
    elif parsed_object.types[0] == SpiritCommon.data_model_type.MODEL_TYPE and pnl.type == ".MODEL":
        # convert tspice type into the general type supported by the ADM
        adm_type = tspice_to_adm_model_type_map.get(parsed_object.value.upper())
        # if not mapped, then use current value
        if adm_type is None:
            adm_type = parsed_object.value.upper()
        pnl.add_known_object(adm_type, Types.modelType)
    else:
        XyceNetlistBoostParserInterface.convert_next_token(parsed_object, parsed_object_iter, pnl, synthesized_pnls)
    def convert_next_token(self, parsed_object, parsed_object_iter, pnl, synthesized_pnls, pkg_dict):
        """
        Takes individual parsed objects from the parsed line object

        Populate ParsedNetlistLine class with all information necessary to create a Statement

        Many hacks contained here
        """
        if (parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME or
                parsed_object.types[0] == SpiritCommon.data_model_type.DEFAULT_PARAM_NAME) and pnl.type == ".OPTIONS":
            # find the adm option name
            orig_param_name = parsed_object.value.upper()
            param_name = orig_param_name
            # find all adm packages that use this parameter
            pkgs = pkg_dict.get(param_name.upper())
            # TODO: Hack Bugzilla 2020, ITL1 => NONLIN MAXSTEP (default 200)
            # TODO: Hack Bugzilla 2020, ITL4 => NONLIN-TRAN MAXSTEP (default 20)
            # TODO: Hack Bugzilla 2020, VNTOL => ABSTOL
            param_name, pkgs = self.hack_packages_bugzilla_2020(param_name.upper(), pkgs)
            if pkgs:
                if parsed_object.types[0] == SpiritCommon.data_model_type.PARAM_NAME:
                    param_value_parsed_object = next(parsed_object_iter)
                    param_value = param_value_parsed_object.value
                else:
                    param_value = self.get_default(orig_param_name)
                pnl.add_known_object(pkgs[0], Types.optionPkgTypeValue)
                # converting .OPTIONS METHOD=DEFAULT to .OPTIONS TIMEINT METHOD=TRAP
                if param_name.upper() == "METHOD" and param_value.upper() == "DEFAULT":
                    param_value = "TRAP"
                pnl.add_param_value_pair(param_name.upper(), param_value)
                for otherPkg in pkgs[1:]:
                    pnl_synth = ParsedNetlistLine(pnl.filename, pnl.linenum)  # what to do with line numbers?
                    pnl_synth.type = ".OPTIONS"
                    pnl_synth.add_known_object(otherPkg, Types.optionPkgTypeValue)
                    pnl_synth.add_param_value_pair(param_name.upper(), param_value)
                    synthesized_pnls.append(pnl_synth)
            else:
                logging.warning("In file:\"" + str(os.path.basename(pnl.filename)) + "\" at line:" +
                                str(pnl.linenum) + ". Could not accept .OPTIONS \"" + orig_param_name.upper() +
                                "\". Retained (as a comment). Continuing.")
                pnl.type = "COMMENT"
                pnl.name = ".OPTIONS " + orig_param_name
                pnl.add_comment(".OPTIONS " + orig_param_name)
        elif parsed_object.types[0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value.upper() == ".LIB":
            pnl.type = ".INC"
        elif parsed_object.types[0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and parsed_object.value == ".PARAM":
            pnl.type = ".GLOBAL_PARAM"
        elif parsed_object.types[0] == SpiritCommon.data_model_type.DIRECTIVE_NAME and (
                parsed_object.value.upper() == ".PROBE" or parsed_object.value.upper() == ".PROBE64"):
            pnl.type = ".PRINT"
            pnl.add_known_object("TRAN", Types.analysisTypeValue)  # default tran type
        elif parsed_object.types[0] == SpiritCommon.data_model_type.OUTPUT_VARIABLE:
            # remove [] from PSPICE print variables -- eventually this will be replaced in the writer
            output_variable_clean = self.clean_pspice_output_variable(parsed_object.value)
            pnl.add_output_variable_value(output_variable_clean)
        elif parsed_object.types[0] == SpiritCommon.data_model_type.MODEL_TYPE and pnl.type == ".MODEL":
            # convert pspice type into the general type supported by the ADM
            adm_type = pspice_to_adm_model_type_map.get(parsed_object.value.upper())
            # if not mapped, then use current value
            if not adm_type:
                adm_type = parsed_object.value.upper()
            pnl.add_known_object(adm_type, Types.modelType)
        elif (parsed_object.types[0] == SpiritCommon.data_model_type.GENERALNODE
              and pnl.type not in [".IC", ".DCVOLT", ".NODESET"]):
            output_node = parsed_object.value.replace(".", ":")
            pnl.add_known_object(output_node,
                                 BoostParserInterface.boost_xdm_map_dict[parsed_object.types[0]])
        else:
            XyceNetlistBoostParserInterface.convert_next_token(parsed_object, parsed_object_iter, pnl, synthesized_pnls)