def classify(iToken, lObjects):
    """Classify the tokens of a VHDL case statement.

    case_statement ::=
        [ case_label : ]
        case [ ? ] expression is
            case_statement_alternative
            { case_statement_alternative }
        end case [ ? ] [ case_label ] ;

    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index just past the closing semicolon.
    """
    iCurrent = utils.tokenize_label(iToken, lObjects, token.case_label, token.label_colon)
    iCurrent = utils.assign_next_token_required('case', token.case_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('?', token.question_mark, iCurrent, lObjects)
    iCurrent = expression.classify_until(['is'], iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('is', token.is_keyword, iCurrent, lObjects)
    iCurrent = utils.detect_submodule(iCurrent, lObjects, case_statement_alternative)
    # Fix: continue from iCurrent, not iToken — passing iToken rewound the scan
    # to the start of the statement when classifying the closing 'end'.
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('case', token.end_case_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('?', token.question_mark, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if_not(';', token.end_case_label, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify the tokens of a function specification.

    function_specification ::=
        [ pure | impure ] function designator
            [ [ parameter ] ( formal_parameter_list ) ] return type_mark

    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index just past the type mark.
    """
    iCurrent = utils.assign_next_token_if('pure', token.pure_keyword, iToken, lObjects)
    iCurrent = utils.assign_next_token_if('impure', token.impure_keyword, iCurrent, lObjects)
    # Fix: continue from iCurrent — passing iToken discarded the pure/impure scan position.
    iCurrent = utils.assign_next_token_required('function', token.function_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token(token.designator, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('parameter', token.parameter_keyword, iCurrent, lObjects)
    if utils.is_next_token('(', iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required('(', token.open_parenthesis, iCurrent, lObjects)
        iCurrent = formal_parameter_list.classify(iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required(')', token.close_parenthesis, iCurrent, lObjects)
    # Fix: 'return' and the type mark must also resume from iCurrent, not iToken.
    iCurrent = utils.assign_next_token_required('return', token.return_keyword, iCurrent, lObjects)
    iCurrent = type_mark.classify(iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify an optional signal kind: register | bus.

    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index just past any consumed keyword.
    """
    iCurrent = utils.assign_next_token_if('register', token.register_keyword, iToken, lObjects)
    # Fix: chain from iCurrent, not iToken, so the scan position advances
    # consistently with every other classifier in this module.
    iCurrent = utils.assign_next_token_if('bus', token.register_bus, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify a default generic map aspect: ``generic map ( [ default ] [ <> ] )``.

    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index just past the closing parenthesis.
    """
    iCurrent = utils.assign_next_token_required('generic', token.generic_keyword, iToken, lObjects)
    # Fix: every call after the first chained iToken, so the scan never
    # advanced past the 'generic' keyword; chain iCurrent throughout.
    iCurrent = utils.assign_next_token_required('map', token.map_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('(', token.open_parenthesis, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('default', token.default_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('<>', token.undefined_range, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(')', token.close_parenthesis, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify an optional generic map aspect: ``generic map ( association_list )``.

    Every element is matched conditionally, so an absent aspect falls
    straight through without consuming anything.
    """
    iNext = utils.assign_next_token_if('generic', token.generic_keyword, iToken, lObjects)
    iNext = utils.assign_next_token_if('map', token.map_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if('(', token.open_parenthesis, iNext, lObjects)
    iNext = association_list.classify(iNext, lObjects)
    return utils.assign_next_token_if(')', token.close_parenthesis, iNext, lObjects)
def detect(iToken, lObjects):
    """Detect an optional force_mode token.

    force_mode ::= in | out
    """
    iNext = utils.assign_next_token_if('in', token.in_keyword, iToken, lObjects)
    return utils.assign_next_token_if('out', token.out_keyword, iNext, lObjects)
def classify(iToken, lObjects):
    """Classify a mode token.

    mode ::= in | out | inout | buffer | linkage

    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index just past any consumed keyword.
    """
    iCurrent = utils.assign_next_token_if('in', token.in_keyword, iToken, lObjects)
    iCurrent = utils.assign_next_token_if('out', token.out_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('inout', token.inout_keyword, iCurrent, lObjects)
    # Fix: 'buffer' and 'linkage' were both mis-tagged as inout_keyword
    # (copy/paste defect), which would misreport the mode to downstream rules.
    # NOTE(review): assumes token.buffer_keyword / token.linkage_keyword are
    # declared in the token module — confirm against its definitions.
    iCurrent = utils.assign_next_token_if('buffer', token.buffer_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('linkage', token.linkage_keyword, iCurrent, lObjects)
    return iCurrent
def classify_until(lUntils, iToken, lObjects):
    """Classify a waveform up to one of the given stop words.

    waveform ::=
        waveform_element { , waveform_element }
      | unaffected

    :param lUntils: stop-word values that terminate the waveform.
    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index just past the waveform.
    """
    if utils.is_next_token('unaffected', iToken, lObjects):
        return utils.assign_next_token_required('unaffected', token.unaffected_keyword, iToken, lObjects)
    # Fix: build a copy instead of appending to the caller's list — the old
    # aliasing (lMyUntils = lUntils; lMyUntils.append(',')) mutated the
    # caller's until-list, and the extra ',' accumulated across calls.
    lMyUntils = lUntils + [',']
    iCurrent = waveform_element.classify_until(lMyUntils, iToken, lObjects)
    while utils.is_next_token(',', iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required(',', token.comma, iCurrent, lObjects)
        iCurrent = waveform_element.classify_until(lMyUntils, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if(')', parser.todo, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify an optional ``is`` keyword followed by an interface subprogram default."""
    iNext = utils.assign_next_token_if('is', token.is_keyword, iToken, lObjects)
    return interface_subprogram_default.classify(iNext, lObjects)
def classify(iToken, lObjects):
    """Classify an optional ``type`` keyword followed by an identifier."""
    iNext = utils.assign_next_token_if('type', token.type_keyword, iToken, lObjects)
    return identifier.classify(iNext, lObjects)
def classify(iToken, lObjects):
    """Classify a (shared) variable declaration.

    variable_declaration ::=
        [ shared ] variable identifier_list : subtype_indication [ := expression ] ;
    """
    iNext = utils.assign_next_token_if('shared', token.shared_keyword, iToken, lObjects)
    iNext = utils.assign_next_token_required('variable', token.variable_keyword, iNext, lObjects)
    iNext = identifier_list.classify_until([':'], iNext, lObjects, token.identifier)
    iNext = utils.assign_next_token_required(':', token.colon, iNext, lObjects)
    iNext = subtype_indication.classify_until([';', ':='], iNext, lObjects)
    # Optional default-value expression.
    if utils.is_next_token(':=', iNext, lObjects):
        iNext = utils.assign_next_token_required(':=', token.assignment_operator, iNext, lObjects)
        iNext = expression.classify_until([';'], iNext, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
def classify_closing_declaration(iToken, lObjects):
    """Classify the closing of a configuration declaration:
    ``end [ configuration ] [ configuration_simple_name ] ;``.
    """
    iNext = utils.assign_next_token_required('end', token.end_keyword, iToken, lObjects)
    iNext = utils.assign_next_token_if('configuration', token.end_configuration_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if_not(';', token.configuration_simple_name, iNext, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
def classify_opening_declaration(iToken, lObjects):
    """Classify the opening of a component declaration:
    ``component identifier [ is ]``.
    """
    iNext = utils.assign_next_token_required('component', token.component_keyword, iToken, lObjects)
    iNext = utils.assign_next_token(token.identifier, iNext, lObjects)
    return utils.assign_next_token_if('is', token.is_keyword, iNext, lObjects)
def classify(iToken, lObjects):
    """Classify a procedure specification:
    ``procedure designator [ parameter ] [ ( formal_parameter_list ) ]``.
    """
    iNext = utils.assign_next_token_required('procedure', token.procedure_keyword, iToken, lObjects)
    iNext = utils.assign_next_token(token.designator, iNext, lObjects)
    iNext = utils.assign_next_token_if('parameter', token.parameter_keyword, iNext, lObjects)
    # No parameter list — nothing more to consume.
    if not utils.is_next_token('(', iNext, lObjects):
        return iNext
    iNext = utils.assign_next_token_required('(', token.open_parenthesis, iNext, lObjects)
    iNext = formal_parameter_list.classify(iNext, lObjects)
    return utils.assign_next_token_required(')', token.close_parenthesis, iNext, lObjects)
def classify(iToken, lObjects):
    """Classify a parenthesised enumeration literal list:
    ``( enumeration_literal { , enumeration_literal } )``.
    """
    iNext = utils.assign_next_token_required('(', token.open_parenthesis, iToken, lObjects)
    # Consume comma-separated literals until the closing parenthesis appears.
    while not utils.is_next_token(')', iNext, lObjects):
        iNext = utils.assign_next_token_if(',', token.comma, iNext, lObjects)
        iNext = utils.assign_next_token(token.enumeration_literal, iNext, lObjects)
    return utils.assign_next_token_required(')', token.close_parenthesis, iNext, lObjects)
def classify_opening_declaration(iToken, lObjects):
    """Classify the opening of a process statement:
    ``[ label : ] [ postponed ] process [ ( sensitivity_list ) ] [ is ]``.
    """
    iNext = utils.tokenize_label(iToken, lObjects, token.process_label, token.label_colon)
    iNext = utils.assign_next_token_if('postponed', token.postponed_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_required('process', token.process_keyword, iNext, lObjects)
    # Optional parenthesised sensitivity list.
    if utils.is_next_token('(', iNext, lObjects):
        iNext = utils.assign_next_token_required('(', token.open_parenthesis, iNext, lObjects)
        iNext = process_sensitivity_list.classify(iNext, lObjects)
        iNext = utils.assign_next_token_required(')', token.close_parenthesis, iNext, lObjects)
    return utils.assign_next_token_if('is', token.is_keyword, iNext, lObjects)
def classify_until(lUntils, iToken, lObjects):
    """Classify a sensitivity list up to one of the given stop words.

    sensitivity_list ::= *signal*_name { , *signal*_name }

    :param lUntils: stop-word values that terminate the list.
    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index of the scan position when a stop word is reached or
        no further progress can be made.
    """
    iCurrent = iToken
    iLast = 0
    while iLast != iCurrent:
        iLast = iCurrent
        if lObjects[utils.find_next_token(iCurrent, lObjects)].get_value().lower() in lUntils:
            return iCurrent
        iCurrent = utils.assign_next_token_if(',', token.comma, iCurrent, lObjects)
        iCurrent = utils.assign_next_token(parser.todo, iCurrent, lObjects)
    # Fix: previously fell off the end and returned None when the loop
    # stopped making progress, breaking the caller's index chain.
    return iCurrent
def classify(iToken, lObjects):
    """Classify a concurrent assertion statement:
    ``[ label : ] [ postponed ] assertion ;``.
    """
    iNext = utils.tokenize_label(iToken, lObjects, token.label_name, token.label_colon)
    iNext = utils.assign_next_token_if('postponed', token.postponed_keyword, iNext, lObjects)
    iNext = assertion.classify(iNext, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
def classify(iToken, lObjects):
    """Classify a selected signal assignment.

    selected_signal_assignment ::=
        with expression select [ ? ]
            target <= [ delay_mechanism ] selected_waveforms ;

    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index just past the closing semicolon.
    """
    iCurrent = utils.assign_next_token_required('with', token.with_keyword, iToken, lObjects)
    # Fix: the expression, 'select', and selected_waveforms scans all chained
    # iToken instead of iCurrent, rewinding the index to the statement start.
    iCurrent = expression.classify_until(['select'], iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('select', token.select_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('?', token.question_mark, iCurrent, lObjects)
    iCurrent = utils.assign_tokens_until('<=', token.target, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('<=', token.assignment, iCurrent, lObjects)
    iCurrent = delay_mechanism.detect(iCurrent, lObjects)
    iCurrent = selected_waveforms.classify_until([';'], iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify_closing_declaration(iToken, lObjects):
    """Classify the closing of a process statement:
    ``end [ postponed ] process [ process_label ] ;``.
    """
    iNext = utils.assign_next_token_required('end', token.end_keyword, iToken, lObjects)
    iNext = utils.assign_next_token_if('postponed', token.end_postponed_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_required('process', token.end_process_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if_not(';', token.end_process_label, iNext, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
def classify_until(lUntils, iToken, lObjects):
    """Classify a logical name list up to one of the given stop words.

    logical_name_list ::= logical_name { , logical_name }

    :param lUntils: stop-word values that terminate the list.
    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index of the scan position when a stop word is reached or
        no further progress can be made.
    """
    iCurrent = iToken
    iLast = 0
    while iLast != iCurrent:
        iLast = iCurrent
        if lObjects[utils.find_next_token(iCurrent, lObjects)].get_value().lower() in lUntils:
            return iCurrent
        iCurrent = utils.assign_next_token_if(',', token.comma, iCurrent, lObjects)
        iCurrent = utils.assign_next_token(token.logical_name, iCurrent, lObjects)
    # Fix: previously fell off the end and returned None when the loop
    # stopped making progress, breaking the caller's index chain.
    return iCurrent
def classify(iToken, lObjects):
    """Classify a guarded signal assignment:
    ``target <= [ guarded ] [ delay_mechanism ] waveform ;``.
    """
    iNext = utils.assign_tokens_until('<=', token.target, iToken, lObjects)
    iNext = utils.assign_next_token_required('<=', token.assignment, iNext, lObjects)
    iNext = utils.assign_next_token_if('guarded', token.guarded_keyword, iNext, lObjects)
    iNext = delay_mechanism.detect(iNext, lObjects)
    iNext = waveform.classify_until([';'], iNext, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
def classify(iToken, lObjects):
    """Classify an index constraint.

    index_constraint ::= ( discrete_range { , discrete_range } )
    """
    iNext = utils.assign_next_token_required('(', token.open_parenthesis, iToken, lObjects)
    # Consume comma-separated discrete ranges until the closing parenthesis.
    while not utils.is_next_token(')', iNext, lObjects):
        iNext = discrete_range.classify_until([','], iNext, lObjects)
        iNext = utils.assign_next_token_if(',', token.comma, iNext, lObjects)
    return utils.assign_next_token_required(')', token.close_parenthesis, iNext, lObjects)
def classify_until(lUntils, iToken, lObjects, oToken=token.identifier):
    """Classify an identifier list up to one of the given stop words.

    identifier_list ::= identifier { , identifier }

    Stops early if the end of lObjects is reached.
    """
    iFinal = len(lObjects) - 1
    iNext = iToken
    while not utils.is_next_token_one_of(lUntils, iNext, lObjects):
        # Guard against running past the end of the object list.
        if iNext == iFinal:
            break
        iNext = utils.assign_next_token_if_not(',', oToken, iNext, lObjects)
        iNext = utils.assign_next_token_if(',', token.comma, iNext, lObjects)
    return iNext
def classify_until(lUntils, iToken, lObjects):
    """Classify a choices production up to one of the given stop words.

    choices ::= choice { | choice }

    :param lUntils: stop-word values that terminate the choices.
    :param iToken: index into lObjects at which to start scanning.
    :param lObjects: list of lexer objects being classified in place.
    :returns: index just past the choices.
    """
    iCurrent = iToken
    iLast = 0
    # Fix: build a copy instead of appending to the caller's list — the old
    # aliasing (lMyUntils = lUntils; lMyUntils.append('|')) mutated the
    # caller's until-list, and the extra '|' accumulated across calls.
    lMyUntils = lUntils + ['|']
    while iLast != iCurrent:
        iLast = iCurrent
        iCurrent = choice.classify_until(lMyUntils, iCurrent, lObjects)
        iCurrent = utils.assign_next_token_if('|', token.bar, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify an interface signal declaration:
    ``identifier_list : [ mode ] subtype_indication [ bus ] [ := expression ]``.
    """
    iNext = identifier_list.classify_until([':'], iToken, lObjects, token.identifier)
    iNext = utils.assign_next_token_required(':', token.colon, iNext, lObjects)
    iNext = mode.classify(iNext, lObjects)
    iNext = subtype_indication.classify_until([';', 'bus', ':='], iNext, lObjects, token.subtype_indication)
    iNext = utils.assign_next_token_if('bus', token.bus_keyword, iNext, lObjects)
    # Optional default-value expression.
    if utils.is_next_token(':=', iNext, lObjects):
        iNext = utils.assign_next_token_required(':=', token.assignment, iNext, lObjects)
        iNext = expression.classify_until([';'], iNext, lObjects)
    return iNext
def classify(iToken, lObjects):
    """Classify a concurrent conditional signal assignment.

    concurrent_conditional_signal_assignment ::=
        target <= [ guarded ] [ delay_mechanism ] conditional_waveforms ;
    """
    iNext = utils.assign_tokens_until('<=', token.target, iToken, lObjects)
    iNext = utils.assign_next_token_required('<=', token.assignment, iNext, lObjects)
    iNext = utils.assign_next_token_if('guarded', token.guarded_keyword, iNext, lObjects)
    iNext = delay_mechanism.detect(iNext, lObjects)
    iNext = conditional_waveforms.classify_until([';'], iNext, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
def classify(iToken, lObjects):
    """Classify a context declaration:
    ``context identifier is context_clause end [ context ] [ simple_name ] ;``.
    """
    iNext = utils.assign_next_token_required('context', token.context_keyword, iToken, lObjects)
    iNext = utils.assign_next_token(token.identifier, iNext, lObjects)
    iNext = utils.assign_next_token_required('is', token.is_keyword, iNext, lObjects)
    iNext = context_clause.detect(iNext, lObjects)
    iNext = utils.assign_next_token_required('end', token.end_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if('context', token.end_context_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if_not(';', token.context_simple_name, iNext, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
def classify(iToken, lObjects):
    '''
    generate_statement_body ::=
        [ block_declarative_part
        begin ]
        { concurrent_statement }
        [ end [ alternative_label ] ; ]

    Classifies the body of a generate statement starting at iToken,
    assigning token types into lObjects in place and returning the index
    just past the body.
    '''
    iCurrent = utils.find_next_token(iToken, lObjects)
    iLast = iCurrent
    # Optional declarative part; if it consumed anything, 'begin' is mandatory.
    iCurrent = block_declarative_part.detect(iCurrent, lObjects)
    if iCurrent != iLast:
        iCurrent = utils.assign_next_token_required('begin', token.begin_keyword, iCurrent, lObjects)
    # 'begin' may also appear with no preceding declarations.
    iCurrent = utils.assign_next_token_if('begin', token.begin_keyword, iCurrent, lObjects)
    iLast = 0
    # Consume concurrent statements until no further progress is made.
    while iCurrent != iLast:
        iLast = iCurrent
        # elsif/else/when belong to the enclosing generate statement — hand back.
        if utils.is_next_token_one_of(['elsif', 'else', 'when'], iCurrent, lObjects):
            return iCurrent
        if utils.is_next_token_one_of(['end'], iCurrent, lObjects):
            break
        iCurrent = concurrent_statement.detect(iCurrent, lObjects)
    # 'end generate' closes the enclosing generate statement, not this body;
    # only a bare 'end [ alternative_label ] ;' is consumed here.
    if not utils.are_next_consecutive_tokens(['end', 'generate'], iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
        iCurrent = utils.assign_next_token_if_not(';', token.alternative_label, iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify a block statement.

    block_statement ::=
        block_label : block [ ( guard_condition ) ] [ is ]
            block_header
            block_declarative_part
        begin
            block_statement_part
        end block [ block_label ] ;
    """
    iNext = utils.tokenize_label(iToken, lObjects, token.block_label, token.label_colon)
    iNext = utils.assign_next_token_required('block', token.block_keyword, iNext, lObjects)
    # Optional parenthesised guard condition.
    if utils.is_next_token('(', iNext, lObjects):
        iNext = utils.assign_next_token_required('(', token.guard_open_parenthesis, iNext, lObjects)
        iNext = utils.assign_next_token_if_not(')', token.guard_condition, iNext, lObjects)
        iNext = utils.assign_next_token_required(')', token.guard_close_parenthesis, iNext, lObjects)
    iNext = utils.assign_next_token_if('is', token.is_keyword, iNext, lObjects)
    iNext = block_header.detect(iNext, lObjects)
    iNext = block_declarative_part.detect(iNext, lObjects)
    iNext = utils.assign_next_token_required('begin', token.begin_keyword, iNext, lObjects)
    iNext = block_statement_part.detect(iNext, lObjects)
    iNext = utils.assign_next_token_required('end', token.end_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_required('block', token.end_block_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if_not(';', token.end_block_label, iNext, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)