def classify(iToken, lObjects):
    """Classify a for-generate statement:

       generate_label : for parameter_specification generate
           generate_statement_body
       end generate [ generate_label ] ;
    """
    iIndex = utils.tokenize_label(iToken, lObjects, token.generate_label, token.label_colon)
    iIndex = utils.assign_next_token_required('for', token.for_keyword, iIndex, lObjects)
    iIndex = parameter_specification.classify_until(['generate'], iIndex, lObjects)
    iIndex = utils.assign_next_token_required('generate', token.generate_keyword, iIndex, lObjects)
    iIndex = generate_statement_body.classify(iIndex, lObjects)
    iIndex = utils.assign_next_token_required('end', token.end_keyword, iIndex, lObjects)
    iIndex = utils.assign_next_token_required('generate', token.end_generate_keyword, iIndex, lObjects)
    # Optional trailing label before the closing semicolon
    iIndex = utils.assign_next_token_if_not(';', token.end_generate_label, iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a function specification:

       [ pure | impure ] function designator
           [ [ parameter ] ( formal_parameter_list ) ] return type_mark
    """
    iCurrent = utils.assign_next_token_if('pure', token.pure_keyword, iToken, lObjects)
    iCurrent = utils.assign_next_token_if('impure', token.impure_keyword, iCurrent, lObjects)
    # Bug fix: continue from iCurrent, not iToken, so the optional
    # pure/impure tokens consumed above are not re-scanned.
    iCurrent = utils.assign_next_token_required('function', token.function_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token(token.designator, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('parameter', token.parameter_keyword, iCurrent, lObjects)
    if utils.is_next_token('(', iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required('(', token.open_parenthesis, iCurrent, lObjects)
        iCurrent = formal_parameter_list.classify(iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required(')', token.close_parenthesis, iCurrent, lObjects)
    # Bug fix: 'return' and the type mark follow the parameter list;
    # scan from iCurrent instead of rewinding to iToken.
    iCurrent = utils.assign_next_token_required('return', token.return_keyword, iCurrent, lObjects)
    iCurrent = type_mark.classify(iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify a case statement:

       [ case_label : ] case [?] expression is
           case_statement_alternative ...
       end case [?] [ case_label ] ;
    """
    iCurrent = utils.tokenize_label(iToken, lObjects, token.case_label, token.label_colon)
    iCurrent = utils.assign_next_token_required('case', token.case_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('?', token.question_mark, iCurrent, lObjects)
    iCurrent = expression.classify_until(['is'], iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('is', token.is_keyword, iCurrent, lObjects)
    iCurrent = utils.detect_submodule(iCurrent, lObjects, case_statement_alternative)
    # Bug fix: scan 'end' from iCurrent; the original rewound to iToken,
    # re-scanning from the start of the statement.
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('case', token.end_case_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('?', token.question_mark, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if_not(';', token.end_case_label, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify a variable declaration:

       [ shared ] variable identifier_list : subtype_indication [ := expression ] ;
    """
    iIndex = utils.assign_next_token_if('shared', token.shared_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token_required('variable', token.variable_keyword, iIndex, lObjects)
    iIndex = identifier_list.classify_until([':'], iIndex, lObjects, token.identifier)
    iIndex = utils.assign_next_token_required(':', token.colon, iIndex, lObjects)
    iIndex = subtype_indication.classify_until([';', ':='], iIndex, lObjects)
    # Optional initial-value assignment
    if utils.is_next_token(':=', iIndex, lObjects):
        iIndex = utils.assign_next_token_required(':=', token.assignment_operator, iIndex, lObjects)
        iIndex = expression.classify_until([';'], iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """
    assertion ::= assert condition [ report expression ] [ severity expression ]

    Detected by the keyword **assert** appearing before a semicolon.
    """
    iIndex = utils.assign_next_token_required('assert', token.keyword, iToken, lObjects)
    iIndex = condition.classify_until(['report', 'severity', ';'], iIndex, lObjects)
    # Optional report clause
    if utils.is_next_token('report', iIndex, lObjects):
        iIndex = utils.assign_next_token_required('report', token.report_keyword, iIndex, lObjects)
        iIndex = expression.classify_until(['severity', ';'], iIndex, lObjects)
    # Optional severity clause
    if utils.is_next_token('severity', iIndex, lObjects):
        iIndex = utils.assign_next_token_required('severity', token.severity_keyword, iIndex, lObjects)
        iIndex = expression.classify_until([';'], iIndex, lObjects)
    return iIndex
def classify_until(lUntils, iToken, lObjects):
    """
    conditional_waveforms ::=
        waveform when condition
        { else waveform when condition }
        [ else waveform ]
    """
    lMyElseUntils = lUntils.copy()
    lMyElseUntils.append('else')
    lMyWhenUntils = lUntils.copy()
    lMyWhenUntils.append('when')
    iCurrent = waveform.classify_until(['when'], iToken, lObjects)
    iCurrent = utils.assign_next_token_required('when', token.when_keyword, iCurrent, lObjects)
    iCurrent = condition.classify_until(lMyElseUntils, iCurrent, lObjects)
    while utils.is_next_token('else', iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required('else', token.else_keyword, iCurrent, lObjects)
        iCurrent = waveform.classify_until(lMyWhenUntils, iCurrent, lObjects)
        # Bug fix: test the terminator at the current position (iCurrent),
        # not at the function's starting position (iToken) — otherwise the
        # trailing "[ else waveform ]" form is never detected here.
        if utils.is_next_token_in_list(lUntils, iCurrent, lObjects):
            break
        iCurrent = utils.assign_next_token_required('when', token.when_keyword, iCurrent, lObjects)
        iCurrent = condition.classify_until(lMyElseUntils, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify an alias declaration:

       alias alias_designator [ : subtype_indication ] is name [ signature ] ;
    """
    iIndex = utils.assign_next_token_required('alias', token.alias_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token(token.alias_designator, iIndex, lObjects)
    # Optional subtype indication after a colon
    if utils.is_next_token(':', iIndex, lObjects):
        iIndex = utils.assign_next_token_required(':', token.colon, iIndex, lObjects)
        iIndex = subtype_indication.classify_until(['is'], iIndex, lObjects)
    iIndex = utils.assign_next_token_required('is', token.is_keyword, iIndex, lObjects)
    iIndex = name.classify_until([';', '['], iIndex, lObjects)
    # A '[' here starts an optional signature
    iIndex = signature.detect(iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """
    selected_waveforms ::=
        { waveform when choices , }
        waveform when choices
    """
    def _waveform_when_choices(iIndex):
        # waveform tokens up to 'when', then the choices up to ',' or ';'
        iIndex = utils.assign_tokens_until('when', parser.todo, iIndex, lObjects)
        iIndex = utils.assign_next_token_required('when', token.when_keyword, iIndex, lObjects)
        sEnd = utils.find_earliest_occurance([',', ';'], iIndex, lObjects)
        return utils.assign_tokens_until(sEnd, parser.todo, iIndex, lObjects)

    iCurrent = _waveform_when_choices(iToken)
    while utils.is_next_token(',', iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required(',', token.comma, iCurrent, lObjects)
        iCurrent = _waveform_when_choices(iCurrent)
    return iCurrent
def classify(iToken, lObjects):
    """
    entity_name_list ::=
        entity_designator { , entity_designator }
      | others
      | all
    """
    if utils.is_next_token('others', iToken, lObjects):
        return utils.assign_next_token_required('others', token.others_keyword, iToken, lObjects)
    if utils.is_next_token('all', iToken, lObjects):
        return utils.assign_next_token_required('all', token.all_keyword, iToken, lObjects)
    iCurrent = entity_designator.classify(iToken, lObjects)
    while utils.is_next_token(',', iCurrent, lObjects):
        # Bug fix: advance from iCurrent (not iToken) and keep the index
        # returned by entity_designator.classify — the original discarded
        # it, so the loop never made progress past the first designator.
        iCurrent = utils.assign_next_token_required(',', token.comma, iCurrent, lObjects)
        iCurrent = entity_designator.classify(iCurrent, lObjects)
    return iCurrent
def classify_until(lUntils, iToken, lObjects):
    """
    waveform ::=
        waveform_element { , waveform_element }
      | unaffected
    """
    if utils.is_next_token('unaffected', iToken, lObjects):
        return utils.assign_next_token_required('unaffected', token.unaffected_keyword, iToken, lObjects)
    # Bug fix: copy the caller's until-list before appending; the original
    # aliased it and appended ',' into the caller's own list.
    lMyUntils = lUntils.copy()
    lMyUntils.append(',')
    iCurrent = waveform_element.classify_until(lMyUntils, iToken, lObjects)
    while utils.is_next_token(',', iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required(',', token.comma, iCurrent, lObjects)
        iCurrent = waveform_element.classify_until(lMyUntils, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if(')', parser.todo, iCurrent, lObjects)
    return iCurrent
def classify_until(lUntils, iToken, lObjects):
    """
    selected_expressions ::=
        { expression when choices , }
        expression when choices
    """
    # Bug fix: copy the caller's until-list before appending; the original
    # aliased it and appended ',' into the caller's own list.
    lMyUntils = lUntils.copy()
    lMyUntils.append(',')
    iCurrent = expression.classify_until(['when'], iToken, lObjects)
    iCurrent = utils.assign_next_token_required('when', token.when_keyword, iCurrent, lObjects)
    iCurrent = choices.classify_until(lMyUntils, iCurrent, lObjects)
    while utils.is_next_token(',', iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required(',', token.comma, iCurrent, lObjects)
        iCurrent = expression.classify_until(['when'], iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required('when', token.when_keyword, iCurrent, lObjects)
        iCurrent = choices.classify_until(lMyUntils, iCurrent, lObjects)
    return iCurrent
def classify_closing_declaration(iToken, lObjects):
    """Classify the closing of a configuration declaration:

       end [ configuration ] [ configuration_simple_name ] ;
    """
    iIndex = utils.assign_next_token_required('end', token.end_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token_if('configuration', token.end_configuration_keyword, iIndex, lObjects)
    # Optional repeated configuration name before the semicolon
    iIndex = utils.assign_next_token_if_not(';', token.configuration_simple_name, iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a file type definition:

       file of type_mark
    """
    iCurrent = utils.assign_next_token_required('file', token.file_keyword, iToken, lObjects)
    iCurrent = utils.assign_next_token_required('of', token.of_keyword, iCurrent, lObjects)
    # Bug fix: the type mark follows 'of'; classify from iCurrent — the
    # original rewound to iToken, re-scanning the 'file' keyword.
    iCurrent = type_mark.classify(iCurrent, lObjects)
    return iCurrent
def classify_opening_declaration(iToken, lObjects):
    """Classify the opening of an entity declaration:

       entity identifier is
    """
    iIndex = utils.assign_next_token_required('entity', token.entity_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token(token.identifier, iIndex, lObjects)
    return utils.assign_next_token_required('is', token.is_keyword, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a generic map aspect of the form:

       generic map ( [ default ] [ <> ] )
    """
    # Bug fix: every call after the first must continue from iCurrent;
    # the original passed iToken to all of them, so each scan restarted
    # from the beginning and the tokens were never consumed in sequence.
    iCurrent = utils.assign_next_token_required('generic', token.generic_keyword, iToken, lObjects)
    iCurrent = utils.assign_next_token_required('map', token.map_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('(', token.open_parenthesis, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('default', token.default_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('<>', token.undefined_range, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(')', token.close_parenthesis, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify a simple assignment:

       target := expression ;
    """
    iIndex = utils.assign_tokens_until(':=', token.target, iToken, lObjects)
    iIndex = utils.assign_next_token_required(':=', token.assignment, iIndex, lObjects)
    iIndex = expression.classify_until([';'], iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a null statement:

       [ label : ] null ;
    """
    iIndex = utils.tokenize_label(iToken, lObjects, token.label, token.label_colon)
    iIndex = utils.assign_next_token_required('null', token.null_keyword, iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify an attribute declaration:

       attribute identifier : type_mark ;
    """
    iIndex = utils.assign_next_token_required('attribute', token.attribute_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token(token.identifier, iIndex, lObjects)
    iIndex = utils.assign_next_token_required(':', token.colon, iIndex, lObjects)
    iIndex = type_mark.classify(iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify an incomplete type declaration:

       type identifier ;
    """
    iIndex = utils.assign_next_token_required('type', token.type_keyword, iToken, lObjects)
    iIndex = identifier.classify(iIndex, lObjects, token.identifier)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a port map aspect:

       port map ( association_list )
    """
    iIndex = utils.assign_next_token_required('port', token.port_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token_required('map', token.map_keyword, iIndex, lObjects)
    iIndex = utils.assign_next_token_required('(', token.open_parenthesis, iIndex, lObjects)
    iIndex = association_list.classify(iIndex, lObjects)
    return utils.assign_next_token_required(')', token.close_parenthesis, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a library clause:

       library logical_name_list ;
    """
    iIndex = utils.assign_next_token_required('library', token.keyword, iToken, lObjects)
    iIndex = logical_name_list.classify_until([';'], iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a group declaration:

       group identifier : group_template_name ( group_constituent_list ) ;
    """
    iIndex = utils.assign_next_token_required('group', token.group_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token(token.identifier, iIndex, lObjects)
    iIndex = utils.assign_next_token_required(':', token.colon, iIndex, lObjects)
    iIndex = utils.assign_next_token(token.group_template_name, iIndex, lObjects)
    iIndex = utils.assign_next_token_required('(', token.open_parenthesis, iIndex, lObjects)
    iIndex = group_constituent_list.classify(iIndex, lObjects)
    iIndex = utils.assign_next_token_required(')', token.close_parenthesis, iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a procedure specification:

       procedure designator [ [ parameter ] ( formal_parameter_list ) ]
    """
    iIndex = utils.assign_next_token_required('procedure', token.procedure_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token(token.designator, iIndex, lObjects)
    iIndex = utils.assign_next_token_if('parameter', token.parameter_keyword, iIndex, lObjects)
    # Optional parenthesized formal parameter list
    if utils.is_next_token('(', iIndex, lObjects):
        iIndex = utils.assign_next_token_required('(', token.open_parenthesis, iIndex, lObjects)
        iIndex = formal_parameter_list.classify(iIndex, lObjects)
        iIndex = utils.assign_next_token_required(')', token.close_parenthesis, iIndex, lObjects)
    return iIndex
def classify(iToken, lObjects):
    """
    index_subtype_definition ::= type_mark range <>
    """
    iCurrent = type_mark.classify(iToken, lObjects)
    # Bug fix: 'range' follows the type mark; scan from iCurrent — the
    # original rewound to iToken, re-scanning the type mark's tokens.
    iCurrent = utils.assign_next_token_required('range', token.range_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('<>', token.undefined_range, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify a return statement:

       [ label : ] return [ expression ] ;
    """
    iIndex = utils.tokenize_label(iToken, lObjects, token.label, token.label_colon)
    iIndex = utils.assign_next_token_required('return', token.return_keyword, iIndex, lObjects)
    # Optional return expression
    if not utils.is_next_token(';', iIndex, lObjects):
        iIndex = expression.classify_until([';'], iIndex, lObjects)
    return utils.assign_next_token_required(';', token.semicolon, iIndex, lObjects)
def classify_opening_declaration(iToken, lObjects):
    """Classify the opening of a package body:

       package body package_simple_name is
    """
    iIndex = utils.assign_next_token_required('package', token.package_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token_required('body', token.body_keyword, iIndex, lObjects)
    iIndex = utils.assign_next_token(token.package_simple_name, iIndex, lObjects)
    return utils.assign_next_token_required('is', token.is_keyword, iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a case-generate alternative:

       when choices => generate_statement_body
    """
    iCurrent = utils.assign_next_token_required('when', token.when_keyword, iToken, lObjects)
    iCurrent = choices.classify_until(['=>'], iCurrent, lObjects)
    # Bug fix: '=>' follows the choices; scan from iCurrent — the original
    # rewound to iToken. (Matches the sibling case-statement alternative
    # classifier, which chains iCurrent correctly.)
    iCurrent = utils.assign_next_token_required('=>', token.assignment, iCurrent, lObjects)
    iCurrent = generate_statement_body.classify(iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    """Classify a case statement alternative:

       when choices => sequence_of_statements
    """
    iIndex = utils.assign_next_token_required('when', token.when_keyword, iToken, lObjects)
    iIndex = choices.classify_until(['=>'], iIndex, lObjects)
    iIndex = utils.assign_next_token_required('=>', token.assignment, iIndex, lObjects)
    return sequence_of_statements.detect(iIndex, lObjects)
def classify(iToken, lObjects):
    """Classify a protected type body:

       protected body
           protected_type_body_declarative_part
       end protected body [ protected_type_simple_name ]
    """
    iCurrent = utils.assign_next_token_required('protected', token.protected_keyword, iToken, lObjects)
    # Bug fix: each step must continue from iCurrent; the original passed
    # iToken to most calls, rewinding the scan to the start every time.
    iCurrent = utils.assign_next_token_required('body', token.body_keyword, iCurrent, lObjects)
    iCurrent = protected_type_body_declarative_part.detect(iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('protected', token.end_protected_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('body', token.end_body_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if_not(';', token.protected_type_simple_name, iCurrent, lObjects)
    # NOTE(review): the closing ';' is not consumed here — presumably the
    # enclosing type-declaration classifier handles it; confirm at caller.
    return iCurrent
def classify_opening_declaration(iToken, lObjects):
    """Classify the opening of an architecture body:

       architecture identifier of entity_name is
    """
    iIndex = utils.assign_next_token_required('architecture', token.architecture_keyword, iToken, lObjects)
    iIndex = utils.assign_next_token(token.identifier, iIndex, lObjects)
    iIndex = utils.assign_next_token_required('of', token.of_keyword, iIndex, lObjects)
    iIndex = utils.assign_next_token(token.entity_name, iIndex, lObjects)
    return utils.assign_next_token_required('is', token.is_keyword, iIndex, lObjects)