def classify(iToken, lObjects):
    '''
    Classifies the tokens of a case statement.

    case_statement ::=
        [ case_label : ]
        case [ ? ] expression is
            case_statement_alternative
            { case_statement_alternative }
        end case [ ? ] [ case_label ] ;

    Returns the index of the last token consumed.
    '''
    iCurrent = utils.tokenize_label(iToken, lObjects, token.case_label, token.label_colon)
    iCurrent = utils.assign_next_token_required('case', token.case_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('?', token.question_mark, iCurrent, lObjects)
    iCurrent = expression.classify_until(['is'], iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('is', token.is_keyword, iCurrent, lObjects)
    iCurrent = utils.detect_submodule(iCurrent, lObjects, case_statement_alternative)
    # Fix: continue from iCurrent; the original passed iToken here, which
    # rewound the scan back to the start of the statement.
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('case', token.end_case_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('?', token.question_mark, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if_not(';', token.end_case_label, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    '''
    Classifies the tokens of a for-generate statement.

    for_generate_statement ::=
        generate_label :
            for parameter_specification generate
                generate_statement_body
            end generate [ generate_label ] ;
    '''
    iNext = utils.tokenize_label(iToken, lObjects, token.generate_label, token.label_colon)
    iNext = utils.assign_next_token_required('for', token.for_keyword, iNext, lObjects)
    iNext = parameter_specification.classify_until(['generate'], iNext, lObjects)
    iNext = utils.assign_next_token_required('generate', token.generate_keyword, iNext, lObjects)
    iNext = generate_statement_body.classify(iNext, lObjects)
    # Closing "end generate [ generate_label ] ;"
    iNext = utils.assign_next_token_required('end', token.end_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_required('generate', token.end_generate_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if_not(';', token.end_generate_label, iNext, lObjects)
    iNext = utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
    return iNext
def classify_closing_declaration(iToken, lObjects):
    '''
    Classifies the closing of a configuration declaration:

        end [ configuration ] [ configuration_simple_name ] ;
    '''
    iScan = utils.assign_next_token_required('end', token.end_keyword, iToken, lObjects)
    iScan = utils.assign_next_token_if('configuration', token.end_configuration_keyword, iScan, lObjects)
    iScan = utils.assign_next_token_if_not(';', token.configuration_simple_name, iScan, lObjects)
    iScan = utils.assign_next_token_required(';', token.semicolon, iScan, lObjects)
    return iScan
def classify(iToken, lObjects):
    '''
    Classifies the tokens of a protected type body.

    protected_type_body ::=
        protected body
            protected_type_body_declarative_part
        end protected body [ protected_type_simple_name ]
    '''
    iCurrent = utils.assign_next_token_required('protected', token.protected_keyword, iToken, lObjects)
    # Fix: every step after the first must continue from iCurrent; the
    # original passed iToken, restarting each scan at the beginning of the
    # construct instead of advancing through it.
    iCurrent = utils.assign_next_token_required('body', token.body_keyword, iCurrent, lObjects)
    iCurrent = protected_type_body_declarative_part.detect(iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('protected', token.end_protected_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('body', token.end_body_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if_not(';', token.protected_type_simple_name, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    '''
    Classifies the tokens of an if-generate statement.

    if_generate_statement ::=
        generate_label :
            if condition generate
                generate_statement_body
            { elsif condition generate
                generate_statement_body }
            [ else generate
                generate_statement_body ]
            end generate [ generate_label ] ;
    '''
    iPos = utils.tokenize_label(iToken, lObjects, token.generate_label, token.label_colon)
    iPos = utils.assign_next_token_required('if', token.if_keyword, iPos, lObjects)
    # TODO: handle alternative_label after "if"
    iPos = condition.classify_until(['generate'], iPos, lObjects)
    iPos = utils.assign_next_token_required('generate', token.generate_keyword, iPos, lObjects)
    iPos = generate_statement_body.classify(iPos, lObjects)
    # Zero or more "elsif condition generate" branches
    while utils.is_next_token('elsif', iPos, lObjects):
        iPos = utils.assign_next_token_required('elsif', token.elsif_keyword, iPos, lObjects)
        iPos = condition.classify_until(['generate'], iPos, lObjects)
        iPos = utils.assign_next_token_required('generate', token.generate_keyword, iPos, lObjects)
        iPos = generate_statement_body.classify(iPos, lObjects)
    # Optional "else generate" branch
    if utils.is_next_token('else', iPos, lObjects):
        iPos = utils.assign_next_token_required('else', token.else_keyword, iPos, lObjects)
        iPos = utils.assign_next_token_required('generate', token.generate_keyword, iPos, lObjects)
        iPos = generate_statement_body.classify(iPos, lObjects)
    iPos = utils.assign_next_token_required('end', token.end_keyword, iPos, lObjects)
    iPos = utils.assign_next_token_required('generate', token.end_generate_keyword, iPos, lObjects)
    iPos = utils.assign_next_token_if_not(';', token.end_generate_label, iPos, lObjects)
    iPos = utils.assign_next_token_required(';', token.semicolon, iPos, lObjects)
    return iPos
def classify(iToken, lObjects):
    '''
    Classifies the tokens of a block statement.

    block_statement ::=
        block_label :
            block [ ( guard_condition ) ] [ is ]
                block_header
                block_declarative_part
            begin
                block_statement_part
            end block [ block_label ] ;
    '''
    iScan = utils.tokenize_label(iToken, lObjects, token.block_label, token.label_colon)
    iScan = utils.assign_next_token_required('block', token.block_keyword, iScan, lObjects)
    # Optional parenthesized guard condition
    if utils.is_next_token('(', iScan, lObjects):
        iScan = utils.assign_next_token_required('(', token.guard_open_parenthesis, iScan, lObjects)
        iScan = utils.assign_next_token_if_not(')', token.guard_condition, iScan, lObjects)
        iScan = utils.assign_next_token_required(')', token.guard_close_parenthesis, iScan, lObjects)
    iScan = utils.assign_next_token_if('is', token.is_keyword, iScan, lObjects)
    iScan = block_header.detect(iScan, lObjects)
    iScan = block_declarative_part.detect(iScan, lObjects)
    iScan = utils.assign_next_token_required('begin', token.begin_keyword, iScan, lObjects)
    iScan = block_statement_part.detect(iScan, lObjects)
    # Closing "end block [ block_label ] ;"
    iScan = utils.assign_next_token_required('end', token.end_keyword, iScan, lObjects)
    iScan = utils.assign_next_token_required('block', token.end_block_keyword, iScan, lObjects)
    iScan = utils.assign_next_token_if_not(';', token.end_block_label, iScan, lObjects)
    iScan = utils.assign_next_token_required(';', token.semicolon, iScan, lObjects)
    return iScan
def classify_closing_declaration(iToken, lObjects):
    '''
    Classifies the closing of a process statement:

        end [ postponed ] process [ process_label ] ;
    '''
    iScan = utils.assign_next_token_required('end', token.end_keyword, iToken, lObjects)
    iScan = utils.assign_next_token_if('postponed', token.end_postponed_keyword, iScan, lObjects)
    iScan = utils.assign_next_token_required('process', token.end_process_keyword, iScan, lObjects)
    iScan = utils.assign_next_token_if_not(';', token.end_process_label, iScan, lObjects)
    iScan = utils.assign_next_token_required(';', token.semicolon, iScan, lObjects)
    return iScan
def classify(iToken, lObjects):
    '''
    Classifies the tokens of a loop statement.

    loop_statement ::=
        [ loop_label : ]
        [ iteration_scheme ] loop
            sequence_of_statements
        end loop [ loop_label ] ;
    '''
    iNext = utils.tokenize_label(iToken, lObjects, token.loop_label, token.label_colon)
    iNext = iteration_scheme.classify(iNext, lObjects)
    iNext = utils.assign_next_token_required('loop', token.loop_keyword, iNext, lObjects)
    iNext = sequence_of_statements.detect(iNext, lObjects)
    # Closing "end loop [ loop_label ] ;"
    iNext = utils.assign_next_token_required('end', token.end_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_required('loop', token.end_loop_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if_not(';', token.end_loop_label, iNext, lObjects)
    iNext = utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
    return iNext
def classify_until(lUntils, iToken, lObjects, oToken=token.identifier):
    '''
    identifier_list ::= identifier { , identifier }

    Consumes identifiers (classified as oToken) separated by commas until one
    of the words in lUntils is next, or the object list is exhausted.
    '''
    iIndex = iToken
    iFinal = len(lObjects) - 1
    while not utils.is_next_token_one_of(lUntils, iIndex, lObjects):
        # Stop at the end of the object list to avoid scanning past it.
        if iIndex == iFinal:
            break
        iIndex = utils.assign_next_token_if_not(',', oToken, iIndex, lObjects)
        iIndex = utils.assign_next_token_if(',', token.comma, iIndex, lObjects)
    return iIndex
def classify(iToken, lObjects):
    '''
    Classifies the tokens of a case-generate statement.

    case_generate_statement ::=
        generate_label :
            case expression generate
                case_generate_alternative
                { case_generate_alternative }
            end generate [ generate_label ] ;
    '''
    iCurrent = utils.tokenize_label(iToken, lObjects, token.generate_label, token.label_colon)
    iCurrent = utils.assign_next_token_required('case', token.case_keyword, iCurrent, lObjects)
    iCurrent = expression.classify_until(['generate'], iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('generate', token.generate_keyword, iCurrent, lObjects)
    # Fix: the alternatives must be scanned from iCurrent and the result kept
    # in iCurrent. The original read from and assigned to iToken, so the
    # alternatives' progress was discarded and the closing "end" was sought
    # from the wrong position.
    iCurrent = utils.detect_submodule(iCurrent, lObjects, case_generate_alternative)
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('generate', token.end_generate_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if_not(';', token.end_generate_label, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify_closing_declaration(iToken, lObjects):
    '''
    Classifies the closing of a package body:

        end [ package body ] [ package_simple_name ] ;
    '''
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iToken, lObjects)
    if utils.are_next_consecutive_tokens(['package'], iCurrent, lObjects):
        # Fix: continue from iCurrent; the original passed iToken here and
        # in the calls below, rewinding the scan to the "end" keyword each
        # time instead of advancing through the closing tokens.
        iCurrent = utils.assign_next_token_required('package', token.end_package_keyword, iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required('body', token.end_body_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if_not(';', token.end_package_simple_name, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    '''
    Classifies the tokens of a record type definition.

    record_type_definition ::=
        record
            element_declaration
            { element_declaration }
        end record [ record_type_simple_name ]
    '''
    iScan = utils.assign_next_token_required('record', token.record_keyword, iToken, lObjects)
    # Element declarations run until the closing "end"
    iScan = utils.classify_subelement_until('end', element_declaration, iScan, lObjects)
    iScan = utils.assign_next_token_required('end', token.end_keyword, iScan, lObjects)
    iScan = utils.assign_next_token_required('record', token.end_record_keyword, iScan, lObjects)
    iScan = utils.assign_next_token_if_not(';', token.record_type_simple_name, iScan, lObjects)
    return iScan
def classify(iToken, lObjects):
    '''
    Classifies the tokens of a context declaration.

    context_declaration ::=
        context identifier is
            context_clause
        end [ context ] [ context_simple_name ] ;
    '''
    iNext = utils.assign_next_token_required('context', token.context_keyword, iToken, lObjects)
    iNext = utils.assign_next_token(token.identifier, iNext, lObjects)
    iNext = utils.assign_next_token_required('is', token.is_keyword, iNext, lObjects)
    iNext = context_clause.detect(iNext, lObjects)
    # Closing "end [ context ] [ simple_name ] ;"
    iNext = utils.assign_next_token_required('end', token.end_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if('context', token.end_context_keyword, iNext, lObjects)
    iNext = utils.assign_next_token_if_not(';', token.context_simple_name, iNext, lObjects)
    iNext = utils.assign_next_token_required(';', token.semicolon, iNext, lObjects)
    return iNext
def classify(iToken, lObjects):
    '''
    generate_statement_body ::=
        [ block_declarative_part
    begin ]
        { concurrent_statement }
    [ end [ alternative_label ] ; ]
    '''
    iCurrent = utils.find_next_token(iToken, lObjects)
    iLast = iCurrent
    iCurrent = block_declarative_part.detect(iCurrent, lObjects)
    # If the declarative part consumed anything, a "begin" is mandatory;
    # otherwise "begin" on its own is still permitted and consumed if present.
    if iCurrent != iLast:
        iCurrent = utils.assign_next_token_required('begin', token.begin_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if('begin', token.begin_keyword, iCurrent, lObjects)
    # Consume concurrent statements until no progress is made.
    # iLast = 0 guarantees the first loop-entry test passes.
    iLast = 0
    while iCurrent != iLast:
        iLast = iCurrent
        # These keywords belong to the enclosing generate statement or the
        # next alternative — hand control back to the caller immediately.
        if utils.is_next_token_one_of(['elsif', 'else', 'when'], iCurrent, lObjects):
            return iCurrent
        if utils.is_next_token_one_of(['end'], iCurrent, lObjects):
            break
        iCurrent = concurrent_statement.detect(iCurrent, lObjects)
    # "end generate" belongs to the enclosing statement; only a bare
    # "end [ alternative_label ] ;" terminates the body itself.
    if not utils.are_next_consecutive_tokens(['end', 'generate'], iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
        iCurrent = utils.assign_next_token_if_not(';', token.alternative_label, iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    '''
    Classifies the tokens of an if statement.

    if_statement ::=
        [ if_label : ]
        if condition then
            sequence_of_statements
        { elsif condition then
            sequence_of_statements }
        [ else
            sequence_of_statements ]
        end if [ if_label ] ;
    '''
    iCurrent = utils.tokenize_label(iToken, lObjects, token.if_label, token.label_colon)
    iCurrent = utils.assign_next_token_required('if', token.if_keyword, iCurrent, lObjects)
    iCurrent = condition.classify_until(['then'], iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('then', token.then_keyword, iCurrent, lObjects)
    iCurrent = sequence_of_statements.detect(iCurrent, lObjects)
    while utils.is_next_token_one_of(['else', 'elsif'], iCurrent, lObjects):
        if utils.is_next_token('elsif', iCurrent, lObjects):
            iCurrent = utils.assign_next_token_required('elsif', token.elsif_keyword, iCurrent, lObjects)
            iCurrent = condition.classify_until(['then'], iCurrent, lObjects)
            iCurrent = utils.assign_next_token_required('then', token.then_keyword, iCurrent, lObjects)
            iCurrent = sequence_of_statements.detect(iCurrent, lObjects)
        else:
            iCurrent = utils.assign_next_token_required('else', token.else_keyword, iCurrent, lObjects)
            iCurrent = sequence_of_statements.detect(iCurrent, lObjects)
    # Fix: continue from iCurrent; the original passed iToken here, which
    # rewound the scan back to the start of the if statement.
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required('if', token.end_if_keyword, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_if_not(';', token.end_if_label, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    '''
    Classifies the body of a subprogram:

        is
            subprogram_declarative_part
        begin
            subprogram_statement_part
        end [ subprogram_kind ] [ designator ] ;
    '''
    iScan = utils.assign_next_token_required('is', token.is_keyword, iToken, lObjects)
    iScan = subprogram_declarative_part.detect(iScan, lObjects)
    iScan = utils.assign_next_token_required('begin', token.begin_keyword, iScan, lObjects)
    iScan = subprogram_statement_part.detect(iScan, lObjects)
    iScan = utils.assign_next_token_required('end', token.end_keyword, iScan, lObjects)
    # Optional trailing subprogram kind ("function" / "procedure")
    if subprogram_kind.detect(iScan, lObjects):
        iScan = subprogram_kind.classify(iScan, lObjects)
    iScan = utils.assign_next_token_if_not(';', token.designator, iScan, lObjects)
    iScan = utils.assign_next_token_required(';', token.semicolon, iScan, lObjects)
    return iScan
def classify(iToken, lObjects):
    '''
    Classifies the instantiated unit of a component instantiation:

        component component_name
      | configuration configuration_name
      | entity entity_name [ ( architecture_identifier ) ]
      | component_name            (no leading keyword)
    '''
    iScan = iToken
    if utils.is_next_token('component', iScan, lObjects):
        iScan = utils.assign_next_token_required('component', token.component_keyword, iScan, lObjects)
        iScan = utils.assign_next_token(token.component_name, iScan, lObjects)
    elif utils.is_next_token('configuration', iScan, lObjects):
        iScan = utils.assign_next_token_required('configuration', token.configuration_keyword, iScan, lObjects)
        iScan = utils.assign_next_token(token.configuration_name, iScan, lObjects)
    elif utils.is_next_token('entity', iScan, lObjects):
        iScan = utils.assign_next_token_required('entity', token.entity_keyword, iScan, lObjects)
        iScan = utils.assign_next_token(token.entity_name, iScan, lObjects)
        # Optional "( architecture_identifier )" after the entity name
        if utils.is_next_token('(', iScan, lObjects):
            iScan = utils.assign_next_token_required('(', token.open_parenthesis, iScan, lObjects)
            iScan = utils.assign_next_token_if_not(')', token.architecture_identifier, iScan, lObjects)
            iScan = utils.assign_next_token_required(')', token.close_parenthesis, iScan, lObjects)
    else:
        # No keyword: a bare name is treated as a component name.
        iScan = utils.assign_next_token(token.component_name, iScan, lObjects)
    return iScan