def detect(iToken, lObjects):
    '''
    interface_package_generic_map_aspect ::=
        generic_map_aspect
      | generic map ( <> )
      | generic map ( default )

    Returns the index returned by the matching classify routine.
    '''
    # Box form:  generic map ( <> )
    if utils.are_next_consecutive_tokens(['generic', 'map', '(', '<>'], iToken, lObjects):
        return classify(iToken, lObjects)
    # Default form:  generic map ( default )
    if utils.are_next_consecutive_tokens(['generic', 'map', '(', 'default'], iToken, lObjects):
        return classify(iToken, lObjects)
    # Otherwise defer to the ordinary generic_map_aspect classifier.
    # NOTE: the original ended with an unreachable `return iToken` after
    # this exhaustive if/elif/else; it has been removed (dead code).
    return generic_map_aspect.classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    for_generate_statement ::=
        *generate*_label :
        for *generate*_parameter_specification generate
            generate_statement_body
        end generate [ *generate*_label ] ;
    '''
    # Properly labeled form:  <label> : for ...
    if utils.are_next_consecutive_tokens([None, ':', 'for'], iToken, lObjects):
        return classify(iToken, lObjects)
    # A bare "for" means the required generate label is missing.
    if utils.are_next_consecutive_tokens(['for'], iToken, lObjects):
        iIndex = utils.find_next_token(iToken, lObjects)
        # BUG FIX: build the keyword token from the next real token at
        # iIndex (the index also reported in the error), not from
        # lObjects[iToken], which is not the located token.
        oToken = token.for_keyword(lObjects[iIndex].get_value())
        utils.print_error_message('generate_label', oToken, iIndex, lObjects)
    return iToken
def detect(iToken, lObjects):
    '''
    generic_clause ::= generic ( generic_list ) ;

    Classifies the clause when "generic (" is next; otherwise leaves the
    token index untouched.
    '''
    lExpected = ['generic', '(']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    attribute_declaration ::= attribute identifier : type_mark ;

    Classifies the declaration when "attribute <identifier> :" is next;
    otherwise leaves the token index untouched.
    '''
    # None matches any identifier token.
    lExpected = ['attribute', None, ':']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    case_generate_statement ::=
        *generate*_label :
        case expression generate
            case_generate_alternative
          { case_generate_alternative }
        end generate [ *generate*_label ] ;
    '''
    # Properly labeled form:  <label> : case ...
    if utils.are_next_consecutive_tokens([None, ':', 'case'], iToken, lObjects):
        return classify(iToken, lObjects)
    # A bare "case" means the required generate label is missing.
    if utils.are_next_consecutive_tokens(['case'], iToken, lObjects):
        iIndex = utils.find_next_token(iToken, lObjects)
        # BUG FIX: build the keyword token from the next real token at
        # iIndex (the index also reported in the error), not from
        # lObjects[iToken], which is not the located token.
        oToken = token.case_keyword(lObjects[iIndex].get_value())
        utils.print_error_message('generate_label', oToken, iIndex, lObjects)
    return iToken
def detect(iToken, lObjects):
    '''
    attribute_specification ::=
        attribute attribute_designator of entity_specification is expression ;

    Classifies the specification when "attribute <designator> of" is next;
    otherwise leaves the token index untouched.
    '''
    # None matches any attribute_designator token.
    lExpected = ['attribute', None, 'of']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    port_clause ::= port ( port_list ) ;

    Classifies the clause when "port (" is next; otherwise leaves the
    token index untouched.
    '''
    lExpected = ['port', '(']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    full_type_declaration ::= type identifier is type_definition ;

    Classifies the declaration when "type <identifier> is" is next;
    otherwise leaves the token index untouched.
    '''
    # None matches any identifier token.
    lExpected = ['type', None, 'is']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    port_map_aspect ::= port map ( *port*_association_list )

    Classifies the aspect when "port map (" is next; otherwise leaves the
    token index untouched.
    '''
    lExpected = ['port', 'map', '(']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    incomplete_type_declaration ::= type identifier ;

    Classifies the declaration when "type <identifier> ;" is next;
    otherwise leaves the token index untouched.
    '''
    # None matches any identifier token.
    lExpected = ['type', None, ';']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    next_statement ::= [ label : ] next [ loop_label ] [ when condition ] ;

    Handles both the labeled ("<label> : next") and unlabeled ("next")
    forms; otherwise leaves the token index untouched.
    '''
    bLabeled = utils.are_next_consecutive_tokens([None, ':', 'next'], iToken, lObjects)
    if bLabeled or utils.is_next_token('next', iToken, lObjects):
        return classify(iToken, lObjects)
    return iToken
def detect(iToken, lObjects):
    '''
    null_statement ::= [ label : ] null ;

    Handles both the labeled ("<label> : null") and unlabeled ("null")
    forms; otherwise leaves the token index untouched.
    '''
    bLabeled = utils.are_next_consecutive_tokens([None, ':', 'null'], iToken, lObjects)
    if bLabeled or utils.is_next_token('null', iToken, lObjects):
        return classify(iToken, lObjects)
    return iToken
def detect(iToken, lObjects):
    '''
    package_instantiation_declaration ::=
        package identifier is new *uninstantiated_package*_name
            [ generic_map_aspect ] ;

    Classifies the declaration when "package <identifier> is new" is next;
    otherwise leaves the token index untouched.
    '''
    # None matches any identifier token.
    lExpected = ['package', None, 'is', 'new']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    protected_type_body ::=
        **protected** **body**
            protected_type_body_declarative_part
        **end** **protected** **body** [ protected_type_simple_name ]

    Classifies the body when "protected body" is next; otherwise leaves
    the token index untouched.
    '''
    lExpected = ['protected', 'body']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    if_generate_statement ::=
        *generate*_label :
        if [ *alternative*_label : ] condition generate
            generate_statement_body
      { elsif [ *alternative_label* : ] condition generate
            generate_statement_body }
      [ else [ *alternative_label* : ] generate
            generate_statement_body ]
        end generate [ *generate*_label ] ;
    '''
    # Properly labeled form:  <label> : if ...
    if utils.are_next_consecutive_tokens([None, ':', 'if'], iToken, lObjects):
        return classify(iToken, lObjects)
    # A bare "if" means the required generate label is missing.
    if utils.are_next_consecutive_tokens(['if'], iToken, lObjects):
        iIndex = utils.find_next_token(iToken, lObjects)
        # BUG FIX: build the keyword token from the next real token at
        # iIndex (the index also reported in the error), not from
        # lObjects[iToken], which is not the located token.
        oToken = token.if_keyword(lObjects[iIndex].get_value())
        utils.print_error_message('generate_label', oToken, iIndex, lObjects)
    return iToken
def detect(iToken, lObjects):
    '''
    package_body ::=
        package body *package*_simple_name is
            package_body_declarative_part
        end [ package body ] [ *package*_simple_name ] ;

    Classifies the body when "package body <name> is" is next; otherwise
    leaves the token index untouched.
    '''
    # None matches any simple_name token.
    lExpected = ['package', 'body', None, 'is']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def detect(iToken, lObjects):
    '''
    block_statement ::=
        block_label :
        block [ ( *guard*_condition ) ] [ is ]
            block_header
            block_declarative_part
        begin
            block_statement_part
        end block [ block_label ] ;

    Classifies the statement when "<label> : block" is next; otherwise
    leaves the token index untouched.
    '''
    # None matches any block_label token.
    lExpected = [None, ':', 'block']
    if not utils.are_next_consecutive_tokens(lExpected, iToken, lObjects):
        return iToken
    return classify(iToken, lObjects)
def classify_closing_declaration(iToken, lObjects):
    '''
    Classifies the closing of a package body:

        end [ package body ] [ *package*_simple_name ] ;

    Returns the token index just past the closing semicolon.
    '''
    iCurrent = utils.assign_next_token_required('end', token.end_keyword, iToken, lObjects)
    # Optional "package body" keywords after "end".
    if utils.are_next_consecutive_tokens(['package'], iCurrent, lObjects):
        # BUG FIX: each assignment must continue from iCurrent, not from
        # the original iToken — otherwise every call re-scans from the
        # start of the construct instead of consuming tokens in sequence.
        iCurrent = utils.assign_next_token_required('package', token.end_package_keyword, iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required('body', token.end_body_keyword, iCurrent, lObjects)
    # Optional trailing simple name (anything that is not the semicolon).
    iCurrent = utils.assign_next_token_if_not(';', token.end_package_simple_name, iCurrent, lObjects)
    iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent
def classify(iToken, lObjects):
    '''
    generate_statement_body ::=
        [ block_declarative_part
        begin ]
            { concurrent_statement }
        [ end [ alternative_label ] ; ]

    Scans and classifies one generate statement body, returning the token
    index just past what was consumed.
    '''
    iCurrent = utils.find_next_token(iToken, lObjects)
    iLast = iCurrent
    # Optional declarative part; if it consumed anything, a "begin" must follow.
    iCurrent = block_declarative_part.detect(iCurrent, lObjects)
    if iCurrent != iLast:
        iCurrent = utils.assign_next_token_required('begin', token.begin_keyword, iCurrent, lObjects)
    # "begin" may also appear with an empty declarative part — take it if present.
    iCurrent = utils.assign_next_token_if('begin', token.begin_keyword, iCurrent, lObjects)
    # Fixpoint loop over concurrent statements: stop once an iteration
    # makes no progress (iCurrent unchanged).
    iLast = 0
    while iCurrent != iLast:
        iLast = iCurrent
        # A sibling branch keyword ends this body; the caller handles it.
        if utils.is_next_token_one_of(['elsif', 'else', 'when'], iCurrent, lObjects):
            return iCurrent
        # "end" terminates the statement list; fall through to the closing part.
        if utils.is_next_token_one_of(['end'], iCurrent, lObjects):
            break
        iCurrent = concurrent_statement.detect(iCurrent, lObjects)
    # Optional closing "end [ alternative_label ] ;" — but "end generate"
    # belongs to the enclosing generate statement, so leave it untouched.
    if not utils.are_next_consecutive_tokens(['end', 'generate'], iCurrent, lObjects):
        iCurrent = utils.assign_next_token_required('end', token.end_keyword, iCurrent, lObjects)
        # Anything before the semicolon is the optional alternative label.
        iCurrent = utils.assign_next_token_if_not(';', token.alternative_label, iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required(';', token.semicolon, iCurrent, lObjects)
    return iCurrent