Example 1
def detect(iToken, lObjects):
    '''
    procedure_call_statement ::=
        [ label : ] procedure_call ;
    '''
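    # e.g. "wr_fifo(data, full);" or "lbl : wr_fifo(data, full);"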

    iCurrent = iToken
    # Move past label if it exists
    if utils.find_in_next_n_tokens(':', 2, iCurrent, lObjects):
        iCurrent = utils.find_next_token(iCurrent, lObjects)
        iCurrent += 1
        iCurrent = utils.find_next_token(iCurrent, lObjects)
        iCurrent += 1
    # Check if next token is keyword
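    # (lKeywords is assumed to be a module-level list of keywords that rule out a procedure call)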
    iCurrent = utils.find_next_token(iCurrent, lObjects)
    if lObjects[iCurrent].get_value().lower() in lKeywords:
        return iToken
    # Check if signal assignment operator exists
    if utils.find_in_range('<=', iCurrent, ';', lObjects):
        return iToken
    # Check if variable assignment operator exists
    if utils.find_in_range(':=', iCurrent, ';', lObjects):
        return iToken
    # Otherwise it must be a procedure_call_statement
    return classify(iToken, lObjects)
Example 2
def detect(iToken, lObjects):
    '''
    interface_subprogram_declaration ::=
        interface_subprogram_specification [ is interface_subprogram_default ]
    '''
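    # e.g. "function compare(a, b : T) return boolean is <>"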
    iCurrent = utils.find_next_token(iToken, lObjects)
    iLast = iCurrent
    iCurrent = interface_subprogram_specification.detect(iCurrent, lObjects)
    if iLast != iCurrent:
        # Look for the optional 'is' following the specification
        iCurrent = utils.find_next_token(iCurrent, lObjects)
        if utils.object_value_is(lObjects, iCurrent, 'is'):
            return classify(iCurrent, lObjects)
    return iToken
Example 3
def detect(iCurrent, lObjects):
    '''
    association_element ::=
        [ formal_part => ] actual_part

    An association element will end in either a close parenthesis or a comma that is not within parentheses.

    association_element [)|,]

    '''
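    # e.g. "clk => sys_clk," or the final "dout => open)" of a port map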
    iOpenParenthesis = 0
    iCloseParenthesis = 0
    iToken = iCurrent
    while not utils.token_is_semicolon(iToken, lObjects):
        iToken = utils.find_next_token(iToken, lObjects)
        if utils.token_is_open_parenthesis(iToken, lObjects):
            iOpenParenthesis += 1
        if utils.token_is_close_parenthesis(iToken, lObjects):
            iCloseParenthesis += 1
        if iCloseParenthesis == iOpenParenthesis + 1:
            classify(iCurrent, iToken, lObjects, ')')
            return iToken
        if iCloseParenthesis == iOpenParenthesis:
            if utils.token_is_comma(iToken, lObjects):
                classify(iCurrent, iToken, lObjects, ',')
                return iToken
        iToken += 1
    return iToken
Example 4
def detect(iToken, lObjects):
    '''
    component_instantiation_statement ::=
        instantiation_label :
            instantiated_unit
                [ generic_map_aspect ]
                [ port_map_aspect ] ;
    '''
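    # e.g. "u_ram : ram_block generic map (DEPTH => 512) port map (clk => clk);"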
    iCurrent = utils.find_next_token(iToken, lObjects)
    iCurrent = utils.increment_token_count(iCurrent)
    iCurrent = utils.find_next_token(iCurrent, lObjects)
    if not utils.object_value_is(lObjects, iCurrent, ':'):
        return iToken
    iCurrent = utils.increment_token_count(iCurrent)
    if instantiated_unit.detect(iCurrent, lObjects):
        return classify(iToken, lObjects)
    return iToken
Example 5
def classify_until(lUntils, iToken, lObjects, oType=parser.todo):
    '''
    subtype_indication ::=
        [ resolution_indication ] type_mark [ constraint ]
    '''
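    # e.g. "std_logic_vector(7 downto 0)" or "natural range 0 to 15"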
    iCurrent = iToken
    iStop = len(lObjects) - 1
    iOpenParenthesis = 0
    iCloseParenthesis = 0
    while iCurrent < iStop:
        iCurrent = utils.find_next_token(iCurrent, lObjects)
        if utils.token_is_open_parenthesis(iCurrent, lObjects):
            iOpenParenthesis += 1
        if utils.token_is_close_parenthesis(iCurrent, lObjects):
            iCloseParenthesis += 1
        if iOpenParenthesis < iCloseParenthesis:
            break
        elif lObjects[iCurrent].get_value().lower() in lUntils:
            if utils.token_is_close_parenthesis(iCurrent, lObjects):
                if iOpenParenthesis == iCloseParenthesis:
                    utils.assign_token(lObjects, iCurrent, parser.close_parenthesis)
                    continue
                else:
                    break
            elif utils.token_is_comma(iCurrent, lObjects):
                if iOpenParenthesis == iCloseParenthesis:
                    break
                else:
                    utils.assign_token(lObjects, iCurrent, parser.comma)
            else:
                break
        else:
            sValue = lObjects[iCurrent].get_value()
            if sValue == ')':
                utils.assign_token(lObjects, iCurrent, parser.close_parenthesis)
            elif sValue == '(':
                utils.assign_token(lObjects, iCurrent, parser.open_parenthesis)
            elif sValue in ('-', '+', '*', '**', '/'):
                utils.assign_token(lObjects, iCurrent, parser.todo)
            elif sValue.lower() == 'downto':
                utils.assign_token(lObjects, iCurrent, direction.downto)
            elif sValue.lower() == 'to':
                utils.assign_token(lObjects, iCurrent, direction.to)
            else:
                utils.assign_token(lObjects, iCurrent, oType)
    return iCurrent
Example 6
def classify_until(lUntils, iToken, lObjects, oType=parser.todo):
    '''
      name ::=
              simple_name
            | operator_symbol
            | character_literal
            | selected_name
            | indexed_name
            | slice_name
            | attribute_name
            | external_name

    NOTE: At the moment, everything will be set to parser.todo.
    '''
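    # e.g. "my_sig", "ieee.numeric_std.unsigned", "data(7 downto 0)", "clk'event"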

    iCurrent = iToken
    iStop = len(lObjects) - 1
    iOpenParenthesis = 0
    iCloseParenthesis = 0
    while iCurrent < iStop:
        iCurrent = utils.find_next_token(iCurrent, lObjects)
        if utils.token_is_open_parenthesis(iCurrent, lObjects):
            iOpenParenthesis += 1
        if utils.token_is_close_parenthesis(iCurrent, lObjects):
            iCloseParenthesis += 1
        if iOpenParenthesis < iCloseParenthesis:
            break
        elif lObjects[iCurrent].get_value().lower() in lUntils:
            break
        else:
            sValue = lObjects[iCurrent].get_value()
            if sValue == ')':
                utils.assign_token(lObjects, iCurrent,
                                   parser.close_parenthesis)
            elif sValue == '(':
                utils.assign_token(lObjects, iCurrent, parser.open_parenthesis)
            elif sValue in ('-', '+', '*', '**', '/'):
                utils.assign_token(lObjects, iCurrent, parser.todo)
            elif sValue.lower() == 'downto':
                utils.assign_token(lObjects, iCurrent, direction.downto)
            elif sValue.lower() == 'to':
                utils.assign_token(lObjects, iCurrent, direction.to)
            else:
                utils.assign_token(lObjects, iCurrent, oType)
    return iCurrent
Example 7
def detect(iToken, lObjects):
    '''
    context_declaration ::=
        context identifier is
            context_clause
        end [ context ] [ context_simple_name ] ;
    '''
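    # e.g. "context my_ctx is library ieee; use ieee.std_logic_1164.all; end context;"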

    iCurrent = utils.find_next_token(iToken, lObjects)
    if utils.object_value_is(lObjects, iCurrent, 'context'):
        if utils.find_in_range('is', iCurrent, ';', lObjects):
            return classify(iCurrent, lObjects)
    return iToken
Example 8
def detect(iToken, lObjects):
    '''
    loop_statement ::=
        [ loop_label : ]
            [ iteration_scheme ] loop
                sequence_of_statements
            end loop [ loop_label ] ;
    '''
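    # e.g. "read_loop : for i in 0 to 7 loop ... end loop read_loop;"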
    if utils.find_in_next_n_tokens(':', 2, iToken, lObjects):
        iCurrent = utils.find_next_token(iToken, lObjects)
        iCurrent += 1
        iCurrent = utils.find_next_token(iCurrent, lObjects)
        iCurrent += 1
    else:
        iCurrent = iToken

    if iteration_scheme.detect(iCurrent, lObjects):
        return classify(iToken, lObjects)
    if utils.is_next_token('loop', iCurrent, lObjects):
        return classify(iToken, lObjects)

    return iToken
Example 9
def classify_until(lUntils, iToken, lObjects):
    '''
    sensitivity_list ::=
        *signal*_name { , *signal*_name}
    '''
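    # e.g. "clk, rst, d_in"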

    iCurrent = iToken
    iLast = 0
    while iLast != iCurrent:
        iLast = iCurrent
        if lObjects[utils.find_next_token(iCurrent, lObjects)].get_value().lower() in lUntils:
            return iCurrent
        iCurrent = utils.assign_next_token_if(',', token.comma, iCurrent, lObjects)
        iCurrent = utils.assign_next_token(parser.todo, iCurrent, lObjects)
Example 10
def detect(iToken, lObjects):
    '''
    for_generate_statement ::=
        *generate*_label :
            for *generate*_parameter_specification generate
                generate_statement_body
            end generate [ *generate*_label ] ;
    '''
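    # e.g. "gen_regs : for i in 0 to 3 generate ... end generate gen_regs;"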

    if utils.are_next_consecutive_tokens([None, ':', 'for'], iToken, lObjects):
        return classify(iToken, lObjects)
    if utils.are_next_consecutive_tokens(['for'], iToken, lObjects):
        iIndex = utils.find_next_token(iToken, lObjects)
        oToken = token.for_keyword(lObjects[iToken].get_value())
        utils.print_error_message('generate_label', oToken, iIndex, lObjects)
    return iToken
Example 11
def classify_until(lUntils, iToken, lObjects):
    '''
    logical_name_list ::=
        logical_name { , logical_name }
    '''
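    # e.g. "ieee, std" in "library ieee, std;"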
    iCurrent = iToken
    iLast = 0
    while iLast != iCurrent:
        iLast = iCurrent
        if lObjects[utils.find_next_token(
                iCurrent, lObjects)].get_value().lower() in lUntils:
            return iCurrent
        iCurrent = utils.assign_next_token_if(',', token.comma, iCurrent,
                                              lObjects)
        iCurrent = utils.assign_next_token(token.logical_name, iCurrent,
                                           lObjects)
Example 12
def detect(iToken, lObjects):
    '''
        case_generate_statement ::=
            *generate*_label :
                case expression generate
                    case_generate_alternative
                    { case_generate_alternative }
                end generate [ *generate*_label ] ;
    '''
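    # e.g. "gen_impl : case IMPL generate when others => ... end generate;"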

    if utils.are_next_consecutive_tokens([None, ':', 'case'], iToken, lObjects):
        return classify(iToken, lObjects)
    if utils.are_next_consecutive_tokens(['case'], iToken, lObjects):
        iIndex = utils.find_next_token(iToken, lObjects)
        oToken = token.case_keyword(lObjects[iToken].get_value())
        utils.print_error_message('generate_label', oToken, iIndex, lObjects)
    return iToken
Example 13
def detect(iToken, lObjects):
    '''
    package_declaration ::=
        package identifier is
            package_header
            package_declarative_part
        end [ package ] [ package_simple_name ] ;
    '''
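    # e.g. "package my_pkg is ... end package my_pkg;"
    # (the 'body' and 'new' checks below rule out package bodies and package instantiations)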

    iCurrent = utils.find_next_token(iToken, lObjects)
    if utils.object_value_is(lObjects, iCurrent, 'package'):
        if not utils.find_in_next_n_tokens('body', 5, iCurrent, lObjects):
            if not utils.find_in_next_n_tokens('new', 5, iCurrent, lObjects):
                return classify(iToken, lObjects)
        else:
            return iToken

    return iToken
Example 14
def detect(iToken, lObjects):
    '''
    if_generate_statement ::=
        *generate*_label :
            if [ *alternative*_label : ] condition generate
                generate_statement_body
            { elsif [ *alternative_label* : ] condition generate
                generate_statement_body }
            [ else [ *alternative_label* : ] generate
                generate_statement_body ]
            end generate [ *generate*_label ] ;
    '''
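    # e.g. "gen_dbg : if DEBUG_EN generate ... end generate gen_dbg;"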

    if utils.are_next_consecutive_tokens([None, ':', 'if'], iToken, lObjects):
        return classify(iToken, lObjects)
    if utils.are_next_consecutive_tokens(['if'], iToken, lObjects):
        iIndex = utils.find_next_token(iToken, lObjects)
        oToken = token.if_keyword(lObjects[iToken].get_value())
        utils.print_error_message('generate_label', oToken, iIndex, lObjects)
    return iToken
Example 15
def classify_until(lUntils, iToken, lObjects):
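    # Assign parser.todo to every token until a value in lUntils is found at
    # the same parenthesis depth as the starting token, or an unmatched close
    # parenthesis is encountered.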
    iCurrent = iToken
    iStop = len(lObjects) - 1
    iOpenParenthesis = 0
    iCloseParenthesis = 0
    while iCurrent < iStop:
        iCurrent = utils.find_next_token(iCurrent, lObjects)
        if utils.token_is_open_parenthesis(iCurrent, lObjects):
            iOpenParenthesis += 1
        if utils.token_is_close_parenthesis(iCurrent, lObjects):
            iCloseParenthesis += 1
        if iOpenParenthesis < iCloseParenthesis:
            break
        elif iOpenParenthesis == iCloseParenthesis:
            if lObjects[iCurrent].get_value().lower() in lUntils:
                break
            else:
                utils.assign_token(lObjects, iCurrent, parser.todo)
        else:
            utils.assign_token(lObjects, iCurrent, parser.todo)
    return iCurrent
Example 16
def classify(iToken, lObjects):
    '''
    generate_statement_body ::=
            [ block_declarative_part
        begin ]
            { concurrent_statement }
        [ end [ alternative_label ] ; ]
    '''
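    # e.g. "signal t : std_logic; begin t <= a and b; end;"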
    iCurrent = utils.find_next_token(iToken, lObjects)
    iLast = iCurrent
    iCurrent = block_declarative_part.detect(iCurrent, lObjects)

    if iCurrent != iLast:
        iCurrent = utils.assign_next_token_required('begin',
                                                    token.begin_keyword,
                                                    iCurrent, lObjects)

    iCurrent = utils.assign_next_token_if('begin', token.begin_keyword,
                                          iCurrent, lObjects)

    iLast = 0
    while iCurrent != iLast:
        iLast = iCurrent
        if utils.is_next_token_one_of(['elsif', 'else', 'when'], iCurrent,
                                      lObjects):
            return iCurrent
        if utils.is_next_token_one_of(['end'], iCurrent, lObjects):
            break
        iCurrent = concurrent_statement.detect(iCurrent, lObjects)

    if not utils.are_next_consecutive_tokens(['end', 'generate'], iCurrent,
                                             lObjects):
        iCurrent = utils.assign_next_token_required('end', token.end_keyword,
                                                    iCurrent, lObjects)
        iCurrent = utils.assign_next_token_if_not(';', token.alternative_label,
                                                  iCurrent, lObjects)
        iCurrent = utils.assign_next_token_required(';', token.semicolon,
                                                    iCurrent, lObjects)

    return iCurrent
Example 17
def classify(iToken, lObjects):
    '''
    subtype_indication ::=
        [ resolution_indication ] type_mark [ constraint ]
    '''
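    # e.g. "unsigned(DATA_WIDTH - 1 downto 0)"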

    iCurrent = iToken
    iStop = len(lObjects) - 1
    iOpenParenthesis = 0
    iCloseParenthesis = 0
    while iCurrent < iStop:
        iCurrent = utils.find_next_token(iCurrent, lObjects)
        if utils.token_is_open_parenthesis(iCurrent, lObjects):
            iOpenParenthesis += 1
        if utils.token_is_close_parenthesis(iCurrent, lObjects):
            iCloseParenthesis += 1
        if iOpenParenthesis < iCloseParenthesis:
            break
        else:
            if utils.is_next_token(',', iCurrent, lObjects):
                utils.assign_token(lObjects, iCurrent, token.comma)
            else:
                utils.assign_token(lObjects, iCurrent, parser.todo)
    return iCurrent