Example #1
0
def _parse(fs_location_info: FileSystemLocationInfo,
           parser: TokenParser) -> Tuple[str, SymbolValueResolver]:
    type_str = parser.consume_mandatory_unquoted_string('SYMBOL-TYPE', True)

    if type_str not in _TYPE_SETUPS:
        err_msg = 'Invalid type: {}\nExpecting one of {}'.format(type_str, _TYPES_LIST_IN_ERR_MSG)
        raise SingleInstructionInvalidArgumentException(err_msg)

    value_parser = _TYPE_SETUPS[type_str]

    name_str = parser.consume_mandatory_unquoted_string('SYMBOL-NAME', True)

    if not symbol_syntax.is_symbol_name(name_str):
        err_msg = symbol_syntax.invalid_symbol_name_error(name_str)
        raise SingleInstructionInvalidArgumentException(err_msg)

    parser.consume_mandatory_constant_unquoted_string(syntax.ASSIGNMENT_ARGUMENT, True)

    consumes_whole_line, value_resolver = value_parser(fs_location_info, parser)

    if not consumes_whole_line and not parser.is_at_eol:
        msg = 'Superfluous arguments: ' + parser.remaining_part_of_current_line
        raise SingleInstructionInvalidArgumentException(msg)

    return name_str, value_resolver
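
A minimal, self-contained sketch of the dispatch-table shape that _parse above relies on; the keyword 'string', the parser body, and the resolver value are hypothetical placeholders, not the library's real _TYPE_SETUPS.

def _parse_string_value(fs_location_info, parser):
    # Hypothetical value parser: returns (consumes_whole_line, value_resolver).
    return False, 'resolver-for-a-string-value'

_TYPE_SETUPS = {
    'string': _parse_string_value,
}
_TYPES_LIST_IN_ERR_MSG = '|'.join(sorted(_TYPE_SETUPS))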
Example #2
0
 def _parse_file_specs(self, token_parser: TokenParser) -> Sequence[file_list.FileSpecificationSdv]:
     ret_val = []
     while not token_parser.has_valid_head_matching(_TOKEN_SPEC_LIST_END):
         ret_val.append(self._file_spec_parser.parse(token_parser))
         if not token_parser.has_valid_head_matching(_TOKEN_SPEC_LIST_END):
             token_parser.require_is_at_eol(self._superfluous_line_contents_message)
     return ret_val
Example #3
0
 def _parse_from_tokens(self, token_parser: TokenParser) -> TheInstructionEmbryoBase:
     src_path = self._src_path_parser.parse_from_token_parser(token_parser)
     if token_parser.is_at_eol:
         return _CopySourceWithoutExplicitDestinationInstruction(src_path)
     dst_path = self._dst_path_parser.parse_from_token_parser(token_parser)
     token_parser.report_superfluous_arguments_if_not_at_eol()
     return _CopySourceWithExplicitDestinationInstruction(src_path, dst_path)
Example #4
0
    def parse_from_token_parser(self,
                                parser: TokenParser) -> AssertPhaseInstruction:
        actual_file_constructor = self._actual_file_parser.parse_from_token_parser(
            parser)
        actual_file_assertion_part = parse_file_contents_assertion_part.parse(
            parser)
        parser.report_superfluous_arguments_if_not_at_eol()

        assertion_part_sequence = assertion_part.compose(
            IdentityAssertionPartWithValidationAndReferences(
                actual_file_constructor.validator,
                actual_file_constructor.references),
            FileConstructorAssertionPart(),
        )
        assertion_part_sequence = assertion_part.compose_with_sequence(
            assertion_part_sequence,
            IsExistingRegularFileAssertionPart(),
        )
        assertion_part_sequence = assertion_part.compose_with_sequence(
            assertion_part_sequence,
            actual_file_assertion_part,
        )
        return AssertionInstructionFromAssertionPart(
            assertion_part_sequence,
            lambda env: actual_file_constructor,
            actual_file_constructor.failure_message_header,
        )
Example #5
0
    def _parse_from_tokens(self, tokens: TokenParser) -> _TheInstructionEmbryo:
        path_to_create = self._path_parser.parse_from_token_parser(tokens)
        file_maker_ = self._file_maker_parser.parse(tokens)

        tokens.report_superfluous_arguments_if_not_at_eol()

        return _TheInstructionEmbryo(path_to_create, file_maker_)
Example #6
0
 def _parse_modifier(self, token_parser: TokenParser) -> _impl.ModifierSdv:
     if (token_parser.has_valid_head_token() and
             self._unset_keyword_matcher.matches(token_parser.head)):
         token_parser.consume_head()
         return self._parse_unset(token_parser)
     else:
         return self._parse_set(token_parser)
Example #7
0
 def parse_from_token_parser(self, parser: TokenParser) -> int:
     parser.require_has_valid_head_token('INTEGER')
     int_token = parser.consume_mandatory_token('err msg format string')
     try:
         return int(int_token.string)
     except ValueError:
         raise SingleInstructionInvalidArgumentException('Not an int: ' +
                                                         int_token.string)
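
The same convert-or-raise pattern reduced to a standalone sketch; ParseError and parse_int_text are hypothetical names standing in for the library's exception and the method above.

class ParseError(Exception):
    """Hypothetical stand-in for SingleInstructionInvalidArgumentException."""

def parse_int_text(text: str) -> int:
    # Convert the raw token text; turn a ValueError into a domain-specific error.
    try:
        return int(text)
    except ValueError:
        raise ParseError('Not an int: ' + text)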
Example #8
0
def _parse_from_token_parser(token_parser: TokenParser) -> NameAndValue[Actor]:
    token_parser.consume_mandatory_keyword(
        instruction_arguments.ASSIGNMENT_OPERATOR, False)
    ret_val = token_parser.parse_mandatory_command(
        _actor_parsers_setup(),
        concepts.ACTOR_CONCEPT_INFO.singular_name.upper())
    token_parser.report_superfluous_arguments_if_not_at_eol()
    return ret_val
Example #9
0
 def parse_from_token_parser(self, token_parser: TokenParser) -> RegexSdv:
     token_parser.require_has_valid_head_token(
         syntax_elements.REGEX_SYNTAX_ELEMENT.singular_name)
     is_ignore_case = token_parser.consume_and_handle_optional_option(
         False, lambda x: True, IGNORE_CASE_OPTION_NAME)
     regex_pattern = self._string_parser.parse_from_token_parser(
         token_parser)
     return _RegexSdv(is_ignore_case, regex_pattern)
Example #10
0
 def _parse_file(my_parser: TokenParser) -> FileMaker:
     src_file = parse_file_ref_from_token_parser(instruction_config.src_rel_opt_arg_conf,
                                                 my_parser)
     contents_transformer = parse_optional_transformer_resolver(parser)
     my_parser.report_superfluous_arguments_if_not_at_eol()
     return FileMakerForContentsFromExistingFile(instruction_config.source_info,
                                                 contents_transformer,
                                                 src_file)
Example #11
0
 def parse_from_token_parser(
         self, parser: TokenParser) -> ComparisonActualFileConstructor:
     parser.require_is_not_at_eol('Missing {actual_file} argument'.format(
         actual_file=ACTUAL_PATH_ARGUMENT.name))
     path = self._path_parser.parse_from_token_parser(parser)
     parser.consume_mandatory_constant_unquoted_string(
         reserved_words.COLON, must_be_on_current_line=False)
     return actual_files.ConstructorForPath(
         path, actual_file_attributes.PLAIN_FILE_OBJECT_NAME, True)
Example #12
0
def parse_mandatory_choice_with_default(parser: TokenParser,
                                        syntax_element_name: str,
                                        choices: Iterable[TokenSyntaxSetup[PARSE_RESULT]],
                                        default: ParserFromTokenParser) -> PARSE_RESULT:
    parser.require_existing_valid_head_token(syntax_element_name)
    for syntax_setup in choices:
        if syntax_setup.matcher.matches(parser.head):
            parser.consume_head()
            return syntax_setup.parser_after_token(parser)
    return default(parser)
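
A self-contained illustration of the same head-token dispatch pattern, written over a plain list of strings instead of a TokenParser; every name below is hypothetical.

from typing import Callable, List, Sequence, Tuple, TypeVar

T = TypeVar('T')

def parse_choice_with_default(tokens: List[str],
                              choices: Sequence[Tuple[str, Callable[[List[str]], T]]],
                              default: Callable[[List[str]], T]) -> T:
    # Match the head token against each (keyword, parser) pair;
    # on a match, consume the keyword and let that parser handle the rest.
    if tokens:
        for keyword, parse_rest in choices:
            if tokens[0] == keyword:
                return parse_rest(tokens[1:])
    return default(tokens)

For example, parse_choice_with_default(['<=', '5'], [('<=', parse_le)], parse_plain) would consume '<=' and hand ['5'] to parse_le, while an empty token list falls through to parse_plain.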
Example #13
0
    def parse_from_token_parser(self, token_parser: TokenParser) -> ArgumentsResolver:
        arguments = arguments_resolvers.empty()

        while not token_parser.is_at_eol:
            following_arguments = self._parse_element(token_parser)
            arguments = arguments.new_accumulated(following_arguments)

        token_parser.consume_current_line_as_string_of_remaining_part_of_current_line()

        return arguments
Example #14
0
def parse_string_or_here_doc_from_token_parser(token_parser: TokenParser,
                                               consume_last_here_doc_line: bool = True
                                               ) -> Tuple[SourceType, StringResolver]:
    token_parser.require_head_token_has_valid_syntax()
    if token_parser.token_stream.head.source_string.startswith(parse_here_document.DOCUMENT_MARKER_PREFIX):
        here_doc = parse_here_document.parse_as_last_argument_from_token_parser(True, token_parser,
                                                                                consume_last_here_doc_line)
        return SourceType.HERE_DOC, here_doc
    else:
        string_resolver = parse_string.parse_string_from_token_parser(token_parser)
        return SourceType.STRING, string_resolver
Example #15
0
 def _parse_from_start_str(self, here_doc_start: str,
                           token_parser: TokenParser) -> StringSdv:
     marker_match = re.fullmatch(string.HERE_DOCUMENT_TOKEN_RE,
                                 here_doc_start)
     if not marker_match:
         return _raise_not_a_here_doc_exception(here_doc_start)
     marker = marker_match.group(2)
     token_parser.report_superfluous_arguments_if_not_at_eol()
     token_parser.consume_current_line_as_string_of_remaining_part_of_current_line(
     )
     return self._parse_contents(marker, token_parser)
Example #16
0
    def _parse_from_tokens(self, token_parser: TokenParser) -> InstructionEmbryo[None]:
        try:
            phases = self._parse_phases(token_parser)
            modifier = self._parse_modifier(token_parser)
            token_parser.report_superfluous_arguments_if_not_at_eol()

            return _impl.TheInstructionEmbryo(phases, modifier)

        except TokenSyntaxError as ex:
            raise SingleInstructionInvalidArgumentException(
                std_error_message_text_for_token_syntax_error_from_exception(ex))
Example #17
0
    def _parse(self, parser: token_stream_parser.TokenParser) -> AssertPhaseInstruction:
        expectation_type = parser.consume_optional_negation_operator()

        path_to_check = parse_file_ref.parse_file_ref_from_token_parser(_REL_OPTION_CONFIG,
                                                                        parser)

        file_matcher = self._parse_optional_file_matcher(parser)

        parser.consume_current_line_as_string_of_remaining_part_of_current_line()

        return _Instruction(expectation_type, path_to_check, file_matcher)
Example #18
0
    def parse(self, token_parser: TokenParser) -> StringTransformerSdv:
        token_parser.require_has_valid_head_token(_REGEX_ARGUMENT.name)
        mb_line_matcher = self._parser_of_lines_selection.parse_from_token_parser(token_parser)
        preserve_new_lines = token_parser.consume_optional_option(names.PRESERVE_NEW_LINES_OPTION_NAME)
        regex_sdv = self._parser_of_regex.parse_from_token_parser(token_parser)
        replacement = self._parser_of_replacement.parse(token_parser)

        return _impl.Sdv(mb_line_matcher,
                         preserve_new_lines,
                         regex_sdv,
                         replacement)
Example #19
0
def parse_mandatory_choice(
    token_parser: TokenParser,
    syntax_element: str,
    choices: Sequence[TokenSyntaxSetup[PARSE_RESULT]],
) -> PARSE_RESULT:
    token_parser.require_existing_valid_head_token(syntax_element)
    for syntax_setup in choices:
        if syntax_setup.matcher.matches(token_parser.head):
            token_parser.consume_head()
            return syntax_setup.parser_after_token(token_parser)
    return token_parser.report_missing(syntax_element)
Example #20
0
 def _parse_contents(marker: str, token_parser: TokenParser) -> StringSdv:
     here_doc = []
     while token_parser.has_current_line:
         line = token_parser.consume_remaining_part_of_current_line_as_string(
         )
         if line == marker:
             return _sdv_from_lines(here_doc)
         here_doc.append(line)
         token_parser.consume_current_line_as_string_of_remaining_part_of_current_line(
         )
     return _raise_end_marker_not_found(marker)
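
For comparison, a minimal standalone here-document reader over a list of already-split lines; read_here_doc is a hypothetical name that mirrors the collect-until-marker loop above.

from typing import List

def read_here_doc(lines: List[str], marker: str) -> List[str]:
    # Collect lines until the end marker; fail if the marker never appears.
    contents = []
    for line in lines:
        if line == marker:
            return contents
        contents.append(line)
    raise ValueError('End marker not found: ' + marker)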
Example #21
0
def parse_integer_matcher(parser: TokenParser,
                          name_of_lhs: str = 'LHS') -> IntegerMatcher:
    comparison_operator = parse_comparison_operator(parser)
    parser.require_is_not_at_eol(_MISSING_INTEGER_ARGUMENT)
    token = parser.consume_mandatory_token(_MISSING_INTEGER_ARGUMENT)
    try:
        integer_arg = python_evaluate(token.string)
        return IntegerMatcherFromComparisonOperator(name_of_lhs,
                                                    comparison_operator,
                                                    integer_arg)
    except NotAnIntegerException as ex:
        raise SingleInstructionInvalidArgumentException('Not an integer: ' + ex.value_string)
Example #22
0
def parse_mandatory_choice_with_default(
    token_parser: TokenParser,
    syntax_element: str,
    choices: Sequence[TokenSyntaxSetup[PARSE_RESULT]],
    default: Callable[[TokenParser], PARSE_RESULT],
) -> PARSE_RESULT:
    token_parser.require_existing_valid_head_token(syntax_element)
    for syntax_setup in choices:
        if syntax_setup.matcher.matches(token_parser.head):
            token_parser.consume_head()
            return syntax_setup.parser_after_token(token_parser)
    return default(token_parser)
Example #23
0
def parse(expectation_type: ExpectationType,
          token_parser: TokenParser) -> StringMatcherResolver:
    token_parser.require_has_valid_head_token(_EXPECTED_SYNTAX_ELEMENT_FOR_EQUALS)
    expected_contents = parse_here_doc_or_file_ref.parse_from_token_parser(
        token_parser,
        EXPECTED_FILE_REL_OPT_ARG_CONFIG,
        consume_last_here_doc_line=False)

    return value_resolver(
        expectation_type,
        expected_contents,
    )
Example #24
0
    def _parse_optional_file_matcher(parser: token_stream_parser.TokenParser
                                     ) -> Optional[parse_file_matcher.FileMatcherResolver]:
        file_matcher = None

        if not parser.is_at_eol:
            parser.consume_mandatory_constant_unquoted_string(
                PROPERTIES_SEPARATOR,
                must_be_on_current_line=True)
            file_matcher = parse_file_matcher.parse_resolver(parser, must_be_on_current_line=False)
            parser.report_superfluous_arguments_if_not_at_eol()

        return file_matcher
Example #25
0
def parse_optional_choice_with_default(
    token_parser: TokenParser,
    choices: Sequence[TokenSyntaxSetup[PARSE_RESULT]],
    default: Callable[[TokenParser], PARSE_RESULT],
) -> PARSE_RESULT:
    if token_parser.has_valid_head_token_on_current_line():
        head_token = token_parser.head
        for syntax_setup in choices:
            if syntax_setup.matcher.matches(head_token):
                token_parser.consume_head()
                return syntax_setup.parser_after_token(token_parser)
    return default(token_parser)
Example #26
0
def parse_file_contents(instruction_config: InstructionConfig,
                        parser: TokenParser) -> FileMaker:
    """
    Parses a file contents specification of the form: [= FILE-MAKER]

    :raises SingleInstructionInvalidArgumentException: Invalid arguments
    """
    if parser.is_at_eol:
        return FileMakerForConstantContents(string_resolvers.str_constant(''))
    else:
        parser.consume_mandatory_constant_unquoted_string(CONTENTS_ASSIGNMENT_TOKEN, True)
        return parse_file_maker(instruction_config, parser)
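
A simplified, self-contained version of the same optional-assignment branching over a list of raw tokens; the function name and the stubbed return values are placeholders, not the library API.

from typing import List

def parse_contents_spec(tokens: List[str]) -> str:
    # An empty spec means empty constant contents; otherwise require '='
    # and hand the remainder to a FILE-MAKER parser (stubbed here as a join).
    if not tokens:
        return ''
    if tokens[0] != '=':
        raise ValueError('Expecting =, found: ' + tokens[0])
    return ' '.join(tokens[1:])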
Example #27
0
def _parse_constant(tokens: TokenParser) -> FilesConditionSdv:
    elements = _parse_elements(tokens)

    tokens.require_has_valid_head_token(_FILE_NAME_OR_SET_END)

    tokens.consume_mandatory_keyword__part_of_syntax_element(
        syntax.LITERAL_END,
        False,
        syntax_elements.FILES_CONDITION_SYNTAX_ELEMENT.singular_name,
    )

    return files_conditions.new_constant(elements)
Example #28
0
    def _file_quantified_assertion(self,
                                   quantifier: Quantifier,
                                   parser: TokenParser) -> FilesMatcherResolver:
        parser.consume_mandatory_constant_unquoted_string(config.QUANTIFICATION_OVER_FILE_ARGUMENT,
                                                          must_be_on_current_line=True)
        parser.consume_mandatory_constant_unquoted_string(
            instruction_arguments.QUANTIFICATION_SEPARATOR_ARGUMENT,
            must_be_on_current_line=True)
        matcher_on_file = parse_file_matcher.parse_resolver(parser)

        return self._file_quantified_assertion_part(quantifier,
                                                    matcher_on_file)
Example #29
0
    def parse(self, token_parser: TokenParser) -> FilesSourceSdv:
        file_specs = self._parse_file_specs(token_parser)

        token_parser.require_has_valid_head_token(_FILE_NAME_OR_SET_END)

        token_parser.consume_mandatory_keyword__part_of_syntax_element(
            syntax.FILE_LIST_END,
            False,
            syntax_elements.FILES_SOURCE_SYNTAX_ELEMENT.singular_name,
        )

        return file_list.Sdv(file_specs)
Example #30
0
 def _parse_recursive(
     self,
     token_parser: TokenParser,
 ) -> FullDepsWithDetailsDescriptionSdv[
         ModelConstructor[FilesMatcherModel]]:
     mb_min_depth = token_parser.consume_and_handle_optional_option3(
         self.DEPTH_INTEGER_PARSER.parse,
         file_or_dir_contents.MIN_DEPTH_OPTION.name)
     mb_max_depth = token_parser.consume_and_handle_optional_option3(
         self.DEPTH_INTEGER_PARSER.parse,
         file_or_dir_contents.MAX_DEPTH_OPTION.name)
     return dir_contents.model_constructor__recursive(
         mb_min_depth, mb_max_depth)
Example #31
0
def parse_from_token_parser(token_parser: TokenParser,
                            conf: RelOptionArgumentConfiguration = CONFIGURATION,
                            consume_last_here_doc_line: bool = True) -> StringOrFileRefResolver:
    token_parser.require_head_token_has_valid_syntax()
    file_ref = token_parser.consume_and_handle_optional_option(
        None,
        functools.partial(parse_file_ref.parse_file_ref_from_token_parser, conf),
        FILE_ARGUMENT_OPTION)
    if file_ref is not None:
        return StringOrFileRefResolver(SourceType.PATH, None, file_ref)
    else:
        source_type, resolver = parse_string_or_here_doc_from_token_parser(token_parser, consume_last_here_doc_line)
        return StringOrFileRefResolver(source_type, resolver, None)
Example #32
0
    def _parse_optional_file_matcher(
            parser: token_stream_parser.TokenParser
    ) -> Optional[FileMatcherSdv]:
        file_matcher = None

        if not parser.is_at_eol:
            parser.consume_mandatory_constant_unquoted_string(
                reserved_words.COLON, must_be_on_current_line=True)
            file_matcher = parse_file_matcher.parsers(
            ).full.parse_from_token_parser(parser)
            parser.report_superfluous_arguments_if_not_at_eol()

        return file_matcher
Example #33
0
    def _parse(
            self,
            parser: token_stream_parser.TokenParser) -> AssertPhaseInstruction:
        expectation_type = parser.consume_optional_negation_operator()

        path_to_check = self._path_parser.parse_from_token_parser(parser)

        file_matcher = self._parse_optional_file_matcher(parser)

        parser.consume_current_line_as_string_of_remaining_part_of_current_line(
        )

        return _Instruction(expectation_type, path_to_check, file_matcher)
Example #34
0
    def _parse_from_tokens(
            self, token_parser: TokenParser) -> InstructionEmbryo[None]:
        try:
            token_parser.consume_mandatory_keyword(defs.ASSIGNMENT_IDENTIFIER,
                                                   False)
            value = self._parse_value(token_parser)
            token_parser.report_superfluous_arguments_if_not_at_eol()

            return _impl.TheInstructionEmbryo(value)

        except TokenSyntaxError as ex:
            raise SingleInstructionInvalidArgumentException(
                std_error_message_text_for_token_syntax_error_from_exception(
                    ex))
Example #35
0
def parse(expectation_type: ExpectationType,
          token_parser: TokenParser) -> StringMatcherResolver:
    is_full_match = token_parser.consume_and_handle_optional_option(False,
                                                                    lambda parser: True,
                                                                    matcher_options.FULL_MATCH_ARGUMENT_OPTION)
    token_parser.require_has_valid_head_token(syntax_elements.REGEX_SYNTAX_ELEMENT.singular_name)
    source_type, regex_resolver = parse_regex.parse_regex2(token_parser,
                                                           must_be_on_same_line=False)

    return value_resolver(
        expectation_type,
        is_full_match,
        regex_resolver,
    )
Example #36
0
    def parse_from_token_parser(
            self, token_parser: TokenParser) -> Optional[StringSdv]:
        if not self._here_document_is_mandatory and token_parser.is_at_eol:
            return None

        token_parser.require_has_valid_head_token(
            _rs_syntax_elements.HERE_DOCUMENT_SYNTAX_ELEMENT_NAME)

        first_token = token_parser.token_stream.head
        if first_token.is_quoted:
            return _raise_not_a_here_doc_exception(
                token_parser.remaining_part_of_current_line)
        start_token = token_parser.consume_mandatory_token(
            'impl: will succeed because of the check above')
        return self._parse_from_start_str(start_token.string, token_parser)
Example #37
0
def parse_regex2(parser: TokenParser,
                 must_be_on_same_line: bool = True,
                 consume_last_here_doc_line: bool = False) -> Tuple[SourceType, RegexResolver]:
    if must_be_on_same_line:
        parser.require_is_not_at_eol(MISSING_REGEX_ARGUMENT_ERR_MSG)

    is_ignore_case = parser.consume_and_handle_optional_option(False,
                                                               lambda x: True,
                                                               IGNORE_CASE_OPTION_NAME)
    if must_be_on_same_line:
        parser.require_is_not_at_eol(MISSING_REGEX_ARGUMENT_ERR_MSG)

    source_type, regex_pattern = parse_string_or_here_doc_from_token_parser(parser,
                                                                            consume_last_here_doc_line)
    return source_type, _RegexResolver(is_ignore_case, regex_pattern)
Example #38
0
 def parse_from_token_parser(
         self, token_parser: TokenParser) -> Either[str, StringSdv]:
     token_parser.require_has_valid_head_token(self._conf.argument_name)
     head = token_parser.head
     if head.source_string.startswith(string.HERE_DOCUMENT_MARKER_PREFIX):
         string_sdv = self._here_doc_parser.parse_from_token_parser(
             token_parser)
         return Either.of_right(string_sdv)
     elif TEXT_UNTIL_EOL_TOKEN_MATCHER.matches(head):
         token_parser.consume_head()
         string_sdv = parse_string.parse_rest_of_line_as_single_string(
             token_parser, strip_space=True)
         return Either.of_right(string_sdv)
     else:
         return self._plain_string_parser.parse(token_parser)
Example #39
0
def parse_after_quantifier_token(
        quantifier: Quantifier,
        element_predicate_parser: Parser[MatcherSdv[ELEMENT]],
        setup: quantifier_matchers.ElementSetup[MODEL, ELEMENT],
        token_parser: TokenParser,
) -> MatcherSdv[MODEL]:
    token_parser.consume_mandatory_constant_unquoted_string(
        setup.rendering.type_name,
        must_be_on_current_line=True)
    token_parser.consume_mandatory_constant_unquoted_string(
        logic.QUANTIFICATION_SEPARATOR_ARGUMENT,
        must_be_on_current_line=True)

    element_predicate = element_predicate_parser.parse_from_token_parser(token_parser)

    return quantifier_matchers.sdv(setup, quantifier, element_predicate)
Example #40
0
def parse_file_maker(instruction_config: InstructionConfig,
                     parser: TokenParser) -> FileMaker:
    parser.require_has_valid_head_token(instruction_config.syntax_element)

    head_source_string = parser.token_stream.head.source_string
    if is_option_string(head_source_string):
        return _parse_file_maker_with_transformation(instruction_config,
                                                     parser)
    else:
        if head_source_string.startswith(parse_here_document.DOCUMENT_MARKER_PREFIX):
            contents = parse_here_document.parse_as_last_argument_from_token_parser(True, parser)
            return FileMakerForConstantContents(contents)
        else:
            contents = parse_string_from_token_parser(parser)
            parser.report_superfluous_arguments_if_not_at_eol()
            return FileMakerForConstantContents(contents)
Example #41
0
def _parse_file_maker_with_transformation(instruction_config: InstructionConfig,
                                          parser: TokenParser) -> FileMaker:
    def _parse_program_from_stdout(my_parser: TokenParser) -> FileMaker:
        program = parse_program.parse_program(my_parser)
        return FileMakerForContentsFromProgram(instruction_config.source_info,
                                               ProcOutputFile.STDOUT,
                                               program)

    def _parse_program_from_stderr(my_parser: TokenParser) -> FileMaker:
        program = parse_program.parse_program(my_parser)
        return FileMakerForContentsFromProgram(instruction_config.source_info,
                                               ProcOutputFile.STDERR,
                                               program)

    def _parse_file(my_parser: TokenParser) -> FileMaker:
        src_file = parse_file_ref_from_token_parser(instruction_config.src_rel_opt_arg_conf,
                                                    my_parser)
        contents_transformer = parse_optional_transformer_resolver(parser)
        my_parser.report_superfluous_arguments_if_not_at_eol()
        return FileMakerForContentsFromExistingFile(instruction_config.source_info,
                                                    contents_transformer,
                                                    src_file)

    return parser.parse_mandatory_option({
        PROGRAM_OUTPUT_OPTIONS[ProcOutputFile.STDOUT]: _parse_program_from_stdout,
        PROGRAM_OUTPUT_OPTIONS[ProcOutputFile.STDERR]: _parse_program_from_stderr,
        FILE_OPTION: _parse_file,
    })
Example #42
0
 def parse_from_token_parser(self, parser: TokenParser) -> ProgramResolver:
     instruction_argument = parser.consume_current_line_as_string_of_remaining_part_of_current_line()
     argument_resolver = string_resolvers.str_constant(instruction_argument)
     return ProgramResolverForCommand(
         command_resolvers.for_shell(argument_resolver,
                                     validators=[self.validator]),
         accumulator.empty())
Example #43
0
    def parse(self, token_parser: TokenParser) -> List[ELEMENT]:
        ret_val = []

        while not token_parser.is_at_eol:
            if token_parser.remaining_part_of_current_line.strip() == defs.CONTINUATION_TOKEN:
                token_parser.consume_current_line_as_string_of_remaining_part_of_current_line()
                continue
            if token_parser.has_valid_head_matching(defs.IS_STOP_AT_TOKEN):
                break
            sym_name_or_element = self._element_parser.parse(token_parser)
            ret_val.append(self._mk_element.reduce(sym_name_or_element))

        if token_parser.is_at_eol:
            token_parser.consume_remaining_part_of_current_line_as_string()

        return ret_val
Example #44
0
def _parse_arguments_generator(
        token_parser: TokenParser) -> arguments_generator.ArgumentsGenerator:
    return token_parser.consume_and_handle_first_matching_option_2(
        _default_arg_pos(), [
            (file_matcher.PROGRAM_ARG_OPTION__LAST, _mk_arg_pos__last),
            (file_matcher.PROGRAM_ARG_OPTION__MARKER, _mk_arg_pos__marker),
        ])
Example #45
0
def _consume_elements_from_token_parser(token_parser: TokenParser) -> List[lrs.Element]:
    elements = []

    while not token_parser.is_at_eol:
        next_token = token_parser.consume_mandatory_token('Invalid list element')
        elements.append(element_of(next_token))

    return elements
Example #46
0
def parse_optional_option(token_parser: TokenParser) -> StringTransformerSdv:
    """
    :return: The identity transformer, if transformer option is not given.
    """
    return token_parser.consume_and_handle_optional_option(
        IDENTITY_TRANSFORMER_SDV,
        _PARSER__ANY_LINE__SIMPLE.parse_from_token_parser,
        string_transformer.WITH_TRANSFORMED_CONTENTS_OPTION_NAME)
Example #47
0
 def parse(self, parser: TokenParser) -> FilesMatcherResolver:
     matcher_name = parser.consume_mandatory_unquoted_string(
         syntax_elements.FILES_MATCHER_SYNTAX_ELEMENT.singular_name,
         False)
     if matcher_name in self.matcher_parsers:
         return self.matcher_parsers[matcher_name](parser)
     else:
         return self.parse_symbol_reference(matcher_name, parser)
Example #48
0
def parse_optional_selection_resolver2(parser: TokenParser) -> Optional[FileMatcherResolver]:
    parser = token_stream_parser.token_parser_with_additional_error_message_format_map(
        parser,
        ADDITIONAL_ERROR_MESSAGE_TEMPLATE_FORMATS)
    return parser.consume_and_handle_optional_option(
        None,
        parse_resolver,
        SELECTION_OPTION.name)
Example #49
0
    def parse_from_token_parser(self, parser: TokenParser) -> ComparisonActualFileConstructor:
        def _parse_program(_parser: TokenParser) -> ComparisonActualFileConstructor:
            program = parse_program.parse_program(_parser)
            return _ComparisonActualFileConstructorForProgram(self._checked_file, program)

        return parser.consume_and_handle_optional_option(self._default,
                                                         _parse_program,
                                                         OUTPUT_FROM_PROGRAM_OPTION_NAME)
Example #50
0
def parse_optional_selection_resolver(parser: TokenParser) -> FileMatcherResolver:
    parser = token_stream_parser.token_parser_with_additional_error_message_format_map(
        parser,
        ADDITIONAL_ERROR_MESSAGE_TEMPLATE_FORMATS)
    return parser.consume_and_handle_optional_option(
        CONSTANT_TRUE_MATCHER_RESOLVER,
        parse_resolver,
        SELECTION_OPTION.name)
Example #51
0
 def parse_from_token_parser(self, parser: TokenParser) -> ProgramSdv:
     instruction_argument = parser.consume_current_line_as_string_of_remaining_part_of_current_line(
     )
     return ProgramSdvForCommand(
         test_command_sdvs.for_interpret_py_file_that_must_exist(
             path_sdvs.of_rel_option_with_const_file_name(
                 RelOptionType.REL_ACT, instruction_argument)),
         AccumulatedComponents.empty())
Example #52
0
def parse_string_matcher(parser: TokenParser) -> StringMatcherResolver:
    model_transformer = parse_string_transformer.parse_optional_transformer_resolver_preceding_mandatory_element(
        parser,
        COMPARISON_OPERATOR,
    )
    expectation_type = parser.consume_optional_negation_operator()
    matcher_except_transformation = _StringMatcherParser(expectation_type).parse(parser)
    return resolvers.new_with_transformation(model_transformer, matcher_except_transformation)
Example #53
0
def parse_comparison_operator(parser: TokenParser) -> comparators.ComparisonOperator:
    token_string = parser.consume_mandatory_unquoted_string(OPERATOR_ARGUMENT.name, True)

    if token_string not in comparators.NAME_2_OPERATOR:
        raise SingleInstructionInvalidArgumentException('Invalid {op}: {actual}'.format(op=OPERATOR_ARGUMENT.name,
                                                                                        actual=token_string))

    return comparators.NAME_2_OPERATOR[token_string]
Example #54
0
def parse_files_matcher(parser: TokenParser,
                        must_be_on_current_line: bool = True) -> FilesMatcherResolver:
    if must_be_on_current_line:
        parser.require_is_not_at_eol('Missing ' + syntax_elements.FILES_MATCHER_SYNTAX_ELEMENT.singular_name)

    mb_file_selector = parse_file_matcher.parse_optional_selection_resolver2(parser)
    expectation_type = parser.consume_optional_negation_operator()

    ret_val = _SIMPLE_MATCHER_PARSER.parse(parser)

    if expectation_type is ExpectationType.NEGATIVE:
        ret_val = negation.negation_matcher(ret_val)

    if mb_file_selector is not None:
        ret_val = sub_set_selection.sub_set_selection_matcher(mb_file_selector,
                                                              ret_val)

    return ret_val
Example #55
0
def remaining_source(remaining_contents_of_first_line: str,
                     following_lines: Sequence[str] = ()) -> TokenParser:
    """
    :param remaining_contents_of_first_line: Part of the first line that has not been consumed.
    :return: Source with some initial content of the first line that has been consumed.
    """
    content = '\n'.join([remaining_contents_of_first_line] + list(following_lines))
    token_stream = TokenStream(content)
    return TokenParser(token_stream)
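
A possible use of the helper above in a test. The only extra assumption is consume_mandatory_token, whose single-string-argument form appears in several examples in this listing; with plain whitespace tokenisation the first consumed token would be '='.

parser = remaining_source('= some_value', ['line two'])
token = parser.consume_mandatory_token('expected an argument')
# token.string is expected to be '='; 'some_value' and the following line remain unconsumed.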
Example #56
0
def parse_from_token_parser(parser: TokenParser) -> ProgramResolverForSymbolReference:
    symbol_string = parser.consume_mandatory_unquoted_string(instruction_arguments.SYMBOL_SYNTAX_ELEMENT_NAME,
                                                             True)
    if not symbol_syntax.is_symbol_name(symbol_string):
        raise SingleInstructionInvalidArgumentException('Invalid syntax of ' +
                                                        instruction_arguments.SYMBOL_SYNTAX_ELEMENT_NAME +
                                                        ': ' + symbol_string)
    arguments = parse_arguments.parse_from_token_parser(parser)
    return program_symbol_resolver.plain(symbol_string, arguments)
Example #57
0
 def parse(self, token_parser: TokenParser) -> StringMatcherResolver:
     token_parser = token_parser_with_additional_error_message_format_map(token_parser, _FORMAT_MAP)
     matcher_name = token_parser.consume_mandatory_unquoted_string(
         instruction_arguments.STRING_MATCHER_PRIMITIVE_SYNTAX_ELEMENT,
         False)
     if matcher_name in self.parsers:
         return self.parsers[matcher_name](token_parser)
     else:
         return self._symbol_reference(matcher_name, token_parser)
Example #58
0
 def parse_from_token_parser(self, parser: TokenParser) -> ComparisonActualFileConstructor:
     parser.require_is_not_at_eol(
         'Missing {actual_file} argument'.format(actual_file=ACTUAL_PATH_ARGUMENT.name))
     file_ref = parse_file_ref.parse_file_ref_from_token_parser(ACTUAL_RELATIVITY_CONFIGURATION,
                                                                parser)
     return actual_files.ComparisonActualFileConstructorForConstant(ActComparisonActualFileForFileRef(file_ref))
Example #59
0
def _parse_plain_list_element(parser: TokenParser) -> ArgumentsResolver:
    token = parser.consume_mandatory_token('Invalid list element')
    element = parse_list.element_of(token)
    return arguments_resolvers.new_without_validation(list_resolvers.from_elements([element]))
Example #60
0
def parse_as_command(parser: TokenParser) -> CommandResolver:
    parser.require_is_not_at_eol('Missing {COMMAND}', _PARSE_FORMAT_MAP)
    argument = parse_string.parse_rest_of_line_as_single_string_and_consume_line(parser)

    return command_resolvers.for_shell(argument)