Example #1
0
 def cases_for_word(reserved: str) -> List[TC]:
     """Test cases: a quoted reserved word parses as a constant fragment.

     Covers hard quotes alone, hard quotes followed by another word,
     and soft quotes followed by another word.
     """
     return [
         TC(
             _src('{hard_quote}{reserved}{hard_quote}',
                  hard_quote=HARD_QUOTE_CHAR,
                  reserved=reserved),
             Expectation(
                 fragments=[constant(reserved)],
                 token_stream=assert_token_stream(is_null=asrt.is_true),
             )),
         TC(
             _src('{hard_quote}{reserved}{hard_quote} plain-word2',
                  hard_quote=HARD_QUOTE_CHAR,
                  reserved=reserved),
             Expectation(
                 fragments=[constant(reserved)],
                 token_stream=assert_token_stream(
                     head_token=assert_token_string_is('plain-word2')),
             )),
         TC(
             _src('{soft_quote}{reserved}{soft_quote} plain-word2',
                  soft_quote=SOFT_QUOTE_CHAR,
                  reserved=reserved),
             Expectation(
                 fragments=[constant(reserved)],
                 token_stream=assert_token_stream(
                     head_token=assert_token_string_is('plain-word2')),
             )),
     ]
Example #2
0
    def test_no_symbol_references(self):
        """Sources with invalid '*' patterns and no symbol refs fail pre-SDS validation."""
        # ARRANGE #
        option_cases = [
            option_case_for_no_option(asrt.anything_goes()),
            option_case_for_ignore_case(asrt.anything_goes()),
        ]

        source_cases = [
            SourceCase('single invalid star',
                       Arguments('*'),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
            SourceCase('invalid stars inside valid regex',
                       Arguments('before**after'),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
        ]
        # ACT & ASSERT #
        check_many(self,
                   Arrangement(),
                   source_cases,
                   ExpectationExceptPattern(
                       validation=pre_sds_validation_fails(),
                   ),
                   option_cases,
                   )
Example #3
0
    def test_no_symbol_references(self):
        """Sources with invalid '*' patterns and no symbol refs fail pre-SDS validation."""
        # ARRANGE #
        option_cases = [
            option_case_for_no_option(asrt.anything_goes()),
            option_case_for_ignore_case(asrt.anything_goes()),
        ]

        source_cases = [
            SourceCase('single invalid star',
                       Arguments('*'),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
            SourceCase('invalid stars inside valid regex',
                       Arguments('before**after'),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
        ]
        # ACT & ASSERT #
        check_many(self,
                   Arrangement(),
                   source_cases,
                   ExpectationExceptPattern(
                       validation=validation.ValidationAssertions.pre_sds_fails__w_any_msg(),
                   ),
                   option_cases,
                   )
Example #4
0
def successful_parse_of_single_symbol() -> List[TC]:
    """Test cases: a source that is a single symbol reference parses to one symbol fragment.

    Checks the reference alone, followed by another token, and soft-quoted.
    """
    symbol_ref = SymbolWithReferenceSyntax('validSymbol_name_1')
    single_symbol = [symbol(symbol_ref.name)]
    return [
        TC(
            _src('{symbol_reference}', symbol_reference=symbol_ref),
            Expectation(
                fragments=single_symbol,
                token_stream=assert_token_stream(is_null=asrt.is_true),
            )),
        TC(
            _src('{symbol_reference} rest', symbol_reference=symbol_ref),
            Expectation(
                fragments=single_symbol,
                token_stream=assert_token_stream(
                    head_token=assert_token_string_is('rest')),
            )),
        TC(
            _src('{soft_quote}{symbol_reference}{soft_quote} rest',
                 soft_quote=SOFT_QUOTE_CHAR,
                 symbol_reference=symbol_ref),
            Expectation(
                fragments=single_symbol,
                token_stream=assert_token_stream(
                    head_token=assert_token_string_is('rest')),
            )),
    ]
Example #5
0
    def test_symbol_references(self):
        """A symbol reference inside the regex is resolved ('* ') and reported as a reference."""
        # ARRANGE #
        star_string_symbol = NameAndValue('STAR_SYMBOL',
                                          container(string_resolvers.str_constant('* ')))

        regex_str = '.* regex'

        # Source text where the '* ' part of the regex comes from the symbol.
        regex_arg_str = '.{star}regex'.format(
            star=symbol_reference_syntax_for_name(star_string_symbol.name),
        )

        matches_for_case_sensitive = [' regex', 'before regex after']

        option_cases = [
            option_case_for_no_option(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_sensitive,
                               non_matching_string=' REGEX')
            ),

            option_case_for_ignore_case(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_insensitive(matches_for_case_sensitive),
                               non_matching_string='regex')
            ),
        ]

        source_cases = [
            SourceCase('single invalid star',
                       Arguments(regex_arg_str),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
            SourceCase('invalid stars at start of regex',
                       Arguments(surrounded_by_soft_quotes(regex_arg_str)),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
        ]

        arrangement = Arrangement(
            symbols=SymbolTable({
                star_string_symbol.name: star_string_symbol.value,
            })
        )

        expectation = ExpectationExceptPattern(
            references=asrt.matches_sequence([
                is_reference_to_valid_regex_string_part(star_string_symbol.name),
            ]),
            validation=all_validations_passes(),
        )

        # ACT & ASSERT #

        check_many(self,
                   arrangement,
                   source_cases,
                   expectation,
                   option_cases,
                   )
Example #6
0
    def test_quoted_tokens(self):
        """Soft- and hard-quoted regexes are each consumed as a single token; validation passes."""
        # ARRANGE #
        regex_str = '.* regex'

        matches_for_case_sensitive = [' regex', 'before regex after']

        option_cases = [
            option_case_for_no_option(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_sensitive,
                               non_matching_string=' REGEX')
            ),

            option_case_for_ignore_case(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_insensitive(matches_for_case_sensitive),
                               non_matching_string='regex')
            ),
        ]

        text_on_following_line = 'text on following line'

        source_cases = [
            SourceCase(
                'soft quotes',

                source=
                Arguments('{regex}'.format(
                    regex=surrounded_by_soft_quotes(regex_str),
                )),

                source_assertion=
                assert_token_stream(is_null=asrt.is_true),
            ),
            SourceCase(
                'hard quotes, and text on following line',

                source=
                Arguments('{regex}'.format(
                    regex=surrounded_by_hard_quotes(regex_str),
                ),
                    following_lines=[text_on_following_line]),

                source_assertion=
                assert_token_stream(
                    remaining_source=asrt.equals('\n' + text_on_following_line)),
            ),
        ]
        # ACT & ASSERT #
        check_many(self,
                   Arrangement(),
                   source_cases,
                   ExpectationExceptPattern(
                       validation=ValidationAssertions.all_passes()
                   ),
                   option_cases,
                   )
Example #7
0
    def test_quoted_tokens(self):
        """Soft- and hard-quoted regexes are each consumed as a single token; validation passes."""
        # ARRANGE #
        regex_str = '.* regex'

        matches_for_case_sensitive = [' regex', 'before regex after']

        option_cases = [
            option_case_for_no_option(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_sensitive,
                               non_matching_string=' REGEX')
            ),

            option_case_for_ignore_case(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_insensitive(matches_for_case_sensitive),
                               non_matching_string='regex')
            ),
        ]

        text_on_following_line = 'text on following line'

        source_cases = [
            SourceCase(
                'soft quotes',

                source=
                Arguments('{regex}'.format(
                    regex=surrounded_by_soft_quotes(regex_str),
                )),

                source_assertion=
                assert_token_stream(is_null=asrt.is_true),
            ),
            SourceCase(
                'hard quotes, and text on following line',

                source=
                Arguments('{regex}'.format(
                    regex=surrounded_by_hard_quotes(regex_str),
                ),
                    following_lines=[text_on_following_line]),

                source_assertion=
                assert_token_stream(
                    remaining_source=asrt.equals('\n' + text_on_following_line)),
            ),
        ]
        # ACT & ASSERT #
        check_many(self,
                   Arrangement(),
                   source_cases,
                   ExpectationExceptPattern(
                       validation=all_validations_passes()
                   ),
                   option_cases,
                   )
Example #8
0
    def test_symbol_references(self):
        """A symbol reference inside the regex is resolved ('* ') and reported; validation passes."""
        # ARRANGE #
        star_string_symbol = StringSymbolContext.of_constant('STAR_SYMBOL', '* ')

        regex_str = '.* regex'

        # Source text where the '* ' part of the regex comes from the symbol.
        regex_arg_str = '.{star}regex'.format(
            star=symbol_reference_syntax_for_name(star_string_symbol.name),
        )

        matches_for_case_sensitive = [' regex', 'before regex after']

        option_cases = [
            option_case_for_no_option(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_sensitive,
                               non_matching_string=' REGEX')
            ),

            option_case_for_ignore_case(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_insensitive(matches_for_case_sensitive),
                               non_matching_string='regex')
            ),
        ]

        source_cases = [
            SourceCase('single invalid star',
                       Arguments(regex_arg_str),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
            SourceCase('invalid stars at start of regex',
                       Arguments(surrounded_by_soft_quotes(regex_arg_str)),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
        ]

        arrangement = Arrangement(
            symbols=star_string_symbol.symbol_table
        )

        expectation = ExpectationExceptPattern(
            references=asrt.matches_sequence([
                is_reference_to__regex_string_part(star_string_symbol.name),
            ]),
            validation=ValidationAssertions.all_passes(),
        )

        # ACT & ASSERT #

        check_many(self,
                   arrangement,
                   source_cases,
                   expectation,
                   option_cases,
                   )
    def test_successful_parse_of_unquoted_tokens(self):
        """Two unquoted tokens parse as regex + replacement; check the leftover source."""
        # ARRANGE #
        regex_str = 'regex'
        replacement_str = 'replacement'

        text_on_following_line = 'text on following line'

        expected_resolver = resolved_value_is_replace_transformer(regex_str,
                                                                  replacement_str)
        cases = [
            SourceCase(
                'transformer is only source',
                source=
                remaining_source('{regex} {replacement}'.format(
                    regex=regex_str,
                    replacement=replacement_str,
                )),
                source_assertion=
                assert_token_stream(is_null=asrt.is_true),
            ),
            SourceCase(
                'transformer is followed by a token',
                source=
                remaining_source('{regex} {replacement} following_token'.format(
                    regex=regex_str,
                    replacement=replacement_str,
                )),
                source_assertion=
                assert_token_stream(
                    is_null=asrt.is_false,
                    remaining_part_of_current_line=asrt.equals('following_token')),
            ),
            SourceCase(
                'transformer is only element on current line, but followed by more lines',
                source=
                remaining_source('{regex} {replacement}'.format(
                    regex=regex_str,
                    replacement=replacement_str),
                    following_lines=[text_on_following_line]),
                source_assertion=
                assert_token_stream(
                    is_null=asrt.is_false,
                    remaining_source=asrt.equals('\n' + text_on_following_line)),
            ),
        ]
        for case in cases:
            with self.subTest(case_name=case.name):
                self._check(case.source,
                            Expectation(expected_resolver,
                                        case.source_assertion))
Example #10
0
    def test_successful_parse(self):
        """'< 69' parses as a line-number matcher; check what remains of the source."""
        # ARRANGE #
        def model_of(rhs: int) -> ModelInfo:
            # Line contents are irrelevant to the line-number matcher.
            return ModelInfo((rhs, 'irrelevant line contents'))

        expected_integer_matcher = IntegerMatcherFromComparisonOperator(
            exactly_lib.test_case_utils.line_matcher.impl.line_number.LINE_NUMBER_PROPERTY,
            comparators.LT,
            69)
        expected_resolver = resolved_value_is_line_number_matcher(expected_integer_matcher,
                                                                  [
                                                                      model_of(60),
                                                                      model_of(69),
                                                                      model_of(72),

                                                                  ])

        text_on_following_line = 'text on following line'

        cases = [
            SourceCase(
                'simple comparison',

                source=
                remaining_source('< 69',
                                 following_lines=[text_on_following_line]),

                source_assertion=
                assert_token_stream(
                    remaining_source=asrt.equals('\n' + text_on_following_line)),
            ),
            SourceCase(
                'following tokens on same line',

                source=
                remaining_source('< 69 next',
                                 following_lines=[text_on_following_line]),

                source_assertion=
                assert_token_stream(
                    remaining_source=asrt.equals('next' + '\n' + text_on_following_line)),
            ),
        ]
        for case in cases:
            with self.subTest(case_name=case.name):
                self._check(case.source,
                            Expectation(expected_resolver,
                                        case.source_assertion))
 def test_look_ahead_state_pass(self):
     """An empty stream should satisfy a NULL look-ahead-state assertion."""
     # ARRANGE #
     empty_stream = TokenStream('')
     # Sanity check that the constructor produced the expected state.
     self.assertTrue(empty_stream.look_ahead_state is LookAheadState.NULL,
                     'sanity check: this test assumes that the constructor of the token parser '
                     'sets correct look-ahead state')
     look_ahead_assertion = sut.assert_token_stream(
         look_ahead_state=asrt.is_(LookAheadState.NULL))
     # ACT & ASSERT #
     look_ahead_assertion.apply_without_message(self, empty_stream)
 def test_head_token_fail(self):
     """A head-token assertion that requires a quoted token fails on an unquoted one."""
     # ARRANGE #
     stream = TokenStream('a_token')
     requires_quoted_head = asrt.sub_component('token_type',
                                               Token.is_quoted.fget,
                                               asrt.is_true)
     assertion_under_test = sut.assert_token_stream(head_token=requires_quoted_head)
     # ACT & ASSERT #
     assert_that_assertion_fails(assertion_under_test, stream)
 def test_head_token_pass(self):
     """A head-token assertion that requires an unquoted token passes here."""
     # ARRANGE #
     stream = TokenStream('a_token')
     requires_unquoted_head = asrt.sub_component('token_type',
                                                 Token.is_quoted.fget,
                                                 asrt.is_false)
     assertion_under_test = sut.assert_token_stream(head_token=requires_unquoted_head)
     # ACT & ASSERT #
     assertion_under_test.apply_without_message(self, stream)
 def test_fail(self):
     """Expecting the NULL look-ahead state fails for a non-empty stream."""
     # ARRANGE #
     non_empty_stream = TokenStream('a_token')
     # Sanity check that the constructor produced the expected state.
     self.assertTrue(non_empty_stream.look_ahead_state is LookAheadState.HAS_TOKEN,
                     'sanity check: this test assumes that the constructor of the token parser '
                     'sets correct look-ahead state')
     expects_null = sut.assert_token_stream(look_ahead_state=asrt.is_(LookAheadState.NULL))
     # ACT & ASSERT #
     assert_that_assertion_fails(expects_null, non_empty_stream)
 def test_fail(self):
     """The NULL look-ahead-state assertion must reject a stream holding a token."""
     # ARRANGE #
     stream_with_token = TokenStream('a_token')
     # Sanity check that the constructor produced the expected state.
     self.assertTrue(stream_with_token.look_ahead_state is LookAheadState.HAS_TOKEN,
                     'sanity check: this test assumes that the constructor of the token parser '
                     'sets correct look-ahead state')
     null_state_assertion = sut.assert_token_stream(
         look_ahead_state=asrt.is_(LookAheadState.NULL))
     # ACT & ASSERT #
     assert_that_assertion_fails(null_state_assertion, stream_with_token)
 def test_head_token_fail(self):
     """head_token that demands a quoted token must fail for an unquoted head."""
     # ARRANGE #
     token_source = TokenStream('a_token')
     quoted_check = asrt.sub_component('token_type',
                                       Token.is_quoted.fget,
                                       asrt.is_true)
     checked_assertion = sut.assert_token_stream(head_token=quoted_check)
     # ACT & ASSERT #
     assert_that_assertion_fails(checked_assertion, token_source)
 def test_head_token_pass(self):
     """head_token that demands an unquoted token should accept an unquoted head."""
     # ARRANGE #
     token_source = TokenStream('a_token')
     unquoted_check = asrt.sub_component('token_type',
                                         Token.is_quoted.fget,
                                         asrt.is_false)
     checked_assertion = sut.assert_token_stream(head_token=unquoted_check)
     # ACT & ASSERT #
     checked_assertion.apply_without_message(self, token_source)
 def test_look_ahead_state_pass(self):
     """A NULL look-ahead-state assertion passes for an empty stream."""
     # ARRANGE #
     stream_of_nothing = TokenStream('')
     # Sanity check that the constructor produced the expected state.
     self.assertTrue(stream_of_nothing.look_ahead_state is LookAheadState.NULL,
                     'sanity check: this test assumes that the constructor of the token parser '
                     'sets correct look-ahead state')
     expects_null_state = sut.assert_token_stream(
         look_ahead_state=asrt.is_(LookAheadState.NULL))
     # ACT & ASSERT #
     expects_null_state.apply_without_message(self, stream_of_nothing)
Example #19
0
def successful_parse_of_constant() -> List[TC]:
    """Test cases: plain and quoted words parse as constant fragments.

    A hard-quoted symbol-reference syntax stays a literal constant
    (no symbol reference is produced).
    """
    return [
        TC(
            _src('plain-word1 plain-word2'),
            Expectation(
                fragments=[constant('plain-word1')],
                token_stream=assert_token_stream(
                    head_token=assert_token_string_is('plain-word2')),
            )),
        TC(
            _src('word'),
            Expectation(
                fragments=[constant('word')],
                token_stream=assert_token_stream(is_null=asrt.is_true),
            )),
        TC(
            _src('{soft_quote}double quoted word{soft_quote} plain-word2',
                 soft_quote=SOFT_QUOTE_CHAR),
            Expectation(
                fragments=[constant('double quoted word')],
                token_stream=assert_token_stream(
                    head_token=assert_token_string_is('plain-word2')),
            )),
        TC(
            _src('{hard_quote}single quoted word{hard_quote} plain-word2',
                 hard_quote=HARD_QUOTE_CHAR),
            Expectation(
                fragments=[constant('single quoted word')],
                token_stream=assert_token_stream(
                    head_token=assert_token_string_is('plain-word2')),
            )),
        TC(
            _src('{hard_quote}{symbol_reference}{hard_quote} plain-word2',
                 symbol_reference=symbol_reference_syntax_for_name('sym_name'),
                 hard_quote=HARD_QUOTE_CHAR),
            Expectation(
                fragments=[
                    constant(symbol_reference_syntax_for_name('sym_name'))
                ],
                token_stream=assert_token_stream(
                    head_token=assert_token_string_is('plain-word2')),
            )),
    ]
Example #20
0
    def test_symbol_references_without_dir_dependencies(self):
        """The star-symbol reference is reported; validation fails pre SDS."""
        # ARRANGE #

        option_cases = [
            option_case_for_no_option(asrt.anything_goes()),
            option_case_for_ignore_case(asrt.anything_goes()),
        ]

        sym_ref_to_star = symbol_reference_syntax_for_name(self.STAR_STRING_SYMBOL.name)

        source_cases = [
            SourceCase('single invalid star',
                       Arguments(sym_ref_to_star),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
            SourceCase('invalid stars at start of regex',
                       Arguments(sym_ref_to_star +
                                 'after'),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
        ]

        arrangement = Arrangement(
            symbols=SymbolTable({
                self.STAR_STRING_SYMBOL.name: self.STAR_STRING_SYMBOL.value,
            })
        )

        expectation = ExpectationExceptPattern(
            references=asrt.matches_sequence([
                is_reference_to_valid_regex_string_part(self.STAR_STRING_SYMBOL.name),
            ]),
            validation=pre_sds_validation_fails(),
        )

        # ACT & ASSERT #

        check_many(self,
                   arrangement,
                   source_cases,
                   expectation,
                   option_cases,
                   )
    def test_successful_parse_of_quoted_tokens(self):
        """Quoted regex and replacement tokens parse successfully; check leftover source."""
        # ARRANGE #
        regex_str = 'the regex'
        replacement_str = 'the replacement'

        text_on_following_line = 'text on following line'

        expected_resolver = resolved_value_is_replace_transformer(regex_str,
                                                                  replacement_str)
        cases = [
            SourceCase(
                'soft quotes',

                source=
                remaining_source('{regex} {replacement}'.format(
                    regex=surrounded_by_soft_quotes(regex_str),
                    replacement=surrounded_by_soft_quotes(replacement_str),
                )),

                source_assertion=
                assert_token_stream(is_null=asrt.is_true),
            ),
            SourceCase(
                'hard quotes, and text on following line',

                source=
                remaining_source('{regex} {replacement}'.format(
                    regex=surrounded_by_hard_quotes(regex_str),
                    replacement=surrounded_by_hard_quotes(replacement_str),
                ),
                    following_lines=[text_on_following_line]),

                source_assertion=
                assert_token_stream(
                    remaining_source=asrt.equals('\n' + text_on_following_line)),
            ),
        ]
        for case in cases:
            with self.subTest(case_name=case.name):
                self._check(case.source,
                            Expectation(expected_resolver,
                                        case.source_assertion))
Example #22
0
    def test_symbol_references_without_dir_dependencies(self):
        """The star-symbol reference is reported; validation fails pre SDS."""
        # ARRANGE #

        option_cases = [
            option_case_for_no_option(asrt.anything_goes()),
            option_case_for_ignore_case(asrt.anything_goes()),
        ]

        sym_ref_to_star = symbol_reference_syntax_for_name(self.STAR_STRING_SYMBOL.name)

        source_cases = [
            SourceCase('single invalid star',
                       Arguments(sym_ref_to_star),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
            SourceCase('invalid stars at start of regex',
                       Arguments(sym_ref_to_star +
                                 'after'),
                       assert_token_stream(is_null=asrt.is_true)
                       ),
        ]

        arrangement = Arrangement(
            symbols=self.STAR_STRING_SYMBOL.symbol_table
        )

        expectation = ExpectationExceptPattern(
            references=asrt.matches_sequence([
                is_reference_to__regex_string_part(self.STAR_STRING_SYMBOL.name),
            ]),
            validation=validation.ValidationAssertions.pre_sds_fails__w_any_msg(),
        )

        # ACT & ASSERT #

        check_many(self,
                   arrangement,
                   source_cases,
                   expectation,
                   option_cases,
                   )
 def test_pass_when_no_explicit_component_assertion(self):
     """assert_token_stream() with no component assertions accepts any stream."""
     test_cases = [
         NameAndValue('Empty source', ''),
         NameAndValue('Single token source', 'a_token'),
         NameAndValue('Two token source', 'first_token second'),
     ]
     for case in test_cases:
         with self.subTest(case.name):
             # ARRANGE #
             stream = TokenStream(case.value)
             unconstrained = sut.assert_token_stream()
             # ACT & ASSERT #
             unconstrained.apply_without_message(self, stream)
 def test_pass_when_no_explicit_component_assertion(self):
     """A default-constructed token-stream assertion accepts streams of any size."""
     sources_with_descriptions = [
         ('', 'Empty source'),
         ('a_token', 'Single token source'),
         ('first_token second', 'Two token source'),
     ]
     for src, descr in sources_with_descriptions:
         with self.subTest(msg=descr):
             # ARRANGE #
             stream = TokenStream(src)
             default_assertion = sut.assert_token_stream()
             # ACT & ASSERT #
             default_assertion.apply_without_message(self, stream)
Example #25
0
    def test_successful_parse(self):
        """'< 69' parses as a line-number matcher; check what remains of the source."""
        # ARRANGE #
        def model_of(rhs: int) -> ModelInfo:
            # Line contents are irrelevant to the line-number matcher.
            return ModelInfo((rhs, 'irrelevant line contents'))

        expected_integer_matcher = _ExpectedEquivalentLineNumMatcher(
            comparators.LT, 69)
        expected_sdv = resolved_value_is_line_number_matcher(
            expected_integer_matcher, [
                model_of(60),
                model_of(69),
                model_of(72),
            ])

        text_on_following_line = 'text on following line'

        cases = [
            SourceCase(
                'simple comparison',
                source=remaining_source(
                    '< 69', following_lines=[text_on_following_line]),
                source_assertion=assert_token_stream(
                    remaining_source=asrt.equals('\n' +
                                                 text_on_following_line)),
            ),
            SourceCase(
                'following tokens on same line',
                source=remaining_source(
                    '< 69 next', following_lines=[text_on_following_line]),
                source_assertion=assert_token_stream(
                    remaining_source=asrt.equals('next' + '\n' +
                                                 text_on_following_line)),
            ),
        ]
        for case in cases:
            with self.subTest(case_name=case.name):
                self._check(case.source,
                            Expectation(expected_sdv, case.source_assertion))
Example #26
0
    def test_symbol_references_with_dir_dependencies(self):
        """String + path symbol references in the regex; validation fails post SDS."""
        # ARRANGE #

        path_symbol_name = 'PATH_SYMBOL'

        regex_source_string = (symbol_reference_syntax_for_name(self.STAR_STRING_SYMBOL.name) +
                               symbol_reference_syntax_for_name(path_symbol_name))

        expectation = Expectation(
            references=asrt.matches_sequence([
                is_reference_to__regex_string_part(self.STAR_STRING_SYMBOL.name),
                is_reference_to__regex_string_part(path_symbol_name),
            ]),
            validation=ValidationAssertions.post_sds_fails__w_any_msg(),
            token_stream=assert_token_stream(is_null=asrt.is_true),
        )

        rel_opt_cases = [
            RelOptionType.REL_HDS_CASE,
            RelOptionType.REL_CWD,
            RelOptionType.REL_ACT,
        ]

        # Same expectation for every relativity of the path symbol.
        for rel_opt in rel_opt_cases:
            path_symbol = PathDdvSymbolContext.of_no_suffix(path_symbol_name, rel_opt)

            arrangement = Arrangement(
                symbols=SymbolContext.symbol_table_of_contexts([
                    self.STAR_STRING_SYMBOL,
                    path_symbol,
                ])
            )

            # ACT & ASSERT #

            self._check(regex_source_string,
                        arrangement,
                        expectation,
                        )
Example #27
0
    def test_symbol_references_with_dir_dependencies(self):
        """String + file-ref symbol references in the regex; validation fails post SDS."""
        # ARRANGE #

        file_ref_symbol_name = 'FILE_REF_SYMBOL'

        regex_source_string = (symbol_reference_syntax_for_name(self.STAR_STRING_SYMBOL.name) +
                               symbol_reference_syntax_for_name(file_ref_symbol_name))

        expectation = Expectation(
            references=asrt.matches_sequence([
                is_reference_to_valid_regex_string_part(self.STAR_STRING_SYMBOL.name),
                is_reference_to_valid_regex_string_part(file_ref_symbol_name),
            ]),
            validation=post_sds_validation_fails(),
            token_stream=assert_token_stream(is_null=asrt.is_true),
        )

        rel_opt_cases = [
            RelOptionType.REL_HOME_CASE,
            RelOptionType.REL_CWD,
            RelOptionType.REL_ACT,
        ]

        # Same expectation for every relativity of the file-ref symbol.
        for rel_opt in rel_opt_cases:
            file_ref_resolver = file_ref_resolvers.of_rel_option(rel_opt)

            arrangement = Arrangement(
                symbols=SymbolTable({
                    self.STAR_STRING_SYMBOL.name: self.STAR_STRING_SYMBOL.value,
                    file_ref_symbol_name: container(file_ref_resolver),
                })
            )

            # ACT & ASSERT #

            self._check(regex_source_string,
                        arrangement,
                        expectation,
                        )
Example #28
0
def successful_parse_of_complex_structure() -> List[TC]:
    """Test cases for a soft-quoted string mixing symbol references with
    constant text.

    References whose names are not valid symbol names ('not a sym1/2') must
    be kept verbatim inside the constant fragment, while the valid
    references ('sym_name1/2') become symbol fragments.
    """
    return [
        TC(
            _src(
                '  {soft_quote}{sym_ref1}const 1{not_a_symbol_name1}{not_a_symbol_name2}const 2{sym_ref2}{soft_quote}',
                soft_quote=SOFT_QUOTE_CHAR,
                sym_ref1=symbol_reference_syntax_for_name('sym_name1'),
                not_a_symbol_name1=symbol_reference_syntax_for_name(
                    'not a sym1'),
                not_a_symbol_name2=symbol_reference_syntax_for_name(
                    'not a sym2'),
                sym_ref2=symbol_reference_syntax_for_name('sym_name2')),
            Expectation(
                fragments=[
                    symbol('sym_name1'),
                    constant('const 1' +
                             symbol_reference_syntax_for_name('not a sym1') +
                             symbol_reference_syntax_for_name('not a sym2') +
                             'const 2'),
                    symbol('sym_name2')
                ],
                token_stream=assert_token_stream(is_null=asrt.is_true),
            ))
    ]
 def test_remaining_part_of_current_line_pass(self):
     """Assertion on the current line's remainder succeeds when it matches."""
     stream = TokenStream('first line\nsecond line')
     sut.assert_token_stream(
         remaining_part_of_current_line=asrt.equals('first line')
     ).apply_without_message(self, stream)
 def test_position_fail(self):
     """A position assertion with the wrong value must fail."""
     stream = TokenStream('a_token')
     assert_that_assertion_fails(
         sut.assert_token_stream(position=asrt.equals(2)),
         stream,
     )
Example #31
0
    def test(self):
        """consume_remaining_part_of_current_line_as_string: check both the
        consumed string and the resulting stream state for a range of
        sources.

        Each case is (source, expected consumed string, assertion on the
        token stream after consumption).
        """
        test_cases = [
            # Single token
            ('a', 'a',
             assert_token_stream(is_null=asrt.is_true,
                                 look_ahead_state=asrt.is_(LookAheadState.NULL),
                                 position=asrt.equals(1),
                                 remaining_source=asrt.equals(''))),
            ('b ', 'b ',
             assert_token_stream(is_null=asrt.is_true,
                                 look_ahead_state=asrt.is_(LookAheadState.NULL),
                                 position=asrt.equals(2),
                                 remaining_source=asrt.equals(''))),
            ('x \n', 'x ',
             assert_token_stream(is_null=asrt.is_true,
                                 look_ahead_state=asrt.is_(LookAheadState.NULL),
                                 position=asrt.equals(2),
                                 remaining_source=asrt.equals('\n'))
             ),
            ('x\ny', 'x',
             assert_token_stream(head_token=assert_plain('y'),
                                 look_ahead_state=asrt.is_not(LookAheadState.NULL),
                                 position=asrt.equals(1),
                                 remaining_source=asrt.equals('\ny'))
             ),
            ('x\n y', 'x',
             assert_token_stream(head_token=assert_plain('y'),
                                 look_ahead_state=asrt.is_not(LookAheadState.NULL),
                                 position=asrt.equals(1),
                                 remaining_source=asrt.equals('\n y'))
             ),
            # Multiple tokens
            ('a A', 'a A',
             assert_token_stream(is_null=asrt.is_true,
                                 look_ahead_state=asrt.is_(LookAheadState.NULL),
                                 position=asrt.equals(3),
                                 remaining_source=asrt.equals(''))
             ),
            ('a A\nb B', 'a A',
             assert_token_stream(head_token=assert_plain('b'),
                                 look_ahead_state=asrt.is_not(LookAheadState.NULL),
                                 position=asrt.equals(3),
                                 remaining_source=asrt.equals('\nb B'))
             ),
            ('a A\ninvalid_token"', 'a A',
             assert_token_stream(look_ahead_state=asrt.is_(LookAheadState.SYNTAX_ERROR),
                                 is_null=asrt.is_true,
                                 position=asrt.equals(3),
                                 remaining_source=asrt.equals('\ninvalid_token"'))
             ),
            # No tokens
            ('', '',
             assert_token_stream(is_null=asrt.is_true,
                                 look_ahead_state=asrt.is_(LookAheadState.NULL),
                                 position=asrt.equals(0),
                                 remaining_source=asrt.equals(''))
             ),
            (' ', ' ',
             assert_token_stream(is_null=asrt.is_true,
                                 look_ahead_state=asrt.is_(LookAheadState.NULL),
                                 position=asrt.equals(1),
                                 remaining_source=asrt.equals(''))
             ),
            (' \n', ' ',
             assert_token_stream(is_null=asrt.is_true,
                                 look_ahead_state=asrt.is_(LookAheadState.NULL),
                                 position=asrt.equals(1),
                                 remaining_source=asrt.equals('\n'))
             ),
            (' \n ', ' ',
             assert_token_stream(is_null=asrt.is_true,
                                 look_ahead_state=asrt.is_(LookAheadState.NULL),
                                 position=asrt.equals(1),
                                 remaining_source=asrt.equals('\n '))
             ),
            (' \n"invalid quoting', ' ',
             assert_token_stream(look_ahead_state=asrt.is_(LookAheadState.SYNTAX_ERROR),
                                 position=asrt.equals(1),
                                 remaining_source=asrt.equals('\n"invalid quoting'))
             ),

        ]
        for source, expected_consumed_string, token_stream_assertion in test_cases:
            with self.subTest(msg=repr(source)):
                ts = sut.TokenStream(source)
                # ACT #
                actual_consumed_string = ts.consume_remaining_part_of_current_line_as_string()
                # ASSERT #
                self.assertEqual(expected_consumed_string,
                                 actual_consumed_string,
                                 'consumed string')
                token_stream_assertion.apply_with_message(self, ts, 'token stream after parse')
Example #32
0
    def test_unquoted_tokens(self):
        """Parse of an unquoted REG-EX token, with and without the
        ignore-case option.

        Checks matching/non-matching strings for both option cases, and
        that parsing consumes exactly the regex token for each source
        variant (regex alone / followed by a token / followed by lines).
        """
        # ARRANGE #
        regex_str = 'a.*regex'

        matches_for_case_sensitive = ['aBCregex', 'aBCregex and more']

        option_cases = [
            option_case_for_no_option(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_sensitive,
                               non_matching_string='AbcREGEX')
            ),

            option_case_for_ignore_case(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_insensitive(matches_for_case_sensitive),
                               non_matching_string='Abc')
            ),
        ]

        text_on_following_line = 'text on following line'

        def _arg(format_string: str) -> str:
            # Substitute the regex under test into a source template.
            return format_string.format(
                regex=regex_str)

        source_cases = [
            SourceCase(
                'REG-EX is only source',
                source=
                Arguments(_arg('{regex}')),
                source_assertion=
                assert_token_stream(is_null=asrt.is_true),
            ),
            SourceCase(
                'REG-EX is followed by a token',
                source=
                Arguments(_arg('{regex} following_token')),
                source_assertion=
                assert_token_stream(
                    is_null=asrt.is_false,
                    remaining_part_of_current_line=asrt.equals('following_token')),
            ),
            SourceCase(
                'REG-EX is only element on current line, but followed by more lines',
                source=
                Arguments(_arg('{regex}'),
                          following_lines=[text_on_following_line]),
                source_assertion=
                assert_token_stream(
                    is_null=asrt.is_false,
                    remaining_part_of_current_line=asrt.equals(''),
                    remaining_source=asrt.equals('\n' + text_on_following_line)),
            ),
        ]
        # ACT & ASSERT #
        check_many(self,
                   Arrangement(),
                   source_cases,
                   ExpectationExceptPattern(
                       validation=all_validations_passes()
                   ),
                   option_cases,
                   )
 def test_position_fail(self):
     """Fails when the expected position differs from the actual one."""
     wrong_position = asrt.equals(2)
     assertion = sut.assert_token_stream(position=wrong_position)
     assert_that_assertion_fails(assertion, TokenStream('a_token'))
Example #34
0
        CheckExistingFile(configuration),
        CheckExistingButNonExecutableFile(configuration),
        CheckNonExistingFile(configuration)
    ])
    return ret_val


class Arrangement:
    """Test arrangement: a TCDS populator plus an optional symbol table."""

    def __init__(self,
                 tcds_populator: TcdsPopulator,
                 symbols: SymbolTable = None):
        # NOTE(review): default is None although the annotation is not
        # Optional — symbol_table_from_none_or_value handles the None case.
        self.tcds_populator = tcds_populator
        self.symbols = symbol_table_from_none_or_value(symbols)


token_stream_is_null = assert_token_stream(is_null=asrt.is_true)


def token_stream_has_remaining_source(source: str) -> Assertion:
    """Assertion: the stream is not exhausted and its remaining source
    equals ``source``.
    """
    expected_remaining = asrt.equals(source)
    return assert_token_stream(
        is_null=asrt.is_false,
        remaining_source=expected_remaining,
    )


class ExpectationOnExeFile:
    def __init__(
        self,
        path_ddv: PathDdv,
        expected_symbol_references: List[SymbolReference],
        symbol_for_value_checks: Optional[SymbolTable] = None,
    ):
        self.symbol_for_value_checks = symbol_for_value_checks
 def test_source_pass(self):
     """A source assertion matching the full source succeeds."""
     stream = TokenStream('a_token')
     sut.assert_token_stream(
         source=asrt.equals('a_token')
     ).apply_without_message(self, stream)
 def test_is_null_fail(self):
     """is_null=True must be rejected for a stream with a token left."""
     non_empty_stream = TokenStream('a_token')
     assert_that_assertion_fails(
         sut.assert_token_stream(is_null=asrt.is_true),
         non_empty_stream,
     )
Example #37
0
    def test_unquoted_tokens(self):
        """Parse of an unquoted REG-EX token, with and without the
        ignore-case option.

        Checks matching/non-matching strings for both option cases, and
        that parsing consumes exactly the regex token for each source
        variant (regex alone / followed by a token / followed by lines).
        """
        # ARRANGE #
        regex_str = 'a.*regex'

        matches_for_case_sensitive = ['aBCregex', 'aBCregex and more']

        option_cases = [
            option_case_for_no_option(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_sensitive,
                               non_matching_string='AbcREGEX')
            ),

            option_case_for_ignore_case(
                _AssertPattern(regex_str,
                               matching_strings=matches_for_case_insensitive(matches_for_case_sensitive),
                               non_matching_string='Abc')
            ),
        ]

        text_on_following_line = 'text on following line'

        def _arg(format_string: str) -> str:
            # Substitute the regex under test into a source template.
            return format_string.format(
                regex=regex_str)

        source_cases = [
            SourceCase(
                'REG-EX is only source',
                source=
                Arguments(_arg('{regex}')),
                source_assertion=
                assert_token_stream(is_null=asrt.is_true),
            ),
            SourceCase(
                'REG-EX is followed by a token',
                source=
                Arguments(_arg('{regex} following_token')),
                source_assertion=
                assert_token_stream(
                    is_null=asrt.is_false,
                    remaining_part_of_current_line=asrt.equals('following_token')),
            ),
            SourceCase(
                'REG-EX is only element on current line, but followed by more lines',
                source=
                Arguments(_arg('{regex}'),
                          following_lines=[text_on_following_line]),
                source_assertion=
                assert_token_stream(
                    is_null=asrt.is_false,
                    remaining_part_of_current_line=asrt.equals(''),
                    remaining_source=asrt.equals('\n' + text_on_following_line)),
            ),
        ]
        # ACT & ASSERT #
        check_many(self,
                   Arrangement(),
                   source_cases,
                   ExpectationExceptPattern(
                       validation=ValidationAssertions.all_passes()
                   ),
                   option_cases,
                   )
 def test_remaining_part_of_current_line_fail(self):
     """Fails when the remainder of the current line does not match."""
     stream = TokenStream('first line\nsecond line')
     assert_that_assertion_fails(
         sut.assert_token_stream(
             remaining_part_of_current_line=asrt.equals('second line')),
         stream,
     )
 def test_remaining_source_after_head_fail(self):
     """Fails when the source after the head token does not match."""
     stream = TokenStream('fst_token snd_token')
     assert_that_assertion_fails(
         sut.assert_token_stream(
             remaining_source_after_head=asrt.equals('fst_token')),
         stream,
     )
 def test_source_pass(self):
     """Succeeds when the source assertion matches the stream's source."""
     expected = asrt.equals('a_token')
     assertion = sut.assert_token_stream(source=expected)
     assertion.apply_without_message(self, TokenStream('a_token'))
 def test_is_null_pass(self):
     """is_null=True succeeds on an empty (exhausted) stream."""
     empty_stream = TokenStream('')
     sut.assert_token_stream(
         is_null=asrt.is_true
     ).apply_without_message(self, empty_stream)
 def test_remaining_source_after_head_fail(self):
     """A wrong remaining-source-after-head expectation must fail."""
     wrong_remainder = asrt.equals('fst_token')
     assertion = sut.assert_token_stream(remaining_source_after_head=wrong_remainder)
     assert_that_assertion_fails(assertion, TokenStream('fst_token snd_token'))
 def test_remaining_source_after_head_pass(self):
     """Succeeds when the source after the head token matches."""
     stream = TokenStream('fst_token snd_token')
     sut.assert_token_stream(
         remaining_source_after_head=asrt.equals('snd_token')
     ).apply_without_message(self, stream)
 def test_is_null_fail(self):
     """A non-exhausted stream must not satisfy is_null=True."""
     assertion = sut.assert_token_stream(is_null=asrt.is_true)
     assert_that_assertion_fails(assertion, TokenStream('a_token'))
 def test_remaining_source_fail(self):
     """Fails when the remaining source does not match the expectation."""
     stream = TokenStream('a_token')
     assert_that_assertion_fails(
         sut.assert_token_stream(
             remaining_source=asrt.equals('not remaining source')),
         stream,
     )
 def test_remaining_part_of_current_line_fail(self):
     """A mismatching current-line remainder must fail."""
     mismatching = asrt.equals('second line')
     assertion = sut.assert_token_stream(remaining_part_of_current_line=mismatching)
     assert_that_assertion_fails(assertion, TokenStream('first line\nsecond line'))
 def test_remaining_part_of_current_line_pass(self):
     """Succeeds when the current-line remainder matches."""
     matching = asrt.equals('first line')
     assertion = sut.assert_token_stream(remaining_part_of_current_line=matching)
     assertion.apply_without_message(self, TokenStream('first line\nsecond line'))
 def test_remaining_source_fail(self):
     """A wrong remaining-source expectation must fail."""
     wrong_source = asrt.equals('not remaining source')
     assertion = sut.assert_token_stream(remaining_source=wrong_source)
     assert_that_assertion_fails(assertion, TokenStream('a_token'))
 def test_remaining_source_after_head_pass(self):
     """The text after the head token satisfies a matching assertion."""
     expected_tail = asrt.equals('snd_token')
     assertion = sut.assert_token_stream(remaining_source_after_head=expected_tail)
     assertion.apply_without_message(self, TokenStream('fst_token snd_token'))
Example #50
0
 def test(self):
     """consume_remaining_part_of_current_line_as_string: check both the
     consumed string and the resulting stream state for a range of
     sources.

     Each case is (source, expected consumed string, assertion on the
     token stream after consumption).
     """
     test_cases = [
         # Single token
         ('a', 'a',
          assert_token_stream(is_null=asrt.is_true,
                              look_ahead_state=asrt.is_(
                                  LookAheadState.NULL),
                              position=asrt.equals(1),
                              remaining_source=asrt.equals(''))),
         ('b ', 'b ',
          assert_token_stream(is_null=asrt.is_true,
                              look_ahead_state=asrt.is_(
                                  LookAheadState.NULL),
                              position=asrt.equals(2),
                              remaining_source=asrt.equals(''))),
         ('x \n', 'x ',
          assert_token_stream(is_null=asrt.is_true,
                              look_ahead_state=asrt.is_(
                                  LookAheadState.NULL),
                              position=asrt.equals(2),
                              remaining_source=asrt.equals('\n'))),
         ('x\ny', 'x',
          assert_token_stream(head_token=assert_plain('y'),
                              look_ahead_state=asrt.is_not(
                                  LookAheadState.NULL),
                              position=asrt.equals(1),
                              remaining_source=asrt.equals('\ny'))),
         ('x\n y', 'x',
          assert_token_stream(head_token=assert_plain('y'),
                              look_ahead_state=asrt.is_not(
                                  LookAheadState.NULL),
                              position=asrt.equals(1),
                              remaining_source=asrt.equals('\n y'))),
         # Multiple tokens
         ('a A', 'a A',
          assert_token_stream(is_null=asrt.is_true,
                              look_ahead_state=asrt.is_(
                                  LookAheadState.NULL),
                              position=asrt.equals(3),
                              remaining_source=asrt.equals(''))),
         ('a A\nb B', 'a A',
          assert_token_stream(head_token=assert_plain('b'),
                              look_ahead_state=asrt.is_not(
                                  LookAheadState.NULL),
                              position=asrt.equals(3),
                              remaining_source=asrt.equals('\nb B'))),
         ('a A\ninvalid_token"', 'a A',
          assert_token_stream(
              look_ahead_state=asrt.is_(LookAheadState.SYNTAX_ERROR),
              is_null=asrt.is_true,
              position=asrt.equals(3),
              remaining_source=asrt.equals('\ninvalid_token"'))),
         # No tokens
         ('', '',
          assert_token_stream(is_null=asrt.is_true,
                              look_ahead_state=asrt.is_(
                                  LookAheadState.NULL),
                              position=asrt.equals(0),
                              remaining_source=asrt.equals(''))),
         (' ', ' ',
          assert_token_stream(is_null=asrt.is_true,
                              look_ahead_state=asrt.is_(
                                  LookAheadState.NULL),
                              position=asrt.equals(1),
                              remaining_source=asrt.equals(''))),
         (' \n', ' ',
          assert_token_stream(is_null=asrt.is_true,
                              look_ahead_state=asrt.is_(
                                  LookAheadState.NULL),
                              position=asrt.equals(1),
                              remaining_source=asrt.equals('\n'))),
         (' \n ', ' ',
          assert_token_stream(is_null=asrt.is_true,
                              look_ahead_state=asrt.is_(
                                  LookAheadState.NULL),
                              position=asrt.equals(1),
                              remaining_source=asrt.equals('\n '))),
         (' \n"invalid quoting', ' ',
          assert_token_stream(
              look_ahead_state=asrt.is_(LookAheadState.SYNTAX_ERROR),
              position=asrt.equals(1),
              remaining_source=asrt.equals('\n"invalid quoting'))),
     ]
     for source, expected_consumed_string, token_stream_assertion in test_cases:
         with self.subTest(msg=repr(source)):
             ts = sut.TokenStream(source)
             # ACT #
             actual_consumed_string = ts.consume_remaining_part_of_current_line_as_string(
             )
             # ASSERT #
             self.assertEqual(expected_consumed_string,
                              actual_consumed_string, 'consumed string')
             token_stream_assertion.apply_with_message(
                 self, ts, 'token stream after parse')
 def test_is_null_pass(self):
     """An exhausted stream satisfies is_null=True."""
     assertion = sut.assert_token_stream(is_null=asrt.is_true)
     assertion.apply_without_message(self, TokenStream(''))
Example #52
0
    def test_here_document(self):
        """Parse of a regex given as a here-document containing a symbol
        reference.

        The expected pattern includes the trailing newline of the document
        line; parsing must consume the whole here-document, for both option
        cases and both source variants (EOF after the document / more lines
        after it).
        """
        # ARRANGE #
        star_string_symbol = StringSymbolContext.of_constant('STAR_SYMBOL', '*')

        regex_str = '.*'

        # Source line resolves to '.*' once the symbol is substituted.
        regex_source_line = '.' + symbol_reference_syntax_for_name(star_string_symbol.name)

        arrangement = Arrangement(
            symbols=star_string_symbol.symbol_table
        )

        matches_for_case_sensitive = []

        option_cases = [
            option_case_for_no_option(
                _AssertPattern(regex_str + '\n',
                               matching_strings=matches_for_case_sensitive,
                               non_matching_string='missing new line')
            ),

            option_case_for_ignore_case(
                _AssertPattern(regex_str + '\n',
                               matching_strings=matches_for_case_insensitive(matches_for_case_sensitive),
                               non_matching_string='missing new line')
            ),
        ]

        text_on_following_line = 'text on following line'

        here_doc_argument = here_document([regex_source_line])
        source_cases = [
            SourceCase(
                'end of file after HD',
                source=
                here_doc_argument,

                source_assertion=
                assert_token_stream(remaining_part_of_current_line=asrt.equals(''),
                                    remaining_source=asrt.equals('')),
            ),
            SourceCase(
                'followed by more lines',
                source=
                here_doc_argument.followed_by_lines([text_on_following_line]),

                source_assertion=
                assert_token_stream(remaining_part_of_current_line=asrt.equals(''),
                                    remaining_source=asrt.equals('\n' + text_on_following_line)),
            ),
        ]

        expectation = ExpectationExceptPattern(
            references=asrt.matches_sequence([
                is_reference_to__regex_string_part(star_string_symbol.name),
            ]),
            validation=ValidationAssertions.all_passes(),
        )

        # ACT & ASSERT #

        check_many(self,
                   arrangement,
                   source_cases,
                   expectation,
                   option_cases,
                   )
Example #53
0
def token_stream_has_remaining_source(source: str) -> Assertion:
    """Assertion: the stream still has tokens and its remaining source is
    exactly ``source``.
    """
    return assert_token_stream(
        remaining_source=asrt.equals(source),
        is_null=asrt.is_false,
    )
Example #54
0
    def test_here_document(self):
        """Parse of a regex given as a here-document containing a symbol
        reference (resolver-based symbol variant).

        The expected pattern includes the trailing newline of the document
        line; the token stream must be consumed up to the end of the
        here-document, for both option cases and both source variants.
        """
        # ARRANGE #
        star_string_symbol = NameAndValue('STAR_SYMBOL',
                                          container(string_resolvers.str_constant('*')))

        regex_str = '.*'

        # Source line resolves to '.*' once the symbol is substituted.
        regex_source_line = '.' + symbol_reference_syntax_for_name(star_string_symbol.name)

        arrangement = Arrangement(
            symbols=SymbolTable({
                star_string_symbol.name: star_string_symbol.value,
            })
        )

        matches_for_case_sensitive = []

        option_cases = [
            option_case_for_no_option(
                _AssertPattern(regex_str + '\n',
                               matching_strings=matches_for_case_sensitive,
                               non_matching_string='missing new line')
            ),

            option_case_for_ignore_case(
                _AssertPattern(regex_str + '\n',
                               matching_strings=matches_for_case_insensitive(matches_for_case_sensitive),
                               non_matching_string='missing new line')
            ),
        ]

        text_on_following_line = 'text on following line'

        source_cases = [
            SourceCase(
                'end of file after HD',
                source=
                here_document([regex_source_line]),

                source_assertion=
                assert_token_stream(is_null=asrt.is_true),
            ),
            SourceCase(
                'followed by more lines',
                source=
                here_document([regex_source_line]).followed_by_lines([text_on_following_line]),

                source_assertion=
                assert_token_stream(remaining_part_of_current_line=asrt.equals(text_on_following_line)),
            ),
        ]

        expectation = ExpectationExceptPattern(
            references=asrt.matches_sequence([
                is_reference_to_valid_regex_string_part(star_string_symbol.name),
            ]),
            validation=all_validations_passes(),
        )

        # ACT & ASSERT #

        check_many(self,
                   arrangement,
                   source_cases,
                   expectation,
                   option_cases,
                   )