Usage examples of TokenSequence.optional_new_line():

Example 1
 def _variant_tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         self.var_name.tokenization(),
         TokenSequence.optional_new_line(),
         TokenSequence.singleton(defs.ASSIGNMENT_IDENTIFIER),
         TokenSequence.optional_new_line(),
         self.value.tokenization(),
     ])
Example 2
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         self._path.tokenization(),
         TokenSequence.optional_new_line(),
         TokenSequence.singleton(reserved_words.COLON),
         TokenSequence.optional_new_line(),
         self._matcher.tokenization(),
     ])
Example 3
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(self._file_type),
         TokenSequence.optional_new_line(),
         self._file_name.tokenization(),
         self._contents.tokenization(),
     ])
Example 4
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(reserved_words.PAREN_BEGIN),
         TokenSequence.optional_new_line(),
         self._element.tokenization(),
         self._end_paren(),
     ])
Example 5
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(file_matcher.PROGRAM_MATCHER_NAME),
         TokenSequence.optional_new_line(),
         abstract_syntaxes.OptionallyOnNewLine(self._path_argument_position).tokenization(),
         self._program.tokenization(),
     ])
Example 6
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.optional_new_line(),
         self._expectation_type_tokens(),
         self._path.tokenization(),
         self._file_matcher_tokens(),
     ])
Example 7
 def tokenization(self) -> TokenSequence:
     element_tokens = self._syntax.tokenization()
     return (TokenSequence.empty()
             if not element_tokens else TokenSequence.concat([
                 TokenSequence.optional_new_line(),
                 element_tokens,
             ]))
Example 8
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(names.REPLACE_TRANSFORMER_NAME),
         TokenSequence.optional_new_line(),
         token_sequences.FollowedByOptionalNewLineIfNonEmpty(
             self.lines_filter.tokenization()
         ),
         token_sequences.FollowedByOptionalNewLineIfNonEmpty(
             token_sequences.OptionalOption.of_option_name(
                 names.PRESERVE_NEW_LINES_OPTION_NAME,
                 self.preserve_new_lines,
             )
         ),
         self.regex_token.tokenization(),
         TokenSequence.optional_new_line(),
         self.replacement_token.tokenization(),
     ])
Example 9
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         self.transformed.tokenization(),
         TokenSequence.optional_new_line(),
         token_sequences.OptionWMandatoryValue.of_option_name(
             string_transformer.WITH_TRANSFORMED_CONTENTS_OPTION_NAME,
             self.transformer.tokenization(),
         ),
     ])
Example 10
 def _file_matcher_tokens(self) -> TokenSequence:
     if not self._file_matcher:
         return TokenSequence.empty()
     else:
         return TokenSequence.concat([
             TokenSequence.singleton(':'),
             TokenSequence.optional_new_line(),
             self._file_matcher.tokenization(),
         ])
Example 11
 def of_modification(
     modification: TokenSequence,
     contents: AbstractSyntax,
 ) -> TokenSequence:
     return TokenSequence.concat([
         modification,
         TokenSequence.optional_new_line(),
         contents.tokenization(),
     ])
Example 12
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.optional_new_line(),
         OptionWMandatoryValue.of_option_name(
             OUTPUT_FROM_PROGRAM_OPTION_NAME,
             self._program.tokenization(),
         ),
         TokenSequence.new_line(),
         self._expectation.tokenization(),
     ])
Example 13
 def tokenization(self) -> TokenSequence:
     file_specs = collection.intersperse_list(
         TokenSequence.new_line(),
         [fs.tokenization() for fs in self._files])
     return TokenSequence.concat([
         TokenSequence.singleton(self.delimiter__begin),
         TokenSequence.preceded_by_optional_new_line_if_non_empty(
             TokenSequence.concat(file_specs)),
         TokenSequence.optional_new_line(),
         TokenSequence.singleton(self.delimiter__end),
     ])
Example 14
 def runTest(self):
     # ARRANGE #
     cases = [
         NArrEx(
             'layout spec w optional new-line as empty',
             LAYOUT_SPEC__OPTIONAL_NEW_LINE_AS_EMPTY,
             '',
         ),
         NArrEx(
             'layout spec w optional new-line as new-line',
             LAYOUT_SPEC__OPTIONAL_NEW_LINE_AS_NEW_LINE,
             '\n',
         ),
     ]

     tok_seq = TokenSequence.optional_new_line()
     for case in cases:
         with self.subTest(case.name):
             # ACT #
             actual = tok_seq.layout(case.arrangement)
             # ASSERT #
             self.assertEqual(case.expectation, actual)
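
The test above pins down the contract that every other example relies on: TokenSequence.optional_new_line() renders either as the empty string or as a single '\n', depending on the layout spec it is given. The following is a minimal sketch of that contract, using hypothetical stand-ins (Layout, OptionalNewLine) rather than the project's real classes:

 from enum import Enum


 class Layout(Enum):
     """Hypothetical stand-in for a layout spec: how optional new-lines render."""
     AS_EMPTY = ''
     AS_NEW_LINE = '\n'


 class OptionalNewLine:
     """Toy token sequence; its rendering is decided entirely by the spec."""

     def layout(self, spec: Layout) -> str:
         return spec.value


 assert OptionalNewLine().layout(Layout.AS_EMPTY) == ''
 assert OptionalNewLine().layout(Layout.AS_NEW_LINE) == '\n'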
Example 15
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(logic.NOT_OPERATOR_NAME),
         TokenSequence.optional_new_line(),
         self._matcher.tokenization(),
     ])
Example 16
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.optional_new_line(),
         self._expectation.tokenization(),
     ])
Example 17
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(matcher.RUN_PROGRAM),
         TokenSequence.optional_new_line(),
         self._program.tokenization(),
     ])
Example 18
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(matcher_options.EQUALS_ARGUMENT),
         TokenSequence.optional_new_line(),
         self._expected.tokenization(),
     ])
Example 19
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(names.RUN_PROGRAM_TRANSFORMER_NAME),
         TokenSequence.optional_new_line(),
         self._program.tokenization(),
     ])
Example 20
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(syntax.COPY_CONTENTS_OF_EXISTING_DIR),
         TokenSequence.optional_new_line(),
         self._dir_to_copy.tokenization(),
     ])