Example 1
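Builds the token sequence for a transformation: a mandatory new line, followed by the WITH_TRANSFORMED_CONTENTS option with the given transformer as its mandatory value.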
 def _transformation_tokenizer(transformer: TokenSequence) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.new_line(),
         token_sequences.OptionWMandatoryValue.of_option_name(
             syntax_elements.WITH_TRANSFORMED_CONTENTS_OPTION_NAME,
             transformer,
         ),
     ])
Example 2
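The same pattern applied to stdin: the string source is rendered as the mandatory value of the STDIN option, placed on a new line.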
 def _stdin_tokenizer(string_source: TokenSequence) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.new_line(),
         token_sequences.OptionWMandatoryValue.of_option_name(
             syntax_elements.STDIN_OPTION_NAME,
             string_source,
         ),
     ])
Example 3
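Renders a larger construct: an optional new line (left to the layout spec), the OUTPUT_FROM_PROGRAM option wrapping the program's own tokenization, then a mandatory new line before the expectation part.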
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.optional_new_line(),
         OptionWMandatoryValue.of_option_name(
             OUTPUT_FROM_PROGRAM_OPTION_NAME,
             self._program.tokenization(),
         ),
         TokenSequence.new_line(),
         self._expectation.tokenization(),
     ])
Example 4
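Renders a delimited list of file specifications: the specs are interspersed with new lines, and optional new lines let the layout spec decide whether the content and the closing delimiter sit on their own lines. A sketch of the intersperse helper follows the example.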
 def tokenization(self) -> TokenSequence:
     file_specs = collection.intersperse_list(
         TokenSequence.new_line(),
         [fs.tokenization() for fs in self._files])
     return TokenSequence.concat([
         TokenSequence.singleton(self.delimiter__begin),
         TokenSequence.preceded_by_optional_new_line_if_non_empty(
             TokenSequence.concat(file_specs)),
         TokenSequence.optional_new_line(),
         TokenSequence.singleton(self.delimiter__end),
     ])
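collection.intersperse_list itself is not shown above; as a point of reference, here is a minimal sketch of what such a helper conventionally does (an assumption about its semantics, not its verified source):

 def intersperse_list(separator, items):
     # Place separator between consecutive items (hypothetical sketch).
     result = []
     for i, item in enumerate(items):
         if i > 0:
             result.append(separator)
         result.append(item)
     return result

 assert intersperse_list(0, [1, 2, 3]) == [1, 0, 2, 0, 3]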
Example 5
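A unit test pinning down the behavior the examples above rely on: a mandatory new line renders as '\n' regardless of whether the layout spec renders optional new lines as empty or as new lines.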
    def runTest(self):
        # ARRANGE #
        cases = [
            NameAndValue(
                'layout spec w optional-new line as empty',
                LAYOUT_SPEC__OPTIONAL_NEW_LINE_AS_EMPTY,
            ),
            NameAndValue(
                'layout spec w optional new-line as new-line',
                LAYOUT_SPEC__OPTIONAL_NEW_LINE_AS_NEW_LINE,
            ),
        ]

        tok_seq = TokenSequence.new_line()
        for case in cases:
            with self.subTest(case.name):
                # ACT #
                actual = tok_seq.layout(case.value)
                # ASSERT #
                self.assertEqual('\n', actual)
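To make the tokenization/layout contract concrete, here is a self-contained, simplified model of a token sequence. All names are hypothetical and the boolean LayoutSpec is a deliberate simplification; the real TokenSequence and layout-spec types are richer than this sketch.

 from typing import Callable, Sequence

 LayoutSpec = bool  # assumption: True renders optional new-lines as '\n'


 class Seq:
     """Simplified stand-in for TokenSequence (hypothetical)."""

     def __init__(self, render: Callable[[LayoutSpec], str]):
         self._render = render

     def layout(self, spec: LayoutSpec) -> str:
         return self._render(spec)

     @staticmethod
     def singleton(token: str) -> 'Seq':
         return Seq(lambda spec: token)

     @staticmethod
     def new_line() -> 'Seq':
         return Seq(lambda spec: '\n')

     @staticmethod
     def optional_new_line() -> 'Seq':
         # The layout spec decides whether this renders as '\n' or ''.
         return Seq(lambda spec: '\n' if spec else '')

     @staticmethod
     def concat(parts: Sequence['Seq']) -> 'Seq':
         return Seq(lambda spec: ''.join(p.layout(spec) for p in parts))


 seq = Seq.concat([Seq.singleton('begin'),
                   Seq.optional_new_line(),
                   Seq.singleton('end')])
 assert seq.layout(True) == 'begin\nend'
 assert seq.layout(False) == 'beginend'
 # Mirrors the test above: a mandatory new line is '\n' under both specs.
 assert Seq.new_line().layout(True) == '\n'
 assert Seq.new_line().layout(False) == '\n'

The design point the examples share: tokenization() builds a layout-independent structure, and layout(spec) makes all whitespace decisions in one place.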
Example 6
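Line continuation: emit the continuation token, then a mandatory new line, then the argument that continues on the next line.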
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(defs.CONTINUATION_TOKEN),
         TokenSequence.new_line(),
         self._argument_on_next_line.tokenization(),
     ])