Example #1
def tokenization(self) -> TokenSequence:
    file_specs = collection.intersperse_list(
        TokenSequence.new_line(),
        [fs.tokenization() for fs in self._files])
    return TokenSequence.concat([
        TokenSequence.singleton(self.delimiter__begin),
        TokenSequence.preceded_by_optional_new_line_if_non_empty(
            TokenSequence.concat(file_specs), ),
        TokenSequence.optional_new_line(),
        TokenSequence.singleton(self.delimiter__end),
    ])
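
Note: Example #1 joins the per-file token sequences with collection.intersperse_list, which places a separator between consecutive elements. The helper below is an illustrative reimplementation of that pattern, not the library's own code:

from typing import List, TypeVar

T = TypeVar('T')

def intersperse_list(separator: T, elements: List[T]) -> List[T]:
    # Place the separator between consecutive elements, as Example #1
    # does to put a new-line token between file specs.
    result: List[T] = []
    for i, element in enumerate(elements):
        if i > 0:
            result.append(separator)
        result.append(element)
    return result

assert intersperse_list('NL', ['a', 'b', 'c']) == ['a', 'NL', 'b', 'NL', 'c']
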
Example #2
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(file_matcher.PROGRAM_MATCHER_NAME),
        TokenSequence.optional_new_line(),
        abstract_syntaxes.OptionallyOnNewLine(self._path_argument_position).tokenization(),
        self._program.tokenization(),
    ])
Example #3
def tokenization(self) -> TokenSequence:
    element_tokens = self._syntax.tokenization()
    return (TokenSequence.empty()
            if not element_tokens else TokenSequence.concat([
                TokenSequence.optional_new_line(),
                element_tokens,
            ]))
Example #4
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(reserved_words.PAREN_BEGIN),
        TokenSequence.optional_new_line(),
        self._element.tokenization(),
        self._end_paren()
    ])
Example #5
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.optional_new_line(),
        self._expectation_type_tokens(),
        self._path.tokenization(),
        self._file_matcher_tokens()
    ])
Example #6
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(
            instruction_arguments.ASSIGNMENT_OPERATOR,
        ),
        self.value.tokenization(),
    ])
Example #7
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(self._file_type),
        TokenSequence.optional_new_line(),
        self._file_name.tokenization(),
        self._contents.tokenization(),
    ])
Example #8
def _transformation_tokenizer(transformer: TokenSequence) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.new_line(),
        token_sequences.OptionWMandatoryValue.of_option_name(
            syntax_elements.WITH_TRANSFORMED_CONTENTS_OPTION_NAME,
            transformer,
        ),
    ])
Example #9
def _stdin_tokenizer(string_source: TokenSequence) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.new_line(),
        token_sequences.OptionWMandatoryValue.of_option_name(
            syntax_elements.STDIN_OPTION_NAME,
            string_source,
        ),
    ])
Example #10
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        self._pgm_and_args.tokenization(),
        OptionalAbsStx(self._stdin,
                       self._stdin_tokenizer).tokenization(),
        OptionalAbsStx(self._transformation,
                       self._transformation_tokenizer).tokenization(),
    ])
Example #11
def _variant_tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        self.var_name.tokenization(),
        TokenSequence.optional_new_line(),
        TokenSequence.singleton(defs.ASSIGNMENT_IDENTIFIER),
        TokenSequence.optional_new_line(),
        self.value.tokenization(),
    ])
Example #12
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        self._path.tokenization(),
        TokenSequence.optional_new_line(),
        TokenSequence.singleton(reserved_words.COLON),
        TokenSequence.optional_new_line(),
        self._matcher.tokenization(),
    ])
Example #13
def symbol_reference_followed_by_superfluous_string_on_same_line(
    symbol_name: str = 'STRING_MATCHER_SYMBOL_NAME', ) -> StringMatcherAbsStx:
    return CustomStringMatcherAbsStx(
        TokenSequence.concat([
            symbol_tok_seq.SymbolReferenceAsEitherPlainNameOrReferenceSyntax(
                symbol_name),
            TokenSequence.singleton('superfluous')
        ]))
Example #14
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.sequence([
            instruction_arguments.ASSIGNMENT_OPERATOR,
            layout.OPTIONAL_NEW_LINE,
        ]),
        self.value.tokenization(),
    ])
Example #15
def _file_matcher_tokens(self) -> TokenSequence:
    if not self._file_matcher:
        return TokenSequence.empty()
    else:
        return TokenSequence.concat([
            TokenSequence.singleton(':'),
            TokenSequence.optional_new_line(),
            self._file_matcher.tokenization(),
        ])
Example #16
def of_modification(
    modification: TokenSequence,
    contents: AbstractSyntax,
) -> TokenSequence:
    return TokenSequence.concat([
        modification,
        TokenSequence.optional_new_line(),
        contents.tokenization()
    ])
Example #17
def runTest(self):
    tokens_s = ['S1', 'S2']
    tokens_t = ['T1', 'T2']
    concat = TokenSequence.concat([
        _ConstTokSeqTestImpl(tokens_s),
        _ConstTokSeqTestImpl(tokens_t),
    ])
    actual = concat.tokens
    self.assertEqual(tokens_s + tokens_t, actual)
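
Note: Example #17 is the one snippet that pins down the semantics of TokenSequence.concat directly: the tokens of a concatenation are the element token lists joined in order. The sketch below reproduces that contract in isolation; TokenSeq and FixedTokens are hypothetical stand-ins for illustration, not the library's classes:

from abc import ABC, abstractmethod
from typing import List, Sequence

class TokenSeq(ABC):
    # Hypothetical stand-in for the library's TokenSequence.

    @property
    @abstractmethod
    def tokens(self) -> List[str]:
        ...

    @staticmethod
    def concat(parts: Sequence['TokenSeq']) -> 'TokenSeq':
        # The result's tokens are the parts' token lists, joined in order.
        return FixedTokens([t for part in parts for t in part.tokens])

class FixedTokens(TokenSeq):
    # Constant token sequence, playing the role of _ConstTokSeqTestImpl above.
    def __init__(self, tokens: List[str]):
        self._tokens = list(tokens)

    @property
    def tokens(self) -> List[str]:
        return self._tokens

assert TokenSeq.concat([FixedTokens(['S1', 'S2']),
                        FixedTokens(['T1', 'T2'])]).tokens == ['S1', 'S2', 'T1', 'T2']
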
Example #18
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        self.transformed.tokenization(),
        TokenSequence.optional_new_line(),
        token_sequences.OptionWMandatoryValue.of_option_name(
            string_transformer.WITH_TRANSFORMED_CONTENTS_OPTION_NAME,
            self.transformer.tokenization(),
        ),
    ])
Example #19
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.optional_new_line(),
        OptionWMandatoryValue.of_option_name(
            OUTPUT_FROM_PROGRAM_OPTION_NAME,
            self._program.tokenization(),
        ),
        TokenSequence.new_line(),
        self._expectation.tokenization(),
    ])
Example #20
def test_fail_when_missing_end_end_paren(self):
    # ARRANGE #
    valid_string = str_abs_stx.StringLiteralAbsStx('contents')
    missing_end_paren = CustomAbsStx(
        TokenSequence.concat([
            TokenSequence.singleton('('),
            valid_string.tokenization(),
        ]))
    # ACT & ASSERT #
    parse_check.checker().check_invalid_syntax__abs_stx(
        self, OptionallyOnNewLine(missing_end_paren))
Example #21
def test_fail_when_missing_end_end_paren(self):
    # ARRANGE #
    valid_program = ProgramOfSymbolReferenceAbsStx('PROGRAM_SYMBOL')
    missing_end_paren = CustomAbsStx(
        TokenSequence.concat([
            TokenSequence.singleton('('),
            valid_program.tokenization(),
        ])
    )
    # ACT & ASSERT #
    PARSE_CHECKER.check_invalid_syntax__abs_stx(
        self,
        OptionallyOnNewLine(missing_end_paren)
    )
Example #22
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(names.REPLACE_TRANSFORMER_NAME),
        TokenSequence.optional_new_line(),
        token_sequences.FollowedByOptionalNewLineIfNonEmpty(
            self.lines_filter.tokenization()
        ),
        token_sequences.FollowedByOptionalNewLineIfNonEmpty(
            token_sequences.OptionalOption.of_option_name(
                names.PRESERVE_NEW_LINES_OPTION_NAME,
                self.preserve_new_lines,
            )
        ),
        self.regex_token.tokenization(),
        TokenSequence.optional_new_line(),
        self.replacement_token.tokenization(),
    ])
Example #23
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(matcher_options.EMPTY_ARGUMENT),
    ])
Example #24
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.optional_new_line(),
        self._expectation.tokenization(),
    ])
Example #25
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat(
        [argument.tokenization() for argument in self._arguments])
Example #26
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(defs.CONTINUATION_TOKEN),
        TokenSequence.new_line(),
        self._argument_on_next_line.tokenization(),
    ])
Example #27
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(matcher.RUN_PROGRAM),
        TokenSequence.optional_new_line(),
        self._program.tokenization(),
    ])
Example #28
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(matcher_options.EQUALS_ARGUMENT),
        TokenSequence.optional_new_line(),
        self._expected.tokenization(),
    ])
Example #29
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        TokenSequence.singleton(names.RUN_PROGRAM_TRANSFORMER_NAME),
        TokenSequence.optional_new_line(),
        self._program.tokenization(),
    ])
Example #30
def tokenization(self) -> TokenSequence:
    return TokenSequence.concat([
        self._type_and_sym_name_tokens(),
        abstract_syntaxes.AssignmentOfOptionalValue(
            self.value).tokenization(),
    ])
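
Note: A pattern that recurs across these examples is the optional component: an absent part contributes TokenSequence.empty() (Example #15) or is wrapped by OptionalAbsStx (Example #10), so TokenSequence.concat can treat present and absent parts uniformly. A minimal sketch of that idea, with the optional_tokens helper invented here for illustration:

from typing import Callable, List, Optional, TypeVar

T = TypeVar('T')

def optional_tokens(component: Optional[T],
                    tokenizer: Callable[[T], List[str]]) -> List[str]:
    # An absent optional component contributes no tokens, so it is a
    # neutral element under concatenation.
    return [] if component is None else tokenizer(component)

# Mirrors Example #15: emit ':' plus the matcher's tokens only when present.
assert optional_tokens(None, lambda m: [':', m]) == []
assert optional_tokens('MATCHER', lambda m: [':', m]) == [':', 'MATCHER']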