Example #1
def tokenization(self) -> TokenSequence:
     file_specs = collection.intersperse_list(
         TokenSequence.new_line(),
         [fs.tokenization() for fs in self._files])
     return TokenSequence.concat([
         TokenSequence.singleton(self.delimiter__begin),
         TokenSequence.preceded_by_optional_new_line_if_non_empty(
             TokenSequence.concat(file_specs)),
         TokenSequence.optional_new_line(),
         TokenSequence.singleton(self.delimiter__end),
     ])
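
All of the examples on this page compose the same small set of TokenSequence combinators: singleton, concat, new_line, optional_new_line, and empty. TokenSequence itself is internal to the project these snippets come from, so the following is only a minimal, hypothetical sketch of how such combinators can flatten into source text; the class name TokenSeq, the render method, and the rendering rules are assumptions for illustration, not the project's implementation.

# Hypothetical sketch of TokenSequence-style combinators (an
# assumption for illustration -- NOT the project's implementation).
# Tokens are plain strings; newline tokens control line breaks when
# the sequence is rendered to source text.
from typing import List, Sequence

Token = str
NEW_LINE = '\n'


class TokenSeq:
    def __init__(self, tokens: List[Token]):
        self._tokens = tokens

    @staticmethod
    def empty() -> 'TokenSeq':
        return TokenSeq([])

    @staticmethod
    def singleton(token: Token) -> 'TokenSeq':
        return TokenSeq([token])

    @staticmethod
    def new_line() -> 'TokenSeq':
        return TokenSeq([NEW_LINE])

    @staticmethod
    def optional_new_line() -> 'TokenSeq':
        # Assumption: an "optional" break is rendered as a real newline.
        return TokenSeq([NEW_LINE])

    @staticmethod
    def concat(sequences: Sequence['TokenSeq']) -> 'TokenSeq':
        return TokenSeq([t for s in sequences for t in s._tokens])

    def render(self) -> str:
        # Separate adjacent non-newline tokens with a single space.
        out: List[str] = []
        for tok in self._tokens:
            if tok != NEW_LINE and out and out[-1] != NEW_LINE:
                out.append(' ')
            out.append(tok)
        return ''.join(out)


# Mirrors the shape of Example #1: a delimited listing of file specs.
file_specs = [TokenSeq.singleton('file-a'), TokenSeq.singleton('file-b')]
listing = TokenSeq.concat([
    TokenSeq.singleton('{'),
    TokenSeq.new_line(),
    TokenSeq.concat(file_specs),
    TokenSeq.new_line(),
    TokenSeq.singleton('}'),
])
print(listing.render())  # {\nfile-a file-b\n}
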
Example #2
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(reserved_words.PAREN_BEGIN),
         TokenSequence.optional_new_line(),
         self._element.tokenization(),
         self._end_paren()
     ])
Example #3
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(
             instruction_arguments.ASSIGNMENT_OPERATOR,
         ),
         self.value.tokenization(),
     ])
Example #4
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(file_matcher.PROGRAM_MATCHER_NAME),
         TokenSequence.optional_new_line(),
         abstract_syntaxes.OptionallyOnNewLine(self._path_argument_position).tokenization(),
         self._program.tokenization(),
     ])
Example #5
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(self._file_type),
         TokenSequence.optional_new_line(),
         self._file_name.tokenization(),
         self._contents.tokenization(),
     ])
Example #6
 def _variant_tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         self.var_name.tokenization(),
         TokenSequence.optional_new_line(),
         TokenSequence.singleton(defs.ASSIGNMENT_IDENTIFIER),
         TokenSequence.optional_new_line(),
         self.value.tokenization(),
     ])
Example #7
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         self._path.tokenization(),
         TokenSequence.optional_new_line(),
         TokenSequence.singleton(reserved_words.COLON),
         TokenSequence.optional_new_line(),
         self._matcher.tokenization(),
     ])
Example #8
def symbol_reference_followed_by_superfluous_string_on_same_line(
        symbol_name: str = 'STRING_MATCHER_SYMBOL_NAME',
) -> StringMatcherAbsStx:
    return CustomStringMatcherAbsStx(
        TokenSequence.concat([
            symbol_tok_seq.SymbolReferenceAsEitherPlainNameOrReferenceSyntax(
                symbol_name),
            TokenSequence.singleton('superfluous')
        ]))
Example #9
 def _file_matcher_tokens(self) -> TokenSequence:
     if not self._file_matcher:
         return TokenSequence.empty()
     else:
         return TokenSequence.concat([
             TokenSequence.singleton(':'),
             TokenSequence.optional_new_line(),
             self._file_matcher.tokenization(),
         ])
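
Example #9 shows a recurring pattern on this page: an optional part returns TokenSequence.empty() when it is absent, so the enclosing syntax can concatenate it unconditionally. A minimal sketch of the same idea using plain string lists; the function name and the ':' separator are illustrative assumptions, not the project's API.

# Sketch of the optional-part pattern: return an empty sequence when
# there is nothing to render, so callers can concatenate uniformly.
from typing import List, Optional


def file_matcher_tokens(matcher: Optional[str]) -> List[str]:
    if matcher is None:
        return []                 # empty sequence: contributes nothing
    return [':', matcher]         # separator token, then the matcher


print(file_matcher_tokens(None))          # []
print(file_matcher_tokens('type file'))   # [':', 'type file']
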
Example #10
 def test_fail_when_missing_end_paren(self):
     # ARRANGE #
     valid_string = str_abs_stx.StringLiteralAbsStx('contents')
     missing_end_paren = CustomAbsStx(
         TokenSequence.concat([
             TokenSequence.singleton('('),
             valid_string.tokenization(),
         ]))
     # ACT & ASSERT #
     parse_check.checker().check_invalid_syntax__abs_stx(
         self, OptionallyOnNewLine(missing_end_paren))
Example #11
 def test_fail_when_missing_end_paren(self):
     # ARRANGE #
     valid_program = ProgramOfSymbolReferenceAbsStx('PROGRAM_SYMBOL')
     missing_end_paren = CustomAbsStx(
         TokenSequence.concat([
             TokenSequence.singleton('('),
             valid_program.tokenization(),
         ])
     )
     # ACT & ASSERT #
     PARSE_CHECKER.check_invalid_syntax__abs_stx(
         self,
         OptionallyOnNewLine(missing_end_paren)
     )
Example #12
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(names.REPLACE_TRANSFORMER_NAME),
         TokenSequence.optional_new_line(),
         token_sequences.FollowedByOptionalNewLineIfNonEmpty(
             self.lines_filter.tokenization()
         ),
         token_sequences.FollowedByOptionalNewLineIfNonEmpty(
             token_sequences.OptionalOption.of_option_name(
                 names.PRESERVE_NEW_LINES_OPTION_NAME,
                 self.preserve_new_lines,
             )
         ),
         self.regex_token.tokenization(),
         TokenSequence.optional_new_line(),
         self.replacement_token.tokenization(),
     ])
Example #13
    def runTest(self):
        # ARRANGE #
        invalid_modification_types = [':', 2 * syntax.EXPLICIT_CREATE, 'text']

        for invalid_modification_type in invalid_modification_types:
            for file_type in abs_stx.FileType:
                missing_contents_file_spec = abs_stx.FileSpecAbsStx.of_file_type(
                    file_type, StringLiteralAbsStx('valid_file_name'),
                    abs_stx.CustomContentsAbsStx(
                        abs_stx.ContentsAbsStx.of_modification(
                            TokenSequence.singleton(invalid_modification_type),
                            CustomAbsStx.singleton(A_VALID_SYMBOL_NAME),
                        ),
                    ),
                )
                literal_syntax = abs_stx.LiteralFilesSourceAbsStx(
                    [missing_contents_file_spec])
                # ACT & ASSERT #
                integration_check.PARSE_CHECKER__FULL.check_invalid_syntax__abs_stx(
                    self,
                    literal_syntax,
                    sub_test_identifiers={
                        'file_type': file_type,
                        'invalid_modification_type': invalid_modification_type,
                    })
Example #14
 def of_str(value: str) -> StringMatcherAbsStx:
     return CustomStringMatcherAbsStx(TokenSequence.singleton(value))
Example #15
 def tokenization(self) -> TokenSequence:
     return TokenSequence.singleton(SectionName(self._name).syntax)
Example #16
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(matcher_options.EQUALS_ARGUMENT),
         TokenSequence.optional_new_line(),
         self._expected.tokenization(),
     ])
Example #17
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(matcher_options.EMPTY_ARGUMENT),
     ])
Example #18
 def singleton(token: Token) -> AbstractSyntax:
     return CustomAbstractSyntax(TokenSequence.singleton(token))
Example #19
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(names.RUN_PROGRAM_TRANSFORMER_NAME),
         TokenSequence.optional_new_line(),
         self._program.tokenization(),
     ])
Example #20
 def followed_by_str(first: AbstractSyntax,
                     following: str) -> AbstractSyntax:
     return SequenceAbsStx([
         first,
         CustomAbstractSyntax(TokenSequence.singleton(following)),
     ])
Example #21
 def tokenization(self) -> TokenSequence:
     return TokenSequence.singleton(self._token())
Example #22
 def followed_by_superfluous(first: AbstractSyntax) -> AbstractSyntax:
     return SequenceAbsStx([
         first,
         CustomAbstractSyntax(TokenSequence.singleton('superfluous')),
     ])
Example #23
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(matcher.RUN_PROGRAM),
         TokenSequence.optional_new_line(),
         self._program.tokenization(),
     ])
Example #24
 def preceded_by_str(first: str,
                     following: AbstractSyntax) -> AbstractSyntax:
     return SequenceAbsStx([
         CustomAbstractSyntax(TokenSequence.singleton(first)),
         following,
     ])
Example #25
 def of_option_name__str_arg(
     option: OptionName,
     value: str,
 ) -> 'OptionWMandatoryValue':
     return OptionWMandatoryValue.of_option_name(
         option, TokenSequence.singleton(value))
Example #26
 def _operator_tok_seq(self) -> TokenSequence:
     if self._allow_elements_on_separate_lines:
         return TokenSequence.sequence(
             (tokens.OPTIONAL_NEW_LINE, self._operator,
              tokens.OPTIONAL_NEW_LINE))
     else:
         return TokenSequence.singleton(self._operator)
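
Example #26 toggles between two layouts: when elements may sit on separate lines, the operator is padded with optional-newline tokens; otherwise it is emitted as a bare singleton. A hedged sketch of that toggle follows; the token value and function name are assumptions for illustration, not the project's API.

# Sketch of the layout toggle from Example #26.
from typing import List

OPTIONAL_NEW_LINE = '\n'  # assumption: optional breaks render as newlines


def operator_tokens(operator: str, allow_separate_lines: bool) -> List[str]:
    if allow_separate_lines:
        # Operator may be surrounded by line breaks.
        return [OPTIONAL_NEW_LINE, operator, OPTIONAL_NEW_LINE]
    # Operator must stay on the same line as its operands.
    return [operator]


print(operator_tokens('&&', True))    # ['\n', '&&', '\n']
print(operator_tokens('&&', False))   # ['&&']
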
Example #27
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(defs.CONTINUATION_TOKEN),
         TokenSequence.new_line(),
         self._argument_on_next_line.tokenization(),
     ])
Example #28
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(self._name),
         self._argument.tokenization(),
     ])
Example #29
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(logic.NOT_OPERATOR_NAME),
         self.operand.tokenization(),
     ])
Example #30
 def tokenization(self) -> TokenSequence:
     return TokenSequence.concat([
         TokenSequence.singleton(logic.NOT_OPERATOR_NAME),
         TokenSequence.optional_new_line(),
         self._matcher.tokenization(),
     ])