def _expectation_type_tokens(self) -> TokenSequence:
    """Tokens preceding the matcher: empty for POSITIVE, else the negation operator."""
    if self._expectation_type is ExpectationType.POSITIVE:
        return TokenSequence.empty()
    # Negated expectation: emit '!' followed by an optional line break.
    return TokenSequence.sequence([
        logic.NOT_OPERATOR_NAME,
        layout.OPTIONAL_NEW_LINE,
    ])
def tokenization(self) -> TokenSequence:
    """Render '=' (with an optional following line break) and then the value."""
    assignment_prefix = TokenSequence.sequence([
        instruction_arguments.ASSIGNMENT_OPERATOR,
        layout.OPTIONAL_NEW_LINE,
    ])
    return TokenSequence.concat([
        assignment_prefix,
        self.value.tokenization(),
    ])
def tokenization(self) -> TokenSequence:
    """Wrap the plain expectation in parentheses, allowing optional line breaks."""
    parenthesized = [
        layout.OPTIONAL_NEW_LINE,
        reserved_words.PAREN_BEGIN,
        self._plain_expt,
        layout.OPTIONAL_NEW_LINE,
        reserved_words.PAREN_END,
    ]
    return TokenSequence.sequence(parenthesized)
def test_invalid_syntax(self):
    """A here-document whose end marker never appears must be rejected by the parser."""
    # The here-doc is started with marker 'marker', but the closing line
    # is 'non_marker', so the document is never terminated.
    unterminated_here_doc = TokenSequence.sequence([
        here_doc.here_doc_start_token('marker'),
        '\n',
        'contents',
        '\n',
        'non_marker',
    ])
    CHECKER.check_invalid_syntax(
        self,
        equivalent_source_variants__for_full_line_expr_parse__s__nsc,
        CustomAbsStx(unterminated_here_doc),
    )
def tokenization(self) -> TokenSequence:
    """The document token, followed by a newline unless it is first or last."""
    return TokenSequence.sequence([
        self._doc_token(),
        layout.NEW_LINE_IF_NOT_FIRST_OR_LAST,
    ])
def _type_and_sym_name_tokens(self) -> TokenSequence:
    """Tokens for '<type-identifier> <symbol-name>'."""
    type_identifier = ANY_TYPE_INFO_DICT[self.value_type].identifier
    return TokenSequence.sequence([type_identifier, self.symbol_name])
def tokenization(self) -> TokenSequence:
    """Render the constant-matcher keyword followed by the boolean literal."""
    boolean_literal = logic.BOOLEANS[self.constant]
    return TokenSequence.sequence([logic.CONSTANT_MATCHER, boolean_literal])
def _operator_tok_seq(self) -> TokenSequence:
    """The operator token, padded with optional new-lines when multi-line layout is allowed."""
    if self._allow_elements_on_separate_lines:
        return TokenSequence.sequence((
            tokens.OPTIONAL_NEW_LINE,
            self._operator,
            tokens.OPTIONAL_NEW_LINE,
        ))
    return TokenSequence.singleton(self._operator)
def sequence(tokens: Sequence[Token]) -> AbstractSyntax:
    """Abstract syntax that renders the given tokens verbatim."""
    token_sequence = TokenSequence.sequence(tokens)
    return CustomAbstractSyntax(token_sequence)
def _end_paren(self) -> TokenSequence:
    """Closing paren — forced onto its own line when configured, otherwise break is optional."""
    if self._end_paren_on_separate_lines:
        leading = layout.NEW_LINE
    else:
        leading = layout.OPTIONAL_NEW_LINE
    return TokenSequence.sequence([leading, reserved_words.PAREN_END])
def runTest(self):
    """sequence() should expose exactly the tokens it was constructed from."""
    expected = ['T1', 'T2']
    result = TokenSequence.sequence(expected).tokens
    self.assertEqual(expected, result)
def w_superfluous() -> StringSourceAbsStx:
    """Valid string-source syntax followed by a superfluous trailing token.

    Bug fix: 'superfluous' was passed as a bare str to
    TokenSequence.sequence, whose parameter is Sequence[Token]. Since a
    str is itself a sequence of 1-character strings, that rendered each
    character as a separate token ('s', 'u', 'p', ...). Wrap it in a
    list so it is rendered as the single token 'superfluous', matching
    every other call site in this file.
    """
    return CustomStringSourceAbsStx(
        TokenSequence.concat([
            StringSourceOfStringAbsStx.of_str('valid').tokenization(),
            TokenSequence.sequence(['superfluous']),
        ]))