def runTest(self):
    """NEW_LINE_IF_NOT_FIRST_OR_LAST should render a new-line only when the
    token is neither in FIRST nor in LAST position."""
    # ARRANGE #
    the_optional_new_line = ['the', 'optional', 'new', 'line']
    spec = LayoutSpec(optional_new_line=the_optional_new_line)
    position_cases = (
        NIE(
            'first',
            input_value={TokenPosition.FIRST},
            expected_value=[],
        ),
        NIE(
            'last',
            input_value={TokenPosition.LAST},
            expected_value=[],
        ),
        NIE(
            'first and last',
            input_value={TokenPosition.FIRST, TokenPosition.LAST},
            expected_value=[],
        ),
        NIE(
            'neither first nor last',
            input_value=set(),
            expected_value=['\n'],
        ),
    )
    for position_case in position_cases:
        with self.subTest(position_case.name):
            # ACT #
            rendered = sut.NEW_LINE_IF_NOT_FIRST_OR_LAST.layout(
                spec, position_case.input_value)
            # ASSERT #
            self.assertEqual(position_case.expected_value, rendered)
def of_lines(on_separate_lines: Sequence[AbstractSyntax]) -> str:
    """Render each syntax element on its own line, using the default layout.

    :param on_separate_lines: elements to render, one per line
    :return: the rendered lines joined as file contents
    """
    default_layout = LayoutSpec.of_default()
    rendered_lines = []
    for element in on_separate_lines:
        rendered_lines.append(element.tokenization().layout(default_layout))
    return misc_formatting.lines_content(rendered_lines)
def runTest(self):
    """OPTIONAL_NEW_LINE should render exactly the spec's optional-new-line
    strings."""
    # ARRANGE #
    expected_new_line_strings = ['the', 'optional', 'new', 'line']
    spec = LayoutSpec(optional_new_line=expected_new_line_strings)
    # ACT #
    rendered = sut.OPTIONAL_NEW_LINE.layout(spec, {})
    # ASSERT #
    self.assertEqual(expected_new_line_strings, rendered)
def check__abs_stx(
        self,
        put: unittest.TestCase,
        source: AbstractSyntax,
        arrangement: Arrangement,
        expectation_: Expectation[T],
        layout: LayoutSpec = LayoutSpec.of_default(),
):
    """Render the abstract syntax with the given layout and run the
    parse-and-execution check on the result.

    NOTE(review): the default ``LayoutSpec.of_default()`` is evaluated once
    at definition time and shared across calls — presumably LayoutSpec is
    immutable, so this is harmless; confirm against its definition.
    """
    rendered = source.tokenization().layout(layout)
    parse_source = remaining_source(rendered)
    checker = _ParseAndExecutionChecker(put, arrangement, expectation_)
    checker.execute(self.parser, parse_source)
def check__abs_stx(
        self,
        put: unittest.TestCase,
        source: AbstractSyntax,
        input_: INPUT,
        arrangement: Arrangement,
        expectation: Expectation[PRIMITIVE, OUTPUT],
        layout_: LayoutSpec = LayoutSpec.of_default(),
):
    """Render the abstract syntax with the given layout and check it with a
    parse-and-execution checker configured from this object's settings."""
    rendered_source = remaining_source(
        source.tokenization().layout(layout_)
    )
    _ParseAndExecutionChecker(
        put,
        input_,
        self._parser,
        arrangement,
        self._configuration,
        self._check_application_result_with_tcds,
        expectation,
    ).check(rendered_source)
def runTest(self):
    """A string-transformer composition (without parentheses, on one line)
    after a program should be parsed as a simple expression: parsing stops
    at the first transformer, leaving the operator and the second operand
    unconsumed on the source line."""
    # ARRANGE #
    transformer = StringTransformerPrimitiveSymbolContext(
        'STRING_TRANSFORMER',
        string_transformers.to_uppercase())
    after_bin_op = 'after bin op'
    after_bin_op_syntax = CustomStringTransformerAbsStx.of_str(
        after_bin_op)
    # Composition of two transformers, rendered WITHOUT parentheses so that
    # only the first element should be consumed by the parser.
    composition_string_transformer = StringTransformerCompositionAbsStx(
        [transformer.abstract_syntax, after_bin_op_syntax],
        within_parens=False,
        allow_elements_on_separate_lines=False,
    )
    # After parsing, the operator and the second operand must remain.
    expected_source_after_parse = asrt_source.is_at_line(
        current_line_number=2,
        remaining_part_of_current_line=' '.join(
            [composition_string_transformer.operator_name(), after_bin_op]),
    )
    for pgm_and_args_case in pgm_and_args_cases.cases_w_and_wo_argument_list__including_program_reference(
    ):
        command_followed_by_transformer = FullProgramAbsStx(
            pgm_and_args_case.pgm_and_args,
            transformation=composition_string_transformer,
        )
        symbols = list(pgm_and_args_case.symbols) + [transformer]
        with self.subTest(command=pgm_and_args_case.name):
            source = remaining_source(
                command_followed_by_transformer.tokenization().layout(
                    LayoutSpec.of_default()))
            # ACT & ASSERT #
            # NOTE: the primitive-assertion lambda closes over the loop
            # variable, but it is consumed within this same iteration by
            # check(), so late binding is not an issue here.
            CHECKER_WO_EXECUTION.check(
                self, source, None,
                pgm_and_args_case.mk_arrangement(
                    SymbolContext.symbol_table_of_contexts(symbols)),
                Expectation(
                    ParseExpectation(
                        source=expected_source_after_parse,
                        symbol_references=SymbolContext.
                        references_assertion_of_contexts(symbols),
                    ),
                    # Only the FIRST transformer of the composition should
                    # appear in the parsed program.
                    primitive=lambda env: (asrt_pgm_val.matches_program(
                        asrt_command.
                        matches_command(driver=pgm_and_args_case.
                                        expected_command_driver(env),
                                        arguments=asrt.is_empty_sequence),
                        stdin=asrt_pgm_val.is_no_stdin(),
                        transformer=asrt.matches_singleton_sequence(
                            asrt.is_(transformer.primitive)),
                    ))))
def of(
        name: str,
        executable_file: PathAbsStx,
        arguments: Sequence[ArgumentAbsStx],
        expectation: ExpectationOnExeFile,
        source_after_parse: Assertion[ParseSource],
) -> 'Case':
    """Construct a Case whose source is the rendering (with the default
    layout) of a command line that executes the given file with the given
    arguments."""
    command_line_syntax = ProgramOfExecutableFileCommandLineAbsStx(
        executable_file,
        arguments,
    )
    rendered_source = command_line_syntax.tokenization().layout(
        LayoutSpec.of_default())
    return Case(
        name,
        rendered_source,
        expectation,
        source_after_parse,
    )
def check__abs_stx__wo_input(
        self,
        put: unittest.TestCase,
        source: AbstractSyntax,
        arrangement: Arrangement,
        expectation: Expectation[PRIMITIVE, OUTPUT],
        layout_: LayoutSpec = LayoutSpec.of_default(),
):
    """Convenience variant of :meth:`check__abs_stx` for checks that take
    no input (passes ``None`` as the input argument)."""
    self.check__abs_stx(put,
                        source,
                        None,
                        arrangement,
                        expectation,
                        layout_)
def test_string_transformer_should_be_parsed_as_simple_expression(self):
    """A transformer composition without parentheses, attached to a program
    string-source, should be parsed as a simple expression: the sequence
    operator and the second transformer must remain unconsumed."""
    the_layout = LayoutSpec.of_default()
    output_from_program = 'untransformed output from the program'
    sym_ref_program_syntax = ProgramOfSymbolReferenceAbsStx(
        'PROGRAM_THAT_EXECUTES_PY_FILE')
    # This transformer must NOT be consumed by the parse.
    str_trans__unused = StringTransformerSymbolReferenceAbsStx(
        'UNUSED_TRANSFORMER')
    program_w_complex_str_trans_wo_parentheses = program_abs_stx.FullProgramAbsStx(
        ProgramOfSymbolReferenceAbsStx(sym_ref_program_syntax.symbol_name),
        transformation=str_trans_abs_stx.
        StringTransformerCompositionAbsStx(
            [
                TO_UPPER_TRANSFORMER_SYMBOL.abstract_syntax,
                str_trans__unused,
            ],
            within_parens=False,
            allow_elements_on_separate_lines=False,
        ))
    # Expected leftover: the sequence operator followed by the unused
    # transformer reference, rendered with the same layout.
    expected_remaining_tokens = TokenSequence.concat([
        TokenSequence.singleton(
            str_trans_abs_stx.names.SEQUENCE_OPERATOR_NAME),
        str_trans__unused.tokenization(),
    ])
    expected_remaining_source = expected_remaining_tokens.layout(
        the_layout)
    checker = integration_check.checker__w_arbitrary_file_relativities()
    # Python program whose stdout/stderr are a known string; exit code 0.
    py_program_file = File(
        'program.py',
        py_programs.py_pgm_with_stdout_stderr_exit_code(
            exit_code=0,
            stdout_output=output_from_program,
            stderr_output=output_from_program,
        ),
    )
    py_file_rel_conf = rel_opt.conf_rel_any(RelOptionType.REL_HDS_CASE)
    py_file_conf = py_file_rel_conf.named_file_conf(py_program_file.name)
    program_symbol__that_executes_py_file = ProgramSymbolContext.of_sdv(
        sym_ref_program_syntax.symbol_name,
        program_sdvs.interpret_py_source_file_that_must_exist(
            py_file_conf.path_sdv))
    symbols = [
        program_symbol__that_executes_py_file,
        TO_UPPER_TRANSFORMER_SYMBOL,
    ]
    # Exercise every combination of captured output file and
    # exit-code handling.
    for output_file in ProcOutputFile:
        for ignore_exit_code in [False, True]:
            syntax = string_source_abs_stx.StringSourceOfProgramAbsStx(
                output_file,
                program_w_complex_str_trans_wo_parentheses,
                ignore_exit_code=ignore_exit_code,
            )
            with self.subTest(output_file=output_file,
                              ignore_exit_code=ignore_exit_code):
                checker.check__abs_stx__wo_input(
                    self,
                    syntax,
                    arrangement_w_tcds(
                        symbols=SymbolContext.symbol_table_of_contexts(
                            symbols),
                        tcds_contents=py_file_rel_conf.
                        populator_for_relativity_option_root(
                            DirContents([py_program_file]))),
                    Expectation(
                        ParseExpectation(
                            source=asrt_source.source_is_not_at_end(
                                remaining_source=asrt.equals(
                                    expected_remaining_source)),
                            symbol_references=SymbolContext.
                            references_assertion_of_contexts(symbols),
                        )),
                    the_layout,
                )
def runTest(self):
    """Table test of token-sequence layout rendering.

    Checks rendering of empty/whitespace/new-line tokens, LayoutAble
    objects, token-separator insertion, separator suppression around
    new-lines, and the token-position argument passed to LayoutAble:s.

    Fix: removed the doubled word in the case-name strings
    ("should not be be added" -> "should not be added"); these strings are
    only subTest display names, so behavior is otherwise unchanged.
    """
    optional_new_line = '<OPTIONAL-NEW-LINE>'
    token_sep = '<TOKEN-SEPARATOR>'
    layout = LayoutSpec(
        optional_new_line=(optional_new_line, ),
        symbol_reference_as_plain_symbol_name=False,
        token_separator=token_sep,
    )
    # Formatter for building expected strings from the layout markers.
    layout_formatter = StringFormatter({
        'optional_nl': optional_new_line,
        'token_sep': token_sep
    })
    cases = [
        Case(
            'empty',
            [],
            layout,
            '',
        ),
        Case(
            'single empty token',
            [''],
            layout,
            '',
        ),
        Case(
            'single white space token',
            [' '],
            layout,
            ' ',
        ),
        Case(
            'single new-line token',
            ['\n'],
            layout,
            '\n',
        ),
        Case(
            'single token with embedded new-line',
            ['before\nafter'],
            layout,
            'before\nafter',
        ),
        Case(
            'single non-empty/white space token',
            ['the-token'],
            layout,
            'the-token',
        ),
        Case(
            '{LayoutAble} objects should be rendered using their rendering method'
            .format(LayoutAble=LayoutAble),
            [_LayoutAbleThatGivesOptionalNewLineStrings()],
            layout,
            layout_formatter.format('{optional_nl}'),
        ),
        Case(
            'single empty layout-able',
            [_ConstLayoutAble([])],
            layout,
            '',
        ),
        Case(
            'single layout-able w single empty element',
            [_ConstLayoutAble([''])],
            layout,
            '',
        ),
        Case(
            'two tokens, should be separated by the token separator',
            ['t1', 't2'],
            layout,
            layout_formatter.format('t1{token_sep}t2'),
        ),
        Case(
            'empty tokens should be ignored',
            [
                '', 'T1', '', '', 'T2', '', 'T3', '',
                _ConstLayoutAble(['', 'T4', ''])
            ],
            layout,
            layout_formatter.format(
                'T1{token_sep}T2{token_sep}T3{token_sep}T4'),
        ),
        # Separator suppression: no separator next to a new-line,
        # whether the new-line is a whole token or embedded at an edge.
        Case(
            'the token separator should not be added beside new-line tokens',
            ['\n', 'T'],
            layout,
            '\nT',
        ),
        Case(
            'the token separator should not be added beside new-lines',
            ['S\n', 'T'],
            layout,
            'S\nT',
        ),
        Case(
            'the token separator should not be added beside new-lines',
            ['S', 'T\n', 'U'],
            layout,
            layout_formatter.format('S{token_sep}T\nU'),
        ),
        Case(
            'the token separator should not be added beside new-lines',
            ['T', '\n'],
            layout,
            'T\n',
        ),
        Case(
            'the token separator should not be added beside new-lines',
            ['T', '\nU'],
            layout,
            'T\nU',
        ),
        Case(
            'the token separator should not be added beside new-lines',
            ['\n', '\n'],
            layout,
            '\n\n',
        ),
        # Token-position argument: a lone token is both FIRST and LAST.
        Case(
            'token position / single token',
            [
                _LayoutAbleThatChecksTokenPositionArgument(
                    self, {TokenPosition.FIRST, TokenPosition.LAST}, ''),
            ],
            layout,
            '',
        ),
        # Token-position argument: only the edge tokens get a position flag.
        Case(
            'token position / multiple tokens',
            [
                _LayoutAbleThatChecksTokenPositionArgument(
                    self,
                    {TokenPosition.FIRST},
                    '',
                ),
                _LayoutAbleThatChecksTokenPositionArgument(
                    self,
                    set(),
                    '',
                ),
                'string-token',
                _LayoutAbleThatChecksTokenPositionArgument(
                    self,
                    set(),
                    '',
                ),
                _LayoutAbleThatChecksTokenPositionArgument(
                    self,
                    {TokenPosition.LAST},
                    '',
                ),
            ],
            layout,
            'string-token',
        ),
    ]
    for case in cases:
        with self.subTest(case=case.name, tokens=repr(case.tokens)):
            tok_seq = _ConstTokSeqTestImpl(case.tokens)
            # ACT #
            actual = tok_seq.layout(case.layout)
            # ASSERT #
            self.assertEqual(case.expected, actual)
# NOTE(review): this first ``layout`` appears to be the tail of an enclosing
# LayoutAble test-double class whose header lies above this chunk — confirm.
    def layout(
        self,
        spec: LayoutSpec,
        position: AbstractSet[TokenPosition],
    ) -> Sequence[str]:
        # Renders the fixed values regardless of spec and position.
        return self.values


class _LayoutAbleThatChecksTokenPositionArgument(LayoutAble):
    """LayoutAble test double that asserts on the token-position argument.

    Renders the given constant values, but first checks (via the given
    test case) that ``layout`` is invoked with exactly the expected set of
    token positions.
    """

    def __init__(self, put: unittest.TestCase,
                 expected: AbstractSet[TokenPosition],
                 values: Sequence[str]):
        # Test case used to report assertion failures.
        self.put = put
        # The token-position set layout() must be invoked with.
        self.expected = expected
        # The constant strings to render.
        self.values = values

    def layout(
        self,
        spec: LayoutSpec,
        position: AbstractSet[TokenPosition],
    ) -> Sequence[str]:
        self.put.assertEqual(self.expected, position, 'token position')
        return self.values


# Layout specs with a fixed single-space token separator; they differ only
# in how the optional new-line is rendered (nothing vs an actual new-line).
LAYOUT_SPEC__OPTIONAL_NEW_LINE_AS_EMPTY = LayoutSpec((), False, ' ')

LAYOUT_SPEC__OPTIONAL_NEW_LINE_AS_NEW_LINE = LayoutSpec(('\n', ), False, ' ')

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite())
def test_string_transformer_should_be_parsed_as_simple_expression(self):
    """A transformer composition without parentheses, applied to a
    file-based string source, should be parsed as a simple expression:
    only the first transformer is consumed, and the sequence operator plus
    the second transformer must remain in the source."""
    # ARRANGE #
    the_layout = LayoutSpec.of_default()
    src_rel_opt_conf = conf_rel_any(RelOptionType.REL_TMP)
    src_file = fs.File('source-file.txt', 'contents of source file')
    # Only the to-upper transformer should be applied to the contents.
    expected_contents = src_file.contents.upper()
    to_upper_transformer = StringTransformerSymbolContext.of_primitive(
        'TRANSFORMER_SYMBOL',
        string_transformers.to_uppercase(),
    )
    # This transformer must NOT be consumed by the parse.
    str_trans__unused = StringTransformerSymbolReferenceAbsStx('UNUSED_TRANSFORMER')
    transformation_w_infix_op = str_trans_abs_stx.StringTransformerCompositionAbsStx(
        [
            to_upper_transformer.abstract_syntax,
            str_trans__unused,
        ],
        within_parens=False,
        allow_elements_on_separate_lines=False,
    )
    # Expected leftover after the parse: the sequence operator followed by
    # the unused transformer reference.
    expected_remaining_tokens = TokenSequence.concat([
        TokenSequence.singleton(str_trans_abs_stx.names.SEQUENCE_OPERATOR_NAME),
        str_trans__unused.tokenization(),
    ])
    expected_remaining_source = expected_remaining_tokens.layout(the_layout)
    file_contents_syntax = string_source_abs_stx.TransformedStringSourceAbsStx(
        string_source_abs_stx.StringSourceOfFileAbsStx(
            src_rel_opt_conf.path_abs_stx_of_name(src_file.name)
        ),
        transformation_w_infix_op
    )
    checker = integration_check.checker(parse_check.rel_opts_conf_of_single(src_rel_opt_conf.relativity))
    # ACT & ASSERT #
    checker.check__abs_stx(
        self,
        file_contents_syntax,
        None,
        arrangement_w_tcds(
            tcds_contents=src_rel_opt_conf.populator_for_relativity_option_root(
                fs.DirContents([src_file])
            ),
            symbols=to_upper_transformer.symbol_table
        ),
        Expectation.of_prim__const(
            parse=ParseExpectation(
                source=asrt_source.source_is_not_at_end(
                    remaining_source=asrt.equals(expected_remaining_source)
                ),
                symbol_references=to_upper_transformer.references_assertion,
            ),
            primitive=asrt_string_source.pre_post_freeze__matches_str__const(
                expected_contents,
                may_depend_on_external_resources=True,
            )
        ),
        the_layout
    )
def parse_source_of__abs_stx(syntax: AbstractSyntax) -> ParseSource:
    """Render the abstract syntax with the default layout and wrap the
    result as a ParseSource."""
    rendered = syntax.tokenization().layout(LayoutSpec.of_default())
    return remaining_source(rendered)