def do_build_end_state_for_etc(self, eid):
    """Generate markdown directives for the given entity ID.

    Yields ('directives', <tuple>) then ('emissions', <list>).
    """
    listener, emissions = listener_and_emissions_for(self)
    collection = self.given_business_collection()
    adapter = self.given_adapter_module()
    directives = adapter.generate_markdown(collection, listener, NCID=eid)
    yield 'directives', tuple(directives)
    yield 'emissions', emissions
def end_state(self):
    """Build the big index from the fake collection and render its ASCII lines.

    Returns (output_lines_tuple, emissions_tuple).
    """
    listener, emissions = listener_and_emissions_for(self)
    collection = self.fake_collection
    big_index = collection.build_big_index_(listener)
    render = subject_function_for_ASCII()
    return tuple(render(big_index)), tuple(emissions)
def __init__(self, tc):
    """Sanitize a query built from the test case's tokens.

    Stores the sanitized result in `self.result` (None on failure) and the
    first emission's messages in `self.first_emission_messages` (or None).
    """
    listener, emissions = em.listener_and_emissions_for(tc, limit=1)
    tox = tc.given_tokens()
    from tag_lyfe.magnetics import query_via_token_stream as mag
    query_s = '\0'.join(tox)  # NULL_BYTE_
    itr = mag.MAKE_CRAZY_ITERATOR_THING(query_s)
    next(itr)  # ignore the model
    unsani = next(itr)
    x = unsani.sanitize(listener)
    if len(emissions):
        emi, = emissions
        self.first_emission_messages = tuple(emi.payloader())
    else:
        self.first_emission_messages = None
    # (fixed: the original branched on `x is None` but assigned `x` in
    # both branches — a plain assignment is equivalent)
    self.result = x
def build_end_state(self):
    """Run the subject case over conditions whose actions must never fire.

    Asserts the case returns None and returns the single emission's messages.
    """
    def when_chim_churry_is():
        yield condition_one, no_see
        yield condition_two, no_see
        yield condition_three, no_see

    def condition_one():
        pass

    def condition_two():
        pass

    def condition_three():
        pass

    def no_see():
        self.fail("no see")

    listener, emissions = em.listener_and_emissions_for(self)
    run_case = subject_fellow()(listener)
    result = run_case(when_chim_churry_is)
    assert result is None
    only_emission, = emissions
    return only_emission.to_messages()
def case_merge_fails_with_message_that_includes(self, needle):
    """Assert the merge fails (None result) and its first message contains `needle`."""
    listener, emissions = listener_and_emissions_for(self)
    result = self.run_merge_the_two_cases(listener)
    only_emission, = emissions
    message_lines = tuple(only_emission.payloader())  # multiple lines ok
    self.assertIn(needle, message_lines[0])
    self.assertIsNone(result)
def expect_input_error_structure(self):
    """Assert execution fails on the input-error channel; return the payloader."""
    listener, emissions = ts.listener_and_emissions_for(self, limit=1)
    self.assertIsNone(self.execute(listener))
    only_emission, = emissions
    self.assertSequenceEqual(
        only_emission.channel, ('error', 'structure', 'input_error'))
    return only_emission.payloader()
def big_patchfile(self):
    """Produce the big patchfile from business collection one; expect no emissions."""
    import modality_agnostic.test_support.common as em
    listener, emissions = em.listener_and_emissions_for(self)
    collection = business_collection_one()  # ..
    patchfile = self.given_work_that_produces_big_patchfile(collection, listener)
    assert not len(emissions)
    return patchfile
def end_state(self):
    # Traverse the fixture HTML document, collecting every entity dict and
    # the union of attribute keys seen; return both (plus any emissions)
    # bundled on a class-as-namespace "State" object.
    seen_attribute_keys = {}  # dict used as an ordered set of keys
    entity_dcts = []
    import modality_agnostic.test_support.common as em
    listener, _emissions = em.listener_and_emissions_for(self, limit=None)
    _ = _subject_module().open_traversal_stream(
        listener=listener,
        html_document_path=html_fixture('0130-tag-subtree.html'))
    with _ as dcts:
        for dct in dcts:
            for k in dct.keys():
                seen_attribute_keys[k] = None
            entity_dcts.append(dct)
    # (we can't assign lvars to lvalues of the same name in a class)
    seen_attribute_keys_ = tuple(sorted(seen_attribute_keys.keys()))

    class State:  # #class-as-namespace
        seen_attribute_keys = seen_attribute_keys_
        business_objects = tuple(entity_dcts)
        emissions = tuple(_emissions)

    return State
def _item_count_and_only_emission(self):
    """Exhaust the run's iterator; return (item count, channel, payloader)."""
    listener, emissions = em.listener_and_emissions_for(self, limit=1)
    item_count = sum(1 for _ in self._iterator_via_run(listener))
    only_emission, = emissions
    return item_count, only_emission.channel, only_emission.payloader
def end_state_emission(self):
    """Run the subject function expecting failure; return the single emission."""
    import modality_agnostic.test_support.common as em
    listener, emissions = em.listener_and_emissions_for(self)
    path, opener = self.given_path_and_fixture()
    result = subject_function(path, listener=listener, opn=opener)
    only_emission, = emissions
    assert result is None
    return only_emission
def _fail_against(self, s):
    """Build a collection from `s`, expecting failure; return the one message."""
    listener, emissions = em.listener_and_emissions_for(self, limit=1)
    collection = _build_collection(s, _yes_cheat, listener)
    assert collection is None
    only_emission, = emissions
    msg, = tuple(only_emission.payloader())  # assertion: exactly one message
    return msg
def go(self, rc):
    """Send the given request dict through the message broker; assert the status."""
    request = dict(self.given_request_dictionary())
    listener, emissions = listener_and_emissions_for(self)
    from pho.generation_service_.run_message_broker_via_config import \
        _response_dict_via_request_dict as func
    response = func(request, listener)
    if self.do_debug:
        print(f"DBG: {response!r}")
    self.assertEqual(response['status'], rc)
def end_state(self):
    """Dispatch 'CC' against a two-letter case table; record which branch ran.

    Returns EndState(emissions, recorded branch names, dispatch result).
    """
    recorded = []

    def when_two_letters_is():
        yield 'AA', lambda: recorded.append('aa')
        yield 'BB', lambda: recorded.append('bb')

    listener, emissions = em.listener_and_emissions_for(self)
    run_case = subject_fellow()(listener)
    result = run_case('CC', when_two_letters_is)
    return EndState(tuple(emissions), tuple(recorded), result)
def _failure_message_via_line(tc, upstream_s):
    """Run the subject function on one line, expecting failure; return its message."""
    import modality_agnostic.test_support.common as em
    listener, emissions = em.listener_and_emissions_for(tc, limit=1)
    result = subject_function()(line=upstream_s, listener=listener)
    assert result is None
    only_emission, = emissions
    assert only_emission.channel == ('error', 'expression')
    line1, = only_emission.payloader()
    return line1
def run_expecting_failure(self, error_case_name):
    """Run with the given tokens, expecting a parse error on the named case.

    Returns (channel, payloader) of the single emission.
    """
    import modality_agnostic.test_support.common as em
    listener, emissions = em.listener_and_emissions_for(self, limit=1)
    self.assertIsNone(self._do_run(listener, self.given_tokens()))
    only_emission, = emissions
    expected = ('error', 'structure', 'parse_error', error_case_name)
    self.assertSequenceEqual(expected, only_emission.channel)
    return only_emission.channel, only_emission.payloader
def test_rumskalla(self):
    """A header row without a trailing pipe must emit the 'endcap' complaint."""
    listener, emissions = em.listener_and_emissions_for(self, limit=2)
    lines = ("|I don't|have an|endcap\n",)
    with subject_function()(lines, listener) as ents:
        assert ents is None
    first_emission, *_ = emissions
    expected = ('header row 1 must have "endcap" (trailing pipe)',)
    self.assertSequenceEqual(first_emission.to_messages(), expected)
def build_end_state(self):
    """Prepare an entity edit against the notecards collection.

    Returns EndState(edits, emissions).
    """
    import modality_agnostic.test_support.common as em
    listener, emissions = em.listener_and_emissions_for(self)
    notecards = notecards_via_collection(self.collection())

    def perform(eid, cud_tups):
        identifier = ('update_entity', eid)
        return notecards._prepare_edit(identifier, cud_tups, listener)

    edits = self.perform(perform)
    return EndState(edits, tuple(emissions))
def you_must_choose_one(self):
    """Generating markdown over multiple node trees must emit a choose-one error."""
    listener, emissions = listener_and_emissions_for(self)
    collection = business_collection_ONE()  # ..
    generated = self.given_adapter_module().generate_markdown(collection, listener)
    result = tuple(generated)
    assert 'adapter_error' == result[0][0]
    only_emission, = emissions
    assert 'multiple_node_trees' == only_emission.channel[-1]
    expected = 'Multiple node trees, choose one:', 'A, B, C.'
    self.assertSequenceEqual(only_emission.to_messages(), expected)
def end_state(self):
    """Run and report either the single emission or the result value.

    Returns a dict: {'did_emit': True, 'channel': ..., 'payload': ...}
    when something was emitted, otherwise
    {'did_emit': False, 'result_value': ...}.
    """
    listener, emissions = em.listener_and_emissions_for(self, limit=1)
    result = self.given_run(listener)
    if len(emissions):
        only_emission, = emissions
        return {
            'did_emit': True,
            'channel': only_emission.channel,
            'payload': only_emission.payloader(),
        }
    return {'did_emit': False, 'result_value': result}
def channel_and_lines(self):
    """Render a template whose variables miss the data source; expect failure.

    Returns (channel, payload lines) of the single emission.
    """
    listener, emissions = em.listener_and_emissions_for(self, limit=1)
    result = _subject_function()(
        data_source={'no': 'see'},
        template_big_string="hello $fn $ln\n",
        data_source_key_via_template_variable_name=lambda x: f'Q{x}',
        listener=listener)
    self.assertIsNone(result)
    only_emission, = emissions
    return only_emission.channel, tuple(only_emission.payloader())
def _execute_while_listening(self, **kwargs):
    """Look up key-and-entity in the collection; return (result, messages)."""
    collection = self._collection()
    needle = self._needle_function()
    listener, emissions = em.listener_and_emissions_for(self, limit=1)
    result = subject_module().key_and_entity_via_collection(
        collection=collection,
        needle_function=needle,
        listener=listener,
        **kwargs)
    only_emission, = emissions
    return (result, tuple(only_emission.payloader()))
def build_end_state(self):
    """Run the records-via-query function over a single fake readme.

    Returns EndState(emissions, records tuple or None).
    """
    # These
    uqt = self.given_user_query_tokens()
    sb = self.given_sort_by()

    # (fixed: this import originally appeared *after* the `opn` closure
    # that references it — it only worked via late binding; hoisted above
    # its first lexical use)
    from contextlib import nullcontext

    # Prepare the readmes (just busy-work to accord with the batch feat.)
    def opn(path):
        assert 'pretend-readme.md' == path
        assert opn.called_zero_times  # guard: the file is opened exactly once
        opn.called_zero_times = False
        tail_lines = self.given_entity_lines()
        all_lines = self.given_all_lines()
        use_lines = lines_via(tail_lines, all_lines)
        return nullcontext(use_lines)

    opn.called_zero_times = True
    readmes = nullcontext(('pretend-readme.md', ))

    # Listener boilerplate
    listener, emissions = listener_and_emissions_for(self)

    # Execute
    from pho._issues import records_via_query_ as func
    itr = func(opened=readmes, user_query_tokens=uqt, sort_by=sb,
               do_batch=None, opn=opn, listener=listener)
    future = next(itr)  # #provision [#883.E]
    if future is None:
        result_records_tuple = None
    else:
        result_records_tuple = tuple(itr)
    return EndState(tuple(emissions), result_records_tuple)
def end_state(self):
    # Traverse the fixture HTML, grouping item texts under their preceding
    # header into _Section objects; return (emissions, sections) on a
    # result object.
    import modality_agnostic.test_support.common as em
    use_listener, emissions = em.listener_and_emissions_for(
        self, limit=None)  # noqa: E501
    sections = []

    def store_previous_initially():
        # First header seen: there is no previous section to store, so just
        # switch the strategy to the normal one for subsequent headers.
        state.store_previous = store_previous_normally

    class state:  # #class-as-namespace
        store_previous = store_previous_initially

    def store_previous_normally():
        sections.append(section)

    class _Section:
        def __init__(self, s):
            self.item_strings = []
            self.header_content = s

    _cm = _subject_module().open_traversal_stream(
        listener=use_listener,
        html_document_path=html_fixture('0120-real-subtree.html'))
    with _cm as dcts:
        for dct in dcts:
            if 'header_level' in dct:
                state.store_previous()
                section = _Section(dct['header_content'])
            else:
                # NOTE(review): assumes an item dict never precedes the
                # first header — otherwise `section` is unbound. Confirm
                # against the fixture document.
                section.item_strings.append(dct['text'])
    # flush the final (still-pending) section
    store_previous_normally()

    class _State:
        def __init__(self, em_tup, sect_tup):
            self.emissions = em_tup
            self.sections = sect_tup

    return _State(tuple(emissions), tuple(sections))
def end_state_keys_and_values(self): listener, emis = listener_and_emissions_for(self) # Hack a builder function that you inspect the result from def fsr(ci): my_reader = sm.Caching_FS_Reader_(ci, max_num_lines_to_cache=mn) yuck.append(my_reader) yuck.append(self.take_snapshot_before(my_reader)) return my_reader yuck = [] mn = self.given_max_num_lines_to_cache() sm = subject_module() # Build the collection coll_path = this_one_dir() from kiss_rdb.storage_adapters_.eno import \ mutable_eno_collection_via as func coll = func(coll_path, fsr=fsr) rest = () if (eid := self.given_EID()) is None else (eid, ) for k, v in self.given_performance(listener, coll, *rest): yield k, v my_reader, before = yuck after = self.take_snapshot_after(my_reader) do_BA = (before is not None or after is not None) if do_BA: yield 'before', before if 0 != len(emis): yield 'emissions', tuple(emis) if do_BA: yield 'after', after
def build_end_state(self):
    # Run the SUT against a fake filesystem, reporting the result value
    # plus the single emission (if any) as a dict.
    # The arguments to the SUT function: 1) coll path 2) listener 3) opn

    # Prepare the path argument
    # rather than memoize the filesystem for every case just for the
    # one case that does this one thing, we do this
    fs = self.given_fake_filesystem()
    assert fs
    if hasattr(self, 'given_path'):
        path = self.given_path()
    else:
        path = fs.first_path

    # Prepare the listener argument
    listener, emissions = em.listener_and_emissions_for(self, limit=1)

    # Prepare the `opn` argument
    def opn(path_arg, mode=None):
        if mode:
            # a write was requested: return a pass-through context manager
            # instead of touching any real file
            msg = "we assume you aren't actually writing.."
            return pass_thru_cm((msg, path))
        return fs.open_file_for_reading(path_arg)

    # Execute
    rv = self.given_run(listener, path, opn)

    # Finish by assembling the result structure
    dct = {'end_state_result': rv}
    if len(emissions):
        dct['did_emit'] = True
        emi, = emissions
        dct['channel'], pay = emi.channel, emi.payloader
        dct['payload'] = pay()
    else:
        dct['did_emit'] = False
    return dct
def build_end_state(self):
    # Diff the GraphViz-derived abstract schema against the sqlite-derived
    # one; when they differ, generate the reconciling SQL lines.
    # Returns (lines_or_empty_tuple, emissions).
    listener, emissions = listener_and_emissions_for(self)
    GV_lines = unindent_big_string(
        self.given_graph_viz_indented_big_string())  # noqa: E501
    DB_lines = unindent_big_string(
        self.given_sqlite_schema_indented_big_string())  # noqa: E501
    from kiss_rdb.storage_adapters.sqlite3.connection_via_graph_viz_lines \
        import _abs_schema_via_graph_viz as GV_abs_sch_via
    from kiss_rdb.storage_adapters_.sqlite3._abstract_schema_to_and_fro \
        import abstract_schema_via_sqlite_SQL_lines as DB_abs_sch_via
    GV_absch = GV_abs_sch_via(GV_lines, listener=None)
    DB_absch = DB_abs_sch_via(DB_lines)
    d = DB_absch.schema_diff_to(GV_absch)
    if d is None:
        # schemas already agree: nothing to generate
        return (), emissions
    from kiss_rdb.storage_adapters.sqlite3.connection_via_graph_viz_lines \
        import _SQL_lineses as subject_function
    res = subject_function(d, '/pretend/db.sqlite3',
                           pretend_FH_just_for_name, listener,
                           create_tables_if_not_exist=True,
                           strange_tables_are_OK=False)
    if hasattr(res, '__next__'):
        # (it's a "lineses") — flatten the nested groups of lines
        res = tuple(line for lines in res for line in lines)
    return res, emissions
def build_end_state(self):
    """Run the experimental query builder over the given tokens."""
    listener, emissions = listener_and_emissions_for(self)
    from tag_lyfe.magnetics.query_via_token_stream import \
        EXPERIMENTAL_NEW_WAY as func
    result = func(self.given_tokens(), listener)
    return EndState(tuple(emissions), result)
def failure_triple_given_run(self):
    """Parse the given lines expecting failure.

    Returns (result, channel, payloader) from the single emission.
    """
    listener, emissions = em.listener_and_emissions_for(self, limit=1)
    result = _doc_entity_via_lines(self.given_lines(), listener)
    only_emission, = emissions
    return (result, only_emission.channel, only_emission.payloader)
def build_state_expecting_some_emssions(self, path):
    # (name kept as-is for callers, typo and all)
    """Run against `path`, expecting exactly one emission; bundle into EndState."""
    listener, emissions = em.listener_and_emissions_for(self, limit=1)
    attr_dcts, key = self.my_run(path, listener)
    only_emission, = emissions
    return EndState(
        attr_dcts, key, ((only_emission.channel, only_emission.payloader), ))
def perform(self):
    """Walk nodes in order from the start node.

    Returns (nodes tuple or falsy passthrough, emissions tuple or None).
    """
    start_eid = self.given_start_node()
    listener, emissions = listener_and_emissions_for(self)
    collection = self.given_notecards_collection()
    nodes = nodes_in_order_via(start_eid, collection, listener)
    return (nodes and tuple(nodes)), (tuple(emissions) if emissions else None)