def work():
    ps = self.producer_script
    far_sorted = ps.stream_for_sync_is_alphabetized_by_key_for_sync
    near_keyerer = ps.near_keyerer
    opened = ps.open_traversal_stream(listener, cached_document_path)
    with opened as far_dcts:
        stream_for_sync = ps.stream_for_sync_via_stream(far_dcts)
        from data_pipes.magnetics.flat_map_via_far_collection import \
            flat_map_via_producer_script as func
        flat_map = func(
            stream_for_sync,
            stream_for_sync_is_alphabetized_by_key_for_sync=far_sorted,
            preserve_freeform_order_and_insert_at_end=False,
            build_near_sync_keyer=near_keyerer,  # [#459.R]
        )
        if do_diff:
            m = 'DIFF_LINES_VIA'
        else:
            m = 'NEW_LINES_VIA'
        with self.sync_agent.open_sync_session() as sess:
            if sess is None:
                return
            olines = getattr(sess, m)(flat_map, listener)
            for line in olines:
                yield line

def _do_CLI(sin, sout, serr, near_fmt, do_diff, near_coll, ps_path, efx):
    if 'help' == near_fmt:
        from data_pipes.cli import SPLAY_FORMAT_ADAPTERS__ as func
        return func(sout, serr)
    if efx:
        xx('oh interesting see [#605.6]')
    else:
        from data_pipes.cli import external_functions_via_stderr_ as func
        efx = func(serr)
    mon = efx.produce_monitor()
    sout_lines = _stdout_lines_from_sync(
        near_coll, ps_path, mon.listener, do_diff, near_fmt)
    for line in sout_lines:
        sout.write(line)
    return mon.exitstatus

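# Hypothetical wiring sketch, not part of this module's real call path: it
# assumes `TERMINAL_PARSE_ARGS` (below) and `_do_CLI` are reachable together,
# and the option names ('near_format', 'do_diff', 'near_collection',
# 'producer_script_path') are illustrative guesses, not the real formals.
# `bash_argv` carries the program name last, since the parser pops it first.

def _hypothetical_main(argv):
    import sys
    bash_argv = list(reversed(argv))  # program name ends up last, per pop()
    vals, foz, rc = TERMINAL_PARSE_ARGS(sys.stderr, bash_argv)
    if vals is None:
        return rc  # parse failed or early exit; already written to stderr
    return _do_CLI(
        sys.stdin, sys.stdout, sys.stderr,
        vals.get('near_format'), vals.get('do_diff'),
        vals.get('near_collection'), vals.get('producer_script_path'),
        None)  # no external functions passed; `_do_CLI` builds its own
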
def resolve_near_collection():
    from data_pipes import meta_collection_ as func
    mcoll = func()
    near_coll = mcoll.collection_via_path(
        collection_path=near_coll_path, format_name=near_format,
        opn=opn, listener=listener)
    set_or_stop('near_coll', near_coll)

def TERMINAL_PARSE_ARGS(serr, bash_argv):
    prog_name = bash_argv.pop()
    from data_pipes.cli import formals_via_ as func
    foz = func(_formals(), lambda: prog_name)
    vals, rc = foz.terminal_parse(serr, bash_argv)
    if vals is None:
        return None, None, rc
    return vals, foz, None  # track [#459.O]

def funky(same_schema, in_ents):
    func = _work_module().entities_and_statser_via_entities_and_query
    out_ents, statser = func(in_ents, query)

    class summary:  # #class-as-namespace
        def to_lines():
            return _summarize_search_stats(statser())

    return same_schema, out_ents, lambda: summary

def NONTERMINAL_PARSE_ARGS(serr, bash_argv):
    prog_name = bash_argv.pop()
    from data_pipes.cli import formals_via_ as func
    foz = func(_formals(), lambda: prog_name)
    vals, rc = foz.nonterminal_parse(serr, bash_argv)
    # #track [#459.O]: resulting in *three* args below
    if vals is None:
        return None, None, rc
    return vals, foz, None

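# A minimal caller-side sketch (assumed, not taken from the real call sites)
# of the three-value contract noted in the [#459.O] comment above: on a parse
# failure only the exit code is populated; on success the parsed values plus
# the formals object ("foz") come back so the caller can render help with it.
# The 'help' key and `write_help_into` mirror their use in
# BUILD_COLLECTION_MAPPER below; `doc` stands in for a subcommand docstring.

def _hypothetical_nonterminal_caller(serr, bash_argv, doc):
    vals, foz, rc = NONTERMINAL_PARSE_ARGS(serr, bash_argv)
    if vals is None:
        return rc  # parse failed or early exit; already written to stderr
    if vals.get('help'):
        return foz.write_help_into(serr, doc)
    return vals  # .. otherwise dispatch on the parsed values
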
def normalize(fmt, arg, sin_sout, o):
    from data_pipes.cli import normalize_collection_reference_ as func
    return func(sin_sout, fmt, arg, o.STDIN_STDOUT, o.arg, error)

def BUILD_COLLECTION_MAPPER(stderr, vals, foz, rscser):
    """Harness the power of pipes…"""  # ..

    """EXPERIMENTAL. Created as a cheap-and-easy way to create and populate
    a collection with a producer script or similar.

    With FROM_COLLECTION of "-", lines of the indicated format are read
    from STDIN. With TO_COLLECTION of "-", lines of the specified format
    are written to STDOUT.

    Only certain formats are available for certain cases; for example an
    output of "-" is available only for single-file formats. At this
    writing the only participating formats are CSV and JSON.
    """

    if vals.get('help'):
        doc = BUILD_COLLECTION_MAPPER.__doc__
        rc = foz.write_help_into(stderr, doc)
        return None, rc

    to_collection = vals.pop('to_collection')
    to_format_default = None
    if '-' == to_collection:
        to_format_default = 'json'  # experiment
    to_format = vals.pop('to_format', to_format_default)
    assert not vals

    def collection_mapper_via_sout(sout):
        def map_collection(schema, ents):
            try:
                return do_map_collection(sout, schema, ents)
            except stop:
                return 9876
        return map_collection

    def do_map_collection(sout, schema, ents):
        with open_the_output_collection_for_writing(sout) as receiver:
            receiver.receive_schema_and_entities(schema, ents, listener)
        return mon.returncode

    def open_the_output_collection_for_writing(stdout):
        fmt, x = normalize(to_format, to_collection, stdout, _to_monikers)
        coll = resolve_collection(fmt, x)
        return coll.open_collection_to_write_given_traversal(
            throwing_listener)

    def resolve_collection(fmt, arg):
        return collib.collection_via_path(
            arg, throwing_listener, format_name=fmt)

    def normalize(fmt, arg, sin_sout, o):
        from data_pipes.cli import normalize_collection_reference_ as func
        return func(sin_sout, fmt, arg, o.STDIN_STDOUT, o.arg, error)

    # == Listeners and related

    def throwing_listener(sev, *rest):
        listener(sev, *rest)
        if 'error' == sev:
            raise stop()

    def error(msg):
        stderr.write(''.join((msg, '\n')))  # I must be [#605.2]
        raise stop()

    class stop(RuntimeError):
        pass

    # == Smalls and go!

    from data_pipes import meta_collection_ as func
    collib = func()
    mon = rscser().produce_monitor()
    listener = mon.listener
    return collection_mapper_via_sout, None

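# A hypothetical end-to-end sketch of the mapper built above: request a
# TO_COLLECTION of "-" so output goes to STDOUT (defaulting to JSON, per the
# experiment above), then feed the resulting mapper a schema and an entity
# stream. The `vals` keys mirror the pops above; `schema`, `entities`, `foz`
# and `rscser` are stand-ins supplied by the (assumed) caller.

def _hypothetical_pipe_to_stdout(schema, entities, foz, rscser):
    import sys
    vals = {'to_collection': '-', 'to_format': 'json'}
    mapper_via_sout, rc = BUILD_COLLECTION_MAPPER(
        sys.stderr, vals, foz, rscser)
    if mapper_via_sout is None:
        return rc  # help screen was written (or a similar early exit)
    map_collection = mapper_via_sout(sys.stdout)
    return map_collection(schema, entities)  # exit code; 9876 on error stop
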
def producer_script_normally():
    from data_pipes.format_adapters.producer_script import \
        producer_script_module_via_path_ as func
    return func(producer_script, listener)

def resolve_query_complexly():
    func = _work_module().prepare_query
    self.query = func(self.query_pieces, throwing_listener)