def test_clistream_tsv():
    """clistream should split delimited stdin lines into field lists.

    NOTE(review): despite the "tsv" name this exercises comma-delimited
    input -- confirm whether a tab-delimited case was intended.
    """
    collected = []
    # Keep a handle on the real stdin so it can be restored afterwards;
    # the original version leaked the StringIO into subsequent tests.
    original_stdin = sys.stdin
    sys.stdin = StringIO()
    sys.stdin.write('A,B,C\n1,2,3\n')
    sys.stdin.seek(0)
    try:
        clistream(collected.append, delimiter=',')
    finally:
        sys.stdin = original_stdin
    assert len(collected) == 2
    assert collected[0] == ['A', 'B', 'C']
    assert collected[1] == ['1', '2', '3']
def test_clistream():
    """clistream should feed each (stripped) stdin line to the callback."""
    collected = []
    # Keep a handle on the real stdin so it can be restored afterwards;
    # the original version leaked the StringIO into subsequent tests.
    original_stdin = sys.stdin
    sys.stdin = StringIO()
    sys.stdin.write('A,B,C\n1,2,3\n')
    sys.stdin.seek(0)
    try:
        clistream(collected.append, lambda l: l.rstrip('\r\n'))
    finally:
        sys.stdin = original_stdin
    assert len(collected) == 2
    assert collected[0] == 'A,B,C'
    assert collected[1] == '1,2,3'
def main(output, **kwargs):
    """Parse records from the input stream and pretty-print them to *output*."""
    printer = pprint.PrettyPrinter(indent=2, stream=output, width=100)
    # Chain the processing steps: raw line -> parse -> dict -> pretty print.
    pipeline = Sequential()
    for step in (parse, params_todict, printer.pprint):
        pipeline.callback(step)
    pipeline.errback(logging.error)
    clistream(pipeline, **kwargs)
def main(output, **kwargs):
    """Read delimited records from the input stream into a graph and render it."""
    graph = Graph()
    # Per-record processing: map the raw row, then insert it into the graph.
    pipeline = Sequential()
    pipeline.callback(RowMapper(FIELDS))
    pipeline.callback(graph.add)
    pipeline.errback(logging.error)
    # Drain the input; "graph" accumulates every converted record.
    clistream(pipeline, delimiter=',', **kwargs)
    graph.render(output)
def main(output, output_encoding, **kwargs):
    """Convert delimited rows into KML points and write the encoded document."""
    document = simplekml.Kml()

    def add_point(record):
        # Each mapped record becomes keyword arguments for a new placemark.
        document.newpoint(**record)

    pipeline = Sequential()
    pipeline.callback(RowMapper(FIELDS))
    pipeline.callback(mapping)
    pipeline.callback(add_point)
    pipeline.errback(logging.error)
    # Drain the input; "document" accumulates the converted points.
    clistream(pipeline, delimiter=',', **kwargs)
    output.write(document.kml().encode(output_encoding))
def main(config, output, **kwargs):
    """Map parsed input records through a JSON-configured field list to TSV."""
    if config is None:
        raise SystemExit('-c/--config option is required.')
    # Read the field schema once, then release the config file handle.
    fields = json.load(config)['fields']
    config.close()
    tsv = csv.writer(output, delimiter='\t', quotechar='\t',
                     quoting=csv.QUOTE_NONE)
    pipeline = Sequential()
    pipeline.callback(parse)
    pipeline.callback(DictMapper(fields))
    pipeline.callback(tsv.writerow)
    pipeline.errback(logging.error)
    clistream(pipeline, **kwargs)
def _load_patterns(fp):
    """Return the stripped lines of pattern file *fp*, or ``None`` if absent."""
    if not fp:
        return None
    return [line.strip() for line in fp]


def main():
    """Collect tab-delimited records, skipping configured prefix/suffix patterns."""
    args = parse_arguments(
        files=dict(nargs='*'),
        skip_prefix=dict(flags='--skip-prefix', type=argparse.FileType('r'),
                         metavar="FILE",
                         help='Skip pattern file for prefix match'),
        skip_suffix=dict(flags='--skip-suffix', type=argparse.FileType('r'),
                         metavar="FILE",
                         help='Skip pattern file for suffix match'))
    # Read schema definition.
    if args.config is None:
        raise SystemExit('-c/--config option is required.')
    fields = json.load(args.config)['fields']
    args.config.close()
    # Load the optional skip pattern files (duplicated if/else folded into
    # one helper).
    collector = Collector(_load_patterns(args.skip_prefix),
                          _load_patterns(args.skip_suffix))
    # Set sequential callbacks to accept input records.
    s = Sequential()
    s.callback(RowMapper(fields))
    s.callback(collector.collect)
    s.errback(LOGGER.error)
    # Consume the input stream; "collector" accumulates the converted records
    # (the old comment referred to a nonexistent "G").
    clistream(s, delimiter='\t', files=args.files)
    collector.dump(args.output)
def logparse(*args, **kwargs):
    """
    Parse access log on the terminal application.
    If a list of files is given, parse each file; otherwise, parse
    standard input.

    :param args: supporting functions applied after the raw log line
        has been processed
    :type args: list of callables
    :rtype: tuple of (statistics, key/value report)
    """
    from clitool.cli import clistream
    from clitool.processor import SimpleDictReporter

    # BUG FIX: ``args`` is a tuple, and ``[parse] + args`` (list + tuple)
    # always raises TypeError; convert to a list first.
    callbacks = [parse] + list(args)
    reporter = SimpleDictReporter()
    stats = clistream(reporter, *callbacks, **kwargs)
    return stats, reporter.report()
END = '\033[0m'

args = parse_arguments(
    files=dict(nargs='*'),
    color=dict(flags="--color", action="store_true"),
    status=dict(flags="--status"))

# BUG FIX: materialize the status filter. ``map`` returns a one-shot
# iterator on Python 3, so repeated ``in`` tests would exhaust it and
# silently drop every record after the first.
lst = [int(s) for s in args.status.split(',')] if args.status else None


def p(e):
    """Print one parsed log entry, optionally filtered/colored by status."""
    if lst and e['status'] not in lst:
        return
    colored = False
    if args.color:
        # BUG FIX: use elif so 5xx responses stay RED instead of being
        # immediately overwritten by PURPLE (500 is also >= 400).
        if e['status'] >= 500:
            print_(RED, end='')
            colored = True
        elif e['status'] >= 400:
            print_(PURPLE, end='')
            colored = True
    for k in sorted(e.keys()):
        if e[k]:
            print_("%-16s: %s" % (k, e[k]))
    if colored:
        print_(END, end='')
    print_("-" * 40)


stats = clistream(p, parse, files=args.files)
print_(stats)

# vim: set et ts=4 sw=4 cindent fileencoding=utf-8 :
def main(config, **kwargs):
    """Load comma-delimited address records into the configured database."""
    cfg = cliconfig(config)
    session = SessionFactory(cfg['database']['url']).create()

    def to_address(entry):
        return Address(**entry)

    # Each record flows: raw row -> JPAddress -> mapping -> Address -> session.
    clistream(session.add, JPAddress._make, mapping, to_address,
              delimiter=',', **kwargs)
    session.commit()