def deep_property(path):
    """Creates a :func:`pydash.collections.pluck` style function, which
    returns the path value of a given object.

    Args:
        path (str): Path value to fetch from object.

    Returns:
        function: Function that returns object's path value.

    Example:

        >>> deep_property('a.b.c')({'a': {'b': {'c': 1}}})
        1
        >>> deep_property('a.1.0.b')({'a': [5, [{'b': 1}]]})
        1
        >>> deep_property('a.1.0.b')({}) is None
        True

    See Also:
        - :func:`deep_property` (main definition)
        - :func:`deep_prop` (alias)

    .. versionadded:: 1.0.0
    """
    return lambda obj: pyd.deep_get(obj, path)
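# Hedged usage sketch for the factory above, assuming `import pydash as pyd`
# (the alias used in the definition) and a pydash release that still exposes
# deep_property; the record shapes and path are made up for illustration.
import pydash as pyd

get_score = pyd.deep_property('user.profile.score')  # hypothetical path
records = [
    {'user': {'profile': {'score': 7}}},
    {'user': {'profile': {'score': 3}}},
    {'user': {}},  # missing path resolves to None rather than raising
]
print([get_score(r) for r in records])  # [7, 3, None]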
def on_data(self, data):
    # Each streamed payload arrives as a JSON string.
    tweet = json.loads(data)

    tweet_id = tweet.get('id_str')
    text = tweet.get('text')
    # deep_get with a [] default keeps the comprehensions safe when the
    # 'entities' section is missing from the payload.
    hashtags = [
        x.get('text')
        for x in pydash.deep_get(tweet, 'entities.hashtags', [])
    ]
    urls = list(
        filter(lambda x: bool(x), [
            x.get('expanded_url')
            for x in pydash.deep_get(tweet, 'entities.urls', [])
        ]))

    print('{}: {}'.format(tweet_id, text))
    print(hashtags)
    print(urls)

    if urls:
        for url in urls:
            print('URL: {}'.format(url))
            content_hub_crawl(url)
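# Minimal standalone sketch of the deep_get pattern used in on_data above:
# the [] default makes the comprehensions no-ops when 'entities' is absent.
# The payload below is invented; pydash is assumed to be installed.
import json
import pydash

sample = json.dumps({
    'id_str': '1',
    'text': 'hello #world',
    'entities': {
        'hashtags': [{'text': 'world'}],
        'urls': [{'expanded_url': 'https://example.com'}],
    },
})

tweet = json.loads(sample)
hashtags = [h.get('text')
            for h in pydash.deep_get(tweet, 'entities.hashtags', [])]
urls = [u.get('expanded_url')
        for u in pydash.deep_get(tweet, 'entities.urls', [])]
print(hashtags, urls)  # ['world'] ['https://example.com']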
def main():
    from docopt import docopt

    # Parse the CLI arguments described in the module docstring.
    arguments = docopt(
        __doc__.format(cmd=__file__,
                       now=datetime.now().replace(second=0,
                                                  microsecond=0).isoformat()))

    # Resolve the time window and the metric paths to extract.
    time_range = int(arguments['--range'])
    metrics = arguments['<metric>'] or ['CPL.avg5']
    end = iso8601.parse_date(arguments['--end'], default_timezone=None)
    begin = end - timedelta(hours=time_range)

    reader = AtopReader(arguments['--path'], arguments['--cmd'])

    # Feed every matching atop log line into the parser.
    with AtopParser(begin, end) as parser:
        for log_file in reader.atop_log_files(begin, end):
            for line in log_file:
                parser.add_line(line.decode())

    # Emit the collected samples in the requested output format.
    if not len(parser.result):
        sys.stderr.write('empty result\n')
        sys.exit(1)

    elif arguments['metrics']:
        for metric in parser.available_metric_paths:
            print(metric)

    elif arguments['table']:
        from tabulate import tabulate
        print(
            tabulate([[time] + [py_.get(value, metric) for metric in metrics]
                      for time, value in parser.result.items()],
                     ['time'] + metrics,
                     tablefmt="plain"))

    elif arguments['json']:
        from json import dumps
        print(
            dumps({
                time.isoformat():
                    {metric: py_.get(value, metric) for metric in metrics}
                for time, value in parser.result.items()
            }))

    elif arguments['csv']:
        import csv
        writer = csv.writer(sys.stdout)
        writer.writerow(['time'] + metrics)
        for time, value in parser.result.items():
            writer.writerow([time.isoformat()] +
                            [py_.get(value, metric) for metric in metrics])

    elif arguments['gnuplot']:
        for metric in metrics:
            width = int(arguments['--width'])
            height = int(arguments['--height'])

            # Pipe a dumb-terminal plot script plus inline data to gnuplot.
            process = subprocess.Popen(["gnuplot"], stdin=subprocess.PIPE)
            process.stdin.write(b"set term dumb %d %d \n" % (width, height))
            process.stdin.write(b"unset border \n")
            process.stdin.write(b"unset ytics \n")
            process.stdin.write(b"unset xtics \n")
            process.stdin.write(b"set xtics nomirror \n")
            process.stdin.write(b"unset key \n")
            process.stdin.write(b"set xdata time \n")
            process.stdin.write(b"set format x '%H' \n")
            process.stdin.write(b"set timefmt '%Y-%m-%dT%H:%M:%S' \n")
            process.stdin.write(b"set datafile sep '\t' \n")
            process.stdin.write(
                b"plot '-' using 1:2 notitle with linespoints \n")

            for time, value in parser.result.items():
                process.stdin.write(
                    b"%s\t%s\n" %
                    (str(time.isoformat()).encode('utf-8'),
                     str(py_.get(value, metric)).encode('utf-8')))

            process.stdin.write(b"e\n")
            process.stdin.flush()
            process.stdin.close()
            process.wait()

    elif arguments['diagram']:
        import diagram

        width = int(arguments['--width'])
        height = int(arguments['--height'])

        class DiagramOptions(object):
            axis = True
            batch = False
            color = False
            encoding = 'utf-8'
            function = None  # None or any of diagram.FUNCTION.keys()
            legend = True
            palette = None  # None or any of diagram.PALETTE.keys()
            reverse = False

            def __init__(self, **kwargs):
                self.__dict__.update(kwargs)

        for metric in metrics:
            engine = diagram.AxisGraph(diagram.Point((width, height)),
                                       DiagramOptions())
            engine.update([
                py_.deep_get(value, metric)
                for value in parser.result.values()
            ])
            if hasattr(sys.stdout, 'buffer'):
                engine.render(sys.stdout.buffer)
            else:
                engine.render(sys.stdout)
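# Hedged sketch of how the metric paths above resolve: py_.get walks a
# dot-delimited path (e.g. 'CPL.avg5') into each parsed sample.  The nested
# dict shape below is invented for illustration, and the
# `from pydash import py_` binding is an assumption about how this script
# imports pydash.
from pydash import py_

sample_result = {'CPL': {'avg5': 0.42, 'avg15': 0.37}, 'MEM': {'free': 1024}}
for metric in ('CPL.avg5', 'MEM.free', 'SWP.free'):
    print(metric, py_.get(sample_result, metric))
# CPL.avg5 0.42
# MEM.free 1024
# SWP.free None  (missing paths fall back to None)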
def test_deep_get(case, expected):
    assert _.deep_get(*case) == expected
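# The (case, expected) pairing above suggests pytest parametrization; this is
# a hedged sketch of how such a table might be wired up, not the suite's
# actual fixtures.  `_` is assumed to be the usual `import pydash as _`
# alias, and the cases mirror the doctests shown earlier.
import pytest
import pydash as _

@pytest.mark.parametrize('case,expected', [
    (({'a': {'b': {'c': 1}}}, 'a.b.c'), 1),
    (({'a': [5, [{'b': 1}]]}, 'a.1.0.b'), 1),
    (({}, 'a.b'), None),
])
def test_deep_get_sketch(case, expected):
    assert _.deep_get(*case) == expected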