Exemplo n.º 1
0
def summary(request):
    """Render a plain-text report comparing cache benchmark results.

    Writes a fixed-width table (count / average / median / stddev per
    cache), ASCII bar charts of the best averages and medians, and a
    chart of the stored payload size per Redis backend.
    """
    PAD = 15

    def ms(seconds):
        # Render seconds as right-aligned milliseconds with 3 decimals.
        return ('{:.3f}ms'.format(1000 * seconds)).rjust(PAD)

    response = http.HttpResponse()
    response.write(''.ljust(PAD))
    for heading in ('TIMES', 'AVERAGE', 'MEDIAN', 'STDDEV'):
        response.write(heading.rjust(PAD))
    response.write('\n')

    averages = []
    median_values = []
    for cache_name in settings.CACHE_NAMES:
        measurements = caches[cache_name].get('benchmarking')
        if measurements is None:
            response.write('Nothing for {}\n'.format(cache_name))
            continue
        # Drop the first 10 samples: they are usually much slower than the
        # rest, so we only compare configurations once they are warmed up.
        measurements = measurements[10:]
        median, avg, stddev = _stats(measurements)
        averages.append((cache_name, avg * 1000))
        median_values.append((cache_name, median * 1000))
        response.write('{}{}{}{}{}\n'.format(
            cache_name.ljust(PAD),
            str(len(measurements)).rjust(PAD),
            ms(avg),
            ms(median),
            ms(stddev),
        ))

    response.write('\n')

    graph = Pyasciigraph(float_format='{0:,.3f}')
    for line in graph.graph('Best Averages (shorter better)', averages):
        print(line, file=response)
    for line in graph.graph('Best Medians (shorter better)', median_values):
        print(line, file=response)

    print('\n', file=response)

    # Size of the raw benchmarking payload in each Redis backend.
    sizes = []
    for name in settings.CACHE_NAMES:
        connection = get_redis_connection(name)
        sizes.append((name, connection.strlen(":1:benchmarking")))

    graph = Pyasciigraph(human_readable='si', )
    for line in graph.graph('Size of Data Saved (shorter better)', sizes):
        print(line, file=response)

    print('\n', file=response)
    return response
Exemplo n.º 2
0
    def output(self, args, begin_ns, end_ns, final=0):
        """Print per-TID and per-CPU usage graphs for [begin_ns, end_ns].

        args: parsed CLI options; uses args.top (row limit, 0 = no limit)
        and args.proc_list (optional process-name filter).
        final: unused here; kept for interface compatibility.
        """
        count = 0
        limit = args.top
        total_ns = end_ns - begin_ns
        graph = Pyasciigraph()
        values = []
        print('%s to %s' % (ns_to_asctime(begin_ns), ns_to_asctime(end_ns)))
        for tid in sorted(self.state.tids.values(),
                          key=operator.attrgetter('cpu_ns'), reverse=True):
            # Skip processes not in the (non-empty) filter list.
            if len(args.proc_list) > 0 and tid.comm not in args.proc_list:
                continue
            pc = float("%0.02f" % ((tid.cpu_ns * 100) / total_ns))
            if tid.migrate_count > 0:
                migrations = ", %d migrations" % (tid.migrate_count)
            else:
                migrations = ""
            values.append(("%s (%d)%s" % (tid.comm, tid.tid, migrations), pc))
            count = count + 1
            if limit > 0 and count >= limit:
                break
        for line in graph.graph("Per-TID CPU Usage", values, unit=" %"):
            print(line)

        values = []
        total_cpu_pc = 0
        for cpu in sorted(self.state.cpus.values(),
                          key=operator.attrgetter('cpu_ns'), reverse=True):
            cpu_pc = float("%0.02f" % cpu.cpu_pc)
            total_cpu_pc += cpu_pc
            values.append(("CPU %d" % cpu.cpu_id, cpu_pc))
        for line in graph.graph("Per-CPU Usage", values, unit=" %"):
            print(line)
        # len(dict) instead of len(dict.keys()): same value, no temp view.
        print("\nTotal CPU Usage: %0.02f%%\n" %
              (total_cpu_pc / len(self.state.cpus)))
Exemplo n.º 3
0
def graph_intervals(tl_videos, interval=timedelta(hours=1)):
    """Plot an ASCII chart of photo frequency per time bin.

    tl_videos: iterable of videos exposing .start, .end and .images
    (each image exposing .taken).
    interval: bin width as a timedelta.
    """
    bins = {}
    for video in tl_videos:
        # Round the video extent outwards to whole bin marks.
        start = prev_mark(interval, video.start)
        end = next_mark(interval, video.end)

        # Generate a list of bin-start marks covering the video.
        video_extents = list(
            rrule(SECONDLY,
                  dtstart=start,
                  until=end,
                  interval=int(interval.total_seconds())))

        for bin_start in video_extents:
            images_in_slice = [
                im for im in video.images
                if bin_start <= im.taken < bin_start + interval
            ]
            # NOTE(review): a later overlapping video overwrites (not adds
            # to) an earlier video's count for the same bin — confirm that
            # is intended.
            bins[bin_start] = len(images_in_slice)

    # Build the plot data and graph once, after binning. Previously both
    # were (re)created inside the video loop, which rebuilt them on every
    # iteration and raised NameError when tl_videos was empty.
    graphable = [(h.isoformat(), bins[h]) for h in sorted(bins)]
    graph = Pyasciigraph()
    for line in graph.graph('Frequency per {}'.format(interval), graphable):
        print(line)
    def calculate_text_coverage(self):
        """
        Prints CLI stats about percentage of matched dbpedia facts in wiki raw text.
        """
        matched_count = self.count_matches()

        # Total number of fact occurrences per relation type.
        total_count = {}
        for entity, relation_types in self.dbpedia.iteritems():
            for relation, values in relation_types.iteritems():
                target_resources = values.get('resources', [])
                total_count.setdefault(relation, 0)
                total_count[relation] += len(target_resources)

        occurrence_count = {}
        for relation in total_count:
            occurrence_count[relation] = {
                'total':
                total_count[relation],
                'matched':
                # A fact may occur several times in an article, which would
                # otherwise yield a coverage above 100% -- cap at total.
                min(total_count[relation],
                    matched_count.setdefault(relation, 0))
            }

        # Print bar chart. Multiply by 100.0 *before* dividing: the original
        # `matched / total * 100` was Python 2 integer division, truncating
        # every partial coverage down to 0%. Relations with zero occurrences
        # are skipped to avoid ZeroDivisionError.
        data = [
            ('%  ' + str(vals['matched']) + '/' + str(vals['total']) + ' ' +
             rel.split('/')[-1], vals['matched'] * 100.0 / vals['total'])
            for rel, vals in occurrence_count.iteritems() if vals['total']
        ]
        graph = Pyasciigraph()
        for line in graph.graph('occurred facts in percentage', data):
            print(line)
Exemplo n.º 5
0
def graph_thresholds(test_data):
    """Render *test_data* as a threshold-coloured ASCII chart.

    Returns the (graph, data) pair so callers can reuse the configured
    Pyasciigraph instance and the colourised data.
    """
    # H color test: multicolour on one line.
    print('\nMultiColor example:')

    # Colour each value according to the threshold bucket it falls into.
    colour_steps = {
        51: Gre,
        100: Blu,
        350: Yel,
        500: Red,
    }
    data = hcolor(test_data, colour_steps)

    # Graph with colours, SI powers, a custom bar symbol, float
    # formatting and a few layout tweaks.
    graph = Pyasciigraph(
        line_length=120,
        min_graph_length=50,
        separator_length=4,
        multivalue=True,
        human_readable='si',
        graphsymbol='*',  # comment out if you want to use solid bars
        float_format='{0:,.2f}',
        force_max_value=2000,
    )

    for row in graph.graph(label='With Thresholds', data=data):
        print(row)

    return graph, data
Exemplo n.º 6
0
 def test_human_readable_si(self):
     """Values spanning K..Y magnitudes each get the right SI suffix, and
     only the largest value draws a bar (it defines the scale)."""
     test = [('long_labe☭', 1234), ('sl', 1231234), ('line3', 1231231234),
             ('line4', 1231231231234), ('line5', 1231231231231234),
             ('line6', 1231231231231231234),
             ('line7', 1231231231231231231234),
             ('line8', 1231231231231231231231234),
             ('line9', 123231231231231231231231234)]
     graph = Pyasciigraph(human_readable='si')
     res = graph.graph('☭test print', test)
     # Expected rendering is column-exact: title, separator, then one
     # right-aligned value + label per datum.
     expected = [
         '☭test print',
         '###############################################################################',
         '                                                                 1K  long_labe☭',
         '                                                                 1M  sl        ',
         '                                                                 1G  line3     ',
         '                                                                 1T  line4     ',
         '                                                                 1P  line5     ',
         '                                                                 1E  line6     ',
         '                                                                 1Z  line7     ',
         '                                                                 1Y  line8     ',
         '█████████████████████████████████████████████████████████████  123Y  line9     ',
     ]
     gprint(res)
     gprint(expected)
     assert res == expected
Exemplo n.º 7
0
 def irq_list_to_freq(self, irq, _min, _max, res, name, nr):
     """Print an ASCII histogram of IRQ handler durations.

     irq: dict with a "list" of events exposing .start_ts/.stop_ts (ns).
     _min/_max: duration range in usec; res: number of buckets.
     name/nr: handler name and IRQ number, used in the chart title.
     Returns early (printing nothing) when the range is degenerate.
     """
     step = (_max - _min) / res
     if step == 0:
         return
     values = [0] * res
     for event in irq["list"]:
         # Duration in usec (timestamps are in ns).
         v = (event.stop_ts - event.start_ts) / 1000
         # Clamp to the valid bucket range; a duration below _min used to
         # produce a negative index and silently increment a bucket at the
         # end of the list via negative indexing.
         b = min(max(int((v - _min) / step), 0), res - 1)
         values[b] += 1
     g = [("%0.03f" % (i * step + _min), v) for i, v in enumerate(values)]
     graph = Pyasciigraph()
     for line in graph.graph('Handler duration frequency distribution %s '
                             '(%s) (usec)' % (name, nr),
                             g,
                             info_before=True,
                             count=True):
         print(line)
     print("")
Exemplo n.º 8
0
 def plot(self):
     """Print self.data as an ASCII bar chart titled self.name, framed by
     blank lines and a rule of dashes."""
     bars = Pyasciigraph()
     print()
     for row in bars.graph(self.name, list(self.data.items())):
         print(row)
     print()
     print("-" * 80)
Exemplo n.º 9
0
 def test_neg_multicolor(self):
     """Mixed positive/negative values with per-segment colours render with
     the expected ANSI escape sequences and bar geometry."""
     # Data shapes exercised: bare value, (value, colour), and lists of
     # (value[, colour]) segments, including all-negative and mixed-sign.
     test = [('testval0', 600), ('testval1', 400, Red),
             ('testval2', [(600, Gre), (500, Blu)]),
             ('testval3', [(200, Yel), (100, )]), ('testval4', -170, Cya),
             ('testval5', 50, Blu), ('testval6', [(-300, Gre),
                                                  (-230, Red)]),
             ('testval7', [(-100, Gre), (-230, Red), (200, Yel),
                           (600, Blu)])]
     graph = Pyasciigraph()
     res = graph.graph('☭test print', test)
     # Expected lines are byte-exact, including \x1b[0;3Nm colour codes and
     # \u2588 full-block bar characters.
     expected = [
         u'\u262dtest print',
         u'###############################################################################',
         u'                \u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588                 600  testval0',
         u'                \x1b[0;31m\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\x1b[0m                            \x1b[0;31m400\x1b[0m  testval1',
         u'                \x1b[0;34m\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\x1b[0m\x1b[0;32m\u2588\u2588\u2588\u2588\u2588\x1b[0m              \x1b[0;32m600\x1b[0m,\x1b[0;34m500\x1b[0m  testval2',
         u'                \u2588\u2588\u2588\u2588\u2588\x1b[0;33m\u2588\u2588\u2588\u2588\u2588\x1b[0m                                    \x1b[0;33m200\x1b[0m,100  testval3',
         u'       \x1b[0;36m\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\x1b[0m                                                 \x1b[0;36m-170\x1b[0m  testval4',
         u'                \x1b[0;34m\u2588\u2588\x1b[0m                                                 \x1b[0;34m50\x1b[0m  testval5',
         u' \x1b[0;32m\u2588\u2588\u2588\x1b[0m\x1b[0;31m\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\x1b[0m                                            \x1b[0;32m-300\x1b[0m,\x1b[0;31m-230\x1b[0m  testval6',
         u'    \x1b[0;31m\u2588\u2588\u2588\u2588\u2588\u2588\u2588\x1b[0m\x1b[0;32m\u2588\u2588\u2588\u2588\u2588\x1b[0m\x1b[0;33m\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\x1b[0m\x1b[0;34m\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\x1b[0m   \x1b[0;32m-100\x1b[0m,\x1b[0;31m-230\x1b[0m,\x1b[0;33m200\x1b[0m,\x1b[0;34m600\x1b[0m  testval7'
     ]
     gprint(res)
     gprint(expected)
     assert res == expected
Exemplo n.º 10
0
 def iotop_output_print_file_write(self, files):
     """Graph the most-written files (size, name, fd info), limited to
     self._arg_limit entries; files with zero writes are skipped."""
     graph = Pyasciigraph()
     values = []
     limit = self._arg_limit
     shown = 0
     # Highest write volume first.
     ordered = sorted(files.items(),
                      key=lambda item: item[1]['write'],
                      reverse=True)
     for f in ordered:
         if f[1]["write"] == 0:
             continue
         info_fmt = "{:>10}".format(
             common.convert_size(f[1]["write"], padding_after=True))
         values.append(("%s %s %s" %
                        (info_fmt, f[1]["name"], str(f[1]["other"])[1:-1]),
                        f[1]["write"]))
         shown += 1
         if limit > 0 and shown >= limit:
             break
     for line in graph.graph('Files Write',
                             values,
                             sort=2,
                             with_value=False):
         print(line)
Exemplo n.º 11
0
    def output(self, args, begin_ns, end_ns, final=0):
        """Print per-TID page allocation/deallocation graphs for the time
        range, followed by the global allocated/freed totals.

        args.top limits rows per graph (0 = unlimited); final is unused.
        """
        limit = args.top
        graph = Pyasciigraph()
        print('%s to %s' % (ns_to_asctime(begin_ns), ns_to_asctime(end_ns)))

        # Top allocators first.
        allocs = []
        shown = 0
        for tid in sorted(self.state.tids.values(),
                          key=operator.attrgetter('allocated_pages'),
                          reverse=True):
            allocs.append(
                ("%s (%d)" % (tid.comm, tid.tid), tid.allocated_pages))
            shown += 1
            if limit > 0 and shown >= limit:
                break
        for line in graph.graph("Per-TID Memory Allocations",
                                allocs,
                                unit=" pages"):
            print(line)

        # Then the top freers.
        frees = []
        shown = 0
        for tid in sorted(self.state.tids.values(),
                          key=operator.attrgetter('freed_pages'),
                          reverse=True):
            frees.append(("%s (%d)" % (tid.comm, tid.tid), tid.freed_pages))
            shown += 1
            if limit > 0 and shown >= limit:
                break
        for line in graph.graph("Per-TID Memory Deallocation",
                                frees,
                                unit=" pages"):
            print(line)
        print("\nTotal memory usage:\n- %d pages allocated\n- %d pages freed" %
              (self.state.mm["allocated_pages"], self.state.mm["freed_pages"]))
Exemplo n.º 12
0
def print_graph(dataset, title):
    """Print *dataset* ({label: value}) as an SI-scaled ASCII chart, with
    bars coloured by multiples of the mean unless colour is disabled."""
    graph = Pyasciigraph(
        separator_length=4,
        multivalue=False,
        human_readable='si',
    )
    mean = np.mean(list(dataset.values()))
    chart = [(key, dataset[key]) for key in sorted(dataset.keys())]

    # NOTE(review): depends on a module-level `args` object -- confirm it is
    # defined wherever this function is imported/used.
    if not args.no_color:
        data = hcolor(chart, {
            int(mean): Gre,
            int(mean * 2): Yel,
            int(mean * 3): Red,
        })
    else:
        data = chart

    for line in graph.graph(title, data):
        print(line)
Exemplo n.º 13
0
    def show_index(self):
        """Print the desired index distribution per coin as a coloured
        ASCII chart on stdout."""
        distribution = [(coin.Ticker, coin.DesiredPercentage)
                        for coin in IndexedCoinModel.select()]

        pyGraph = Pyasciigraph(line_length=50,
                               min_graph_length=50,
                               separator_length=4,
                               multivalue=False,
                               human_readable='si',
                               graphsymbol='*',
                               float_format='{0:,.2f}')

        # Colour bars by how large a share of the index they take.
        data = hcolor(distribution, {
            15: Gre,
            30: Blu,
            50: Yel,
            60: Red,
        })

        sys.stdout.write("\n")
        for line in pyGraph.graph('Index Distribution', data=data):
            sys.stdout.write(line + "\n")
        sys.stdout.write("\n")
Exemplo n.º 14
0
def print_charts(dataset, title, args, weekday=False):
    """Print *dataset* as a coloured ASCII chart.

    Keys whose value is well above (>= 1.33x) or below (<= 0.66x) the
    median are flagged with a green (+) / red (-) marker. ANSI colour
    codes are stripped when args.no_color is set.
    """
    values = list(dataset.values())
    mean = numpy.mean(values)
    median = numpy.median(values)

    chart = []
    for key in sorted(dataset.keys()):
        label = int_to_weekday(key) if weekday else key
        if dataset[key] >= median * 1.33:
            label = "%s (\033[92m+\033[0m)" % label
        elif dataset[key] <= median * 0.66:
            label = "%s (\033[91m-\033[0m)" % label
        chart.append((label, dataset[key]))

    # Bars coloured by multiples of the mean.
    data = hcolor(chart, {
        int(mean): Gre, int(mean * 2): Yel, int(mean * 3): Red,
    })

    graph = Pyasciigraph(
        separator_length=4,
        multivalue=False,
        human_readable='si',
    )

    for line in graph.graph(title, data):
        if args.no_color:
            # Strip any ANSI escape sequences from the rendered line.
            ansi_escape = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
            line = ansi_escape.sub('', line)
        print(line)
    print("")
Exemplo n.º 15
0
def print_charts(dataset, title, weekday=False):
    """Print nice charts based on a dict {key: value, ...}.

    Keys well above (>= 1.33x) or below (<= 0.66x) the median get a green
    (+) / red (-) marker; bars are coloured by multiples of the mean.
    """
    values = list(dataset.values())
    mean = numpy.mean(values)
    median = numpy.median(values)

    chart = []
    for key in sorted(dataset.keys()):
        label = int_to_weekday(key) if weekday else key
        if dataset[key] >= median * 1.33:
            label = "%s (\033[92m+\033[0m)" % label
        elif dataset[key] <= median * 0.66:
            label = "%s (\033[91m-\033[0m)" % label
        chart.append((label, dataset[key]))

    data = hcolor(chart, {
        int(mean): Gre, int(mean * 2): Yel, int(mean * 3): Red,
    })

    graph = Pyasciigraph(
        separator_length=4,
        multivalue=False,
        human_readable='si',
    )

    for line in graph.graph(title, data):
        print(line)
    print("")
Exemplo n.º 16
0
 def __init__(self):
     """Initialise vital-sign readings to zero and set up graphing state."""
     # Most recent sensor readings; presumably updated by other methods of
     # this class -- confirm against the rest of the file.
     self._cur_oxygen = 0
     self._cur_systolic = 0
     self._cur_diastolic = 0
     self._cur_pulse = 0
     # Serialises access to graph rendering across threads.
     self._graph_lock = threading.Lock()
     self._graph = Pyasciigraph()
Exemplo n.º 17
0
 def iotop_output_write(self):
     """Graph per-process total write I/O, with file/net/unknown breakdown
     in each label; limited to self._arg_limit rows, filtered processes
     skipped."""
     graph = Pyasciigraph()
     rows = []
     shown = 0
     limit = self._arg_limit
     for tid in sorted(self.state.tids.values(),
                       key=operator.attrgetter('write'), reverse=True):
         if not self.filter_process(tid):
             continue
         info_fmt = "{:>10} {:<25} {:>9} file {:>9} net {:>9} unknown "
         label = info_fmt.format(
             common.convert_size(tid.write, padding_after=True),
             "%s (%d)" % (tid.comm, tid.pid),
             common.convert_size(tid.disk_write, padding_after=True),
             common.convert_size(tid.net_write, padding_after=True),
             common.convert_size(tid.unk_write, padding_after=True))
         rows.append((label, tid.write))
         shown += 1
         if limit > 0 and shown >= limit:
             break
     for line in graph.graph('Per-process I/O Write', rows,
                             with_value=False):
         print(line)
Exemplo n.º 18
0
    def _print_results(self, begin_ns, end_ns, final=0):
        """Print per-TID page allocation/deallocation graphs for the time
        range, then totals over every unfiltered process.

        self._arg_limit caps rows per graph (0 = unlimited); `final` is
        unused and kept for interface compatibility.
        """
        count = 0
        limit = self._arg_limit
        graph = Pyasciigraph()
        values = []
        self.state = self._automaton.state
        alloc = 0
        freed = 0
        print('Timerange: [%s, %s]' %
              (common.ns_to_hour_nsec(
                  begin_ns, gmt=self._arg_gmt, multi_day=True),
               common.ns_to_hour_nsec(
                   end_ns, gmt=self._arg_gmt, multi_day=True)))
        for tid in sorted(self.state.tids.values(),
                          key=operator.attrgetter('allocated_pages'),
                          reverse=True):
            if not self.filter_process(tid):
                continue
            values.append(
                ("%s (%d)" % (tid.comm, tid.tid), tid.allocated_pages))
            count = count + 1
            if limit > 0 and count >= limit:
                break
        for line in graph.graph("Per-TID Memory Allocations",
                                values,
                                unit=" pages"):
            print(line)

        values = []
        count = 0
        for tid in sorted(self.state.tids.values(),
                          key=operator.attrgetter('freed_pages'),
                          reverse=True):
            if not self.filter_process(tid):
                continue
            values.append(("%s (%d)" % (tid.comm, tid.tid), tid.freed_pages))
            count = count + 1
            # BUG FIX: this loop used to also do `freed += tid.freed_pages`,
            # double-counting the displayed top-`limit` processes, because
            # the totals loop below already sums freed_pages over all of
            # them.
            if limit > 0 and count >= limit:
                break
        for line in graph.graph("Per-TID Memory Deallocation",
                                values,
                                unit=" pages"):
            print(line)

        # Totals over every unfiltered process (not just the displayed top).
        # A single unsorted pass suffices; sorting added nothing to a sum.
        for tid in self.state.tids.values():
            if not self.filter_process(tid):
                continue
            alloc += tid.allocated_pages
            freed += tid.freed_pages
        print("\nTotal memory usage:\n- %d pages allocated\n- %d pages freed" %
              (alloc, freed))
Exemplo n.º 19
0
def synthesize_trials(block: Block) -> List[dict]:
    """Generate trial sequences for *block* via the SweetPea Docker backend.

    Starts the sweetpea/server container, POSTs the generated JSON request
    to /experiments/generate, decodes each returned solution into a trial
    dict, prints a frequency histogram of the sampled solutions, and always
    stops the container (even on failure).

    Raises RuntimeError when the backend responds non-200 or not-ok; the
    request body is saved to a temp file for debugging in that case.
    """
    # TODO: Do this in separate thread, and output some kind of progress indicator.
    json_data = __generate_json_request(block)

    solutions = cast(List[dict], [])

    # Make sure the local image is up-to-date.
    update_docker_image("sweetpea/server")

    # 1. Start a container for the sweetpea server, making sure to use -d and -p to map the port.
    container = start_docker_container("sweetpea/server", 8080)

    # 2. POST to /experiments/generate using the backend request json as the body.
    # TOOD: Do this in separate thread, and output some kind of progress indicator.
    print("Sending formula to backend... ", end='', flush=True)
    t_start = datetime.now()
    try:
        __check_server_health()

        experiments_request = requests.post(
            'http://localhost:8080/experiments/generate', data=json_data)
        if experiments_request.status_code != 200 or not experiments_request.json(
        )['ok']:
            # Persist the failing request body so the failure can be
            # reproduced against the backend manually.
            tmp_filename = ""
            with tempfile.NamedTemporaryFile(delete=False) as f:
                f.write(str.encode(json_data))
                tmp_filename = f.name

            raise RuntimeError(
                "Received non-200 response from experiment generation! LowLevelRequest body saved to temp file '"
                + tmp_filename + "' status_code=" +
                str(experiments_request.status_code) + " response_body=" +
                str(experiments_request.text))

        solutions = experiments_request.json()['solutions']
        t_end = datetime.now()
        print(str((t_end - t_start).seconds) + "s")

    # 3. Stop and then remove the docker container.
    finally:
        stop_docker_container(container)

    # 4. Decode the results
    result = list(map(lambda s: __decode(block, s['assignment']), solutions))

    # Dump histogram of frequency distribution, just to make sure it's somewhat even.
    print()
    print("Found " + str(len(solutions)) + " distinct solutions.")
    print()
    hist_data = [("Solution #" + str(idx + 1), sol['frequency'])
                 for idx, sol in enumerate(solutions)]
    hist_data.sort(key=lambda tup: tup[1], reverse=True)

    # Only the 15 most frequent solutions are charted.
    graph = Pyasciigraph()
    for line in graph.graph('Most Frequently Sampled Solutions',
                            hist_data[:15]):
        print(line)

    return result
Exemplo n.º 20
0
def makeGraph(fList):
    """Print an ASCII bar chart of file counts per category.

    fList: iterable of entries exposing a .name attribute (e.g. os.DirEntry).
    A file may count toward several categories. Relies on the module-level
    `oss` flag only for the heading text.
    """
    # Suffix table per category. Names are lower-cased once so every
    # category matches case-insensitively -- the original only lower-cased
    # the first suffix check of some categories, so e.g. '.MKV' or '.PNG'
    # files were silently uncounted.
    categories = [
        ('docs', ('.pdf', '.docx', '.doc', '.txt')),
        ('songs', ('.mp3', '.ogg', '.wav')),
        ('videos', ('.mp4', '.mkv', '.avi')),
        ('images', ('.jpeg', '.png', '.jpg', '.gif')),
        ('codes', ('.c', '.py', '.java', '.cpp')),
        ('compressed', ('.zip', '.7z', '.deb', '.tar.gz', '.rpm')),
        ('others', ('.apk', '.jar', '.exe', '.iso', '.dmg', '.csv', '.log',
                    '.db')),
    ]
    counts = {label: 0 for label, _ in categories}

    ll = len(fList)
    if oss == 1:
        print("Total no of files in system: ", ll)
    else:
        print("Total no of files in user home: ", ll)

    for entry in fList:  # renamed from `file` to avoid shadowing a builtin
        name = entry.name.lower()
        for label, suffixes in categories:
            # endswith accepts a tuple: one call per category.
            if name.endswith(suffixes):
                counts[label] += 1

    # Keep the original display order.
    data = [('docs', counts['docs']), ('songs', counts['songs']),
            ('videos', counts['videos']), ('images', counts['images']),
            ('codes', counts['codes']), ("compressed", counts['compressed']),
            ('others', counts['others'])]

    pattern = [Gre, Yel, Red]
    data = vcolor(data, pattern)
    graph = Pyasciigraph()
    for line in graph.graph('Files on PC', data):
        print(line)
Exemplo n.º 21
0
 def dumptAll(self, context=""):
     global distriDuration
     self.logger.info("Dump all perfmon recorded timings")
     with open(myConfig['DEBUG']['PARTS']['PERFMON']['FILE'],
               "a+") as myfile:
         for part in distriDuration:
             sorted_distriDuration = self.getSortedDuration(part)
             #self.logger.info('Timing for parts :'+part)
             #self.logger.info (sorted_distriDuration)
             graph = Pyasciigraph(graphsymbol='#')
             myfile.write("----------------------- {}\n".format(context))
             myfile.write("Duration Timing for parts : {}\n".format(part))
             for line in graph.graph(part, sorted_distriDuration):
                 myfile.write("{}\n".format(
                     line.encode('ascii', 'ignore').decode('utf-8')))
             totalDur = 0
             totalNb = 0
             for measure in distriDuration[part].items():
                 totalDur = totalDur + (measure[0] * measure[1])
                 totalNb = totalNb + measure[1]
             myfile.write("Average {} ms/processing\n".format(totalDur /
                                                              totalNb))
             #self.logger.info(line.encode('ascii','ignore').decode('utf-8'))
         for part in distriCycle:
             sorted_distriCycle = self.getSortedCycle(part)
             graph = Pyasciigraph(graphsymbol='#')
             myfile.write("----------------------- {}\n".format(context))
             myfile.write("Cycle Timing for parts : {}\n".format(part))
             for line in graph.graph(part, sorted_distriCycle):
                 myfile.write("{}\n".format(
                     line.encode('ascii', 'ignore').decode('utf-8')))
             totalDur = 0
             totalNb = 0
             for measure in distriCycle[part].items():
                 totalDur = totalDur + (measure[0] * measure[1])
                 totalNb = totalNb + measure[1]
             myfile.write("Averazge {} events/s\n".format(
                 1000 / (totalDur / totalNb)))
     myfile.close()
     with open(myConfig['DEBUG']['PARTS']['TRACER']['FILE'],
               "a+") as myfile:
         myfile.write("----------------------- {}\n".format(context))
         for evt in timeline:
             myfile.write("{0} {1} {2} {3}\n".format(
                 evt['ts'], evt['thread'], evt['tag'], evt['state']))
         myfile.close()
Exemplo n.º 22
0
def print_distribution_graph(data, title):
    """Print an SI-scaled ASCII histogram of the non-negative ints in *data*.

    Zero-frequency values are omitted. Uses the print() function instead of
    the Python 2 print statement, so the snippet parses on Python 3 too.
    """
    y = np.bincount(data)
    # Indices of values that actually occur.
    ii = np.nonzero(y)[0]
    dist = [(str(x[0]), x[1]) for x in zip(ii, y[ii])]
    graph = Pyasciigraph(human_readable='si')
    for line in graph.graph(title, dist):
        print(line)
    print("")
Exemplo n.º 23
0
def histogram(data, name):
    """Graph weekly counts of *data* (epoch-second timestamps).

    Replaces Python-2-only `iteritems()` and `print` statements with
    `items()` and `print()`, which behave identically on Python 2 and
    also run on Python 3.
    """
    name = 'count of %s response time by week' % name
    g = Pyasciigraph()
    buckets = defaultdict(int)
    for item in data:
        # Bucket key = whole weeks since the epoch.
        buckets[int(item // (86400 * 7))] += 1
    for line in g.graph(name, ((k, v) for k, v in buckets.items())):
        print(line)
Exemplo n.º 24
0
 def output_latencies(self, args):
     """Print one request-latency histogram per recorded process."""
     graph = Pyasciigraph()
     for proc, hist in self.latency_hist.items():
         points = [("%s" % (entry[0]), entry[1]) for entry in hist]
         for line in graph.graph('%s requests latency (ms)' % proc,
                                 points,
                                 unit=" ms"):
             print(line)
Exemplo n.º 25
0
def barGraph(title, data):
    """
      Dado un titulo y una tupla de datos formados por (titulo, numero)
      imprime un gráfico de barra en la terminal
    """

    print()
    for line in Pyasciigraph().graph(title, data):
        print(line)
    return
Exemplo n.º 26
0
 def print_stats_diagram(self, total_entries):
     """Print per-resolver blocking statistics followed by a TOTAL row."""
     graph = Pyasciigraph(separator_length=4)
     data = [(resolver_name, self.statistics[resolver_name])
             for resolver_name in sorted(self.resolver_names)]
     data.append(('TOTAL', total_entries))
     for line in graph.graph('Blocking Statistics:', data):
         print(line)
Exemplo n.º 27
0
 def test_type_output(self):
     """Every rendered line is a text string (unicode on Python 2)."""
     samples = [('long_labe☭', 423), ('sl', 1234), ('line3', 531), ('line4', 200), ('line5', 834)]
     graph = Pyasciigraph()
     rendered = graph.graph('test print', samples)
     # `unicode` is only evaluated on the Python 2 branch.
     expected = unicode if sys.version < '3' else str
     for line in rendered:
         # Exact type check on purpose: a subclass would be a regression.
         assert type(line) == expected
Exemplo n.º 28
0
def print_graph(header, data, is_bytes=False):
    """Print *data* as an ASCII chart under *header*, sized to the current
    terminal width; values render as byte sizes when is_bytes is True."""
    terminal_cols = shutil.get_terminal_size(
        (DEFAULT_TERMINAL_WIDTH, 20)).columns
    graph = Pyasciigraph(float_format='{:,.1f}',
                         min_graph_length=20,
                         separator_length=1,
                         line_length=terminal_cols,
                         human_readable='cs' if is_bytes else None)
    for line in graph.graph(header, data):
        print(line)
    print()
Exemplo n.º 29
0
 def iotop_output_nr_sector(self):
     """Graph sector counts per disk, busiest first; idle disks skipped."""
     graph = Pyasciigraph()
     rows = []
     for disk in sorted(self.state.disks.values(),
                        key=operator.attrgetter('nr_sector'), reverse=True):
         if disk.nr_sector == 0:
             continue
         rows.append((disk.prettyname, disk.nr_sector))
     for line in graph.graph('Disk nr_sector', rows, unit=" sectors"):
         print(line)
Exemplo n.º 30
0
 def graph_output(self, args, begin_ns, end_ns, final=0):
     """Print per-process CPU usage over time (one graph per process in
     args.proc_list), then the total CPU usage graph."""
     for comm in args.proc_list:
         graph = Pyasciigraph()
         samples = []
         for sec in sorted(self.history.keys()):
             # Stop at the first second with no sample for this process.
             if comm not in self.history[sec]["proc"].keys():
                 break
             pc = float("%0.02f" %
                        ((self.history[sec]["proc"][comm] * 100) /
                         self.history[sec]["total_ns"]))
             samples.append(("%s" % sec_to_hour(sec), pc))
         for line in graph.graph("%s CPU Usage" % comm, samples, unit=" %"):
             print(line)
     graph = Pyasciigraph()
     samples = []
     for sec in sorted(self.history.keys()):
         samples.append(("%s" % sec_to_hour(sec),
                         float("%0.02f" % (self.history[sec]["cpu"]))))
     for line in graph.graph("Total CPU Usage", samples, unit=" %"):
         print(line)