Example #1
import argparse
import json

# Helper names used below (pu, defaultMetrics, metricsDescription, allMetrics,
# hasTimestamp, skipTrace, avgData) are defined elsewhere in the source module.

def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--metrics', metavar='M[,M]*', default=defaultMetrics,
                        help='Comma separated list of metrics to print. ' + metricsDescription)
    parser.add_argument('--avg', metavar='N', type=int, default=1,
                        help='Print average every N records.')
    parser.add_argument('--start', metavar='T', type=float, default=0,
                        help='Start trace at time T (drop records with compute start before T ms).')
    parser.add_argument('--gp', action='store_true', help='Print GNUPlot format.')
    parser.add_argument('--no-header', action='store_true', help='Omit the header row.')
    parser.add_argument('name', metavar='filename', help='Trace file.')
    args = parser.parse_args()

    metrics = args.metrics.split(',')
    count = args.gp and (not hasTimestamp(metrics) or len(metrics) == 1)

    if not args.no_header:
        pu.printHeader(allMetrics, metrics, args.gp, count)

    with open(args.name) as f:
        trace = json.load(f)

    if args.start > 0:
        trace = skipTrace(trace, args.start)

    trace = pu.filterData(trace, allMetrics, metrics)

    if args.avg > 1:
        trace = avgData(trace, args.avg, hasTimestamp(metrics))

    pu.printCsv(trace, count)
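
The skipTrace helper referenced above is not shown in the example. A minimal sketch, assuming each trace record stores its compute-start time in a 'startComputeMs' field (the field name is a guess, not given in the source):

def skipTrace(trace, startMs):
    # Keep only records whose compute start (assumed 'startComputeMs' field)
    # is at or after the requested start time in milliseconds.
    return [record for record in trace if record.get('startComputeMs', 0) >= startMs]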
Example #2
import argparse
import json

# Helper names used below (pu, defaultFeatures, featuresDescription, allFeatures,
# hasNames, mergeHeaders, alignData, totalData) are defined elsewhere in the source module.

def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--features', metavar='F[,F]*', default=defaultFeatures,
                        help='Comma separated list of features to print. ' + featuresDescription)
    parser.add_argument('--total', action='store_true', help='Add total time row.')
    parser.add_argument('--gp', action='store_true', help='Print GNUPlot format.')
    parser.add_argument('--no-header', action='store_true', help='Omit the header row.')
    parser.add_argument('--threshold', metavar='T', default=0.0, type=float,
                        help='Threshold of percentage difference.')
    parser.add_argument('--reference', metavar='R', help='Reference profile file name.')
    parser.add_argument('name', metavar='filename', help='Profile file.')
    args = parser.parse_args()

    global allFeatures
    features = args.features.split(',')
    for f in features:
        if f not in allFeatures:
            print('Feature {} not recognized'.format(f))
            return

    count = args.gp and not hasNames(features)

    profile = None
    reference = None

    with open(args.name) as f:
        profile = json.load(f)
        profileCount = profile[0]['count']
        profile = profile[1:]

    if args.reference:
        with open(args.reference) as f:
            reference = json.load(f)
            referenceCount = reference[0]['count']
            reference = reference[1:]
        allFeatures = mergeHeaders(allFeatures)
        features = mergeHeaders(features, hasNames(features))

    if not args.no_header:
        if reference:
            comment = '#' if args.gp else ''
            print(comment + 'reference count: {} - profile count: {}'.format(referenceCount, profileCount))
        pu.printHeader(allFeatures, features, args.gp, count)

    if reference:
        profile = alignData(reference, profile, args.threshold)

    if args.total:
        profile.append(totalData(allFeatures, profile))
        if reference:
            total = profile[-1]
            total['% difference'] = (total['averageMs'] / total['refAverageMs'] - 1) * 100

    profile = pu.filterData(profile, allFeatures, features)

    pu.printCsv(profile, count)
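
The totalData helper used for the --total row is likewise not shown. A minimal sketch, assuming each profile entry maps feature names to numeric values and that non-numeric fields such as a name column are simply skipped (assumptions, not stated in the source):

def totalData(allFeatures, profile):
    # Sum every numeric feature column across the profile rows;
    # non-numeric values (e.g. a name column) are left out of the total.
    total = {}
    for feature in allFeatures:
        values = [row[feature] for row in profile
                  if isinstance(row.get(feature), (int, float))]
        if values:
            total[feature] = sum(values)
    return total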