def test_probe_states(capsys):
  """
  Test xpedite probes and probe states for the fix decoder application
  against baseline probe states recorded for the application.

  Probes are keyed by sysName and compared one-by-one with the pickled
  baseline, after asserting the counts match.
  """
  # cPickle exists only on Python 2; fall back to pickle (C-accelerated on py3)
  try:
    import cPickle as pickle
  except ImportError:
    import pickle
  from xpedite.types.probe import compareProbes
  profileInfo = loadProfileInfo(DATA_DIR, os.path.join(DATA_DIR, 'profileInfo.py'), REMOTE)
  probeMap = {}
  baselineProbeMap = {}
  with capsys.disabled():  # probe loading emits console output; bypass pytest capture
    probes = loadProbes(FIX_DECODER_BINARY, profileInfo, TXN_COUNT, THREAD_COUNT, REMOTE, WORKSPACE)
  for probe in probes:
    probeMap[probe.sysName] = probe
  # BUG FIX: pickle payloads are binary - open with 'rb' instead of 'r'
  # (text mode corrupts data on Windows and fails outright on Python 3)
  with open(os.path.join(DATA_DIR, 'probeCmdBaseline.pkl'), 'rb') as probeFileHandle:
    baselineProbes = pickle.load(probeFileHandle)
  for probe in baselineProbes:
    baselineProbeMap[probe.sysName] = probe
  assert len(probes) == len(baselineProbes)
  findDiff(probeMap, baselineProbeMap)
  for sysName in probeMap:
    assert compareProbes(probeMap[sysName], baselineProbeMap[sysName])
def test_probe_states(capsys, scenarioName):
  """
  Test xpedite probes and probe states for an application against the
  baseline probe states recorded for the given scenario.

  Loads probes for the scenario, indexes them by sysName, and checks each
  one against the scenario's baseline probe map.
  """
  from xpedite.types.probe import compareProbes
  with SCENARIO_LOADER[scenarioName] as scenarios:
    with capsys.disabled():  # probe loading writes to the console; skip capture
      probes = loadProbes(CONTEXT, scenarios)
    probeMap = {probe.sysName: probe for probe in probes}
    assert len(probes) == len(scenarios.baselineProbeMap)
    findDiff(probeMap, scenarios.baselineProbeMap)
    for sysName, probe in probeMap.items():
      assert compareProbes(probe, scenarios.baselineProbeMap[sysName])
def test_probe_states(capsys):
  """
  Test xpedite probes and probe states for the demo application against
  baseline probe states recorded for the application.

  Probes are compared positionally with the pickled baseline, after
  asserting the counts match.
  """
  # cPickle exists only on Python 2; fall back to pickle (C-accelerated on py3)
  try:
    import cPickle as pickle
  except ImportError:
    import pickle
  from xpedite.types.probe import compareProbes
  profileInfo = loadProfileInfo(os.path.join(dataDir, 'profileInfo.py'), remote)
  with capsys.disabled():  # probe loading emits console output; bypass pytest capture
    probes = loadProbes(DEMO_BINARY, profileInfo, remote)
  # BUG FIX: pickle payloads are binary - open with 'rb' instead of 'r'
  # (text mode corrupts data on Windows and fails outright on Python 3)
  with open(os.path.join(dataDir, 'probeBaseline.pkl'), 'rb') as probeFileHandle:
    baselineProbes = pickle.load(probeFileHandle)
  assert len(probes) == len(baselineProbes)
  for probe, baselineProbe in zip(probes, baselineProbes):
    assert compareProbes(probe, baselineProbe)
def buildTimelineStats(category, route, probes, txnSubCollection): # pylint: disable=too-many-locals
  """
  Builds timeline statistics from a subcollection of transactions

  For each transaction, constructs a Timeline of TimePoints - one per pair of
  adjacent probes, plus an aggregate endpoint spanning the whole transaction.
  Cycle (tsc) deltas are converted to time via cpuInfo, and pmc/topdown deltas
  are accumulated into the delta series repository.

  :param category: Category of the transactions in this subcollection
  :param route: Sequence of probes the transactions are expected to traverse
  :param probes: List of probes enabled for a profiling session
  :param txnSubCollection: A subcollection of transactions

  :return: TimelineStats holding the timeline collection and delta series repo
  :raises InvariantViloation: on probe mismatch, or on a counter with missing
    tsc, missing pmc samples, or missing counter data
  """
  from xpedite.types import InvariantViloation
  begin = time.time()
  cpuInfo = txnSubCollection.cpuInfo
  topdownMetrics = txnSubCollection.topdownMetrics
  timelineCollection = []
  topdownKeys = topdownMetrics.topdownKeys() if topdownMetrics else []
  deltaSeriesRepo = DeltaSeriesRepo(txnSubCollection.events, topdownKeys, probes)
  pmcNames = deltaSeriesRepo.pmcNames
  eventsMap = deltaSeriesRepo.buildEventsMap()
  timelineStats = TimelineStats(
    txnSubCollection.name, cpuInfo, category, route, probes, timelineCollection, deltaSeriesRepo
  )
  tscDeltaSeriesCollection = deltaSeriesRepo.getTscDeltaSeriesCollection()
  pmcCount = len(txnSubCollection.events) if txnSubCollection.events else 0
  inceptionTsc = None  # tsc of the first counter ever seen; anchors per-timeline inception
  defaultIndices = range(len(route))
  totalTxnCount = len(txnSubCollection)
  for txnCount, txn in enumerate(txnSubCollection):
    timeline = Timeline(txn)
    # transactions longer than the route are conflated down to the route's probes
    indices = conflateRoutes(txn.route, route) if len(txn) > len(route) else defaultIndices
    firstCounter = prevCounter = None
    maxTsc = 0
    i = -1
    # endpoint aggregates whole-transaction duration and pmc deltas
    endpoint = TimePoint('end', 0, deltaPmcs=([0]* pmcCount if pmcCount > 0 else None))
    for j in indices:
      i += 1
      probe = probes[i]
      counter = txn[j]
      if not compareProbes(probe, counter.probe):
        raise InvariantViloation('category [{}] has mismatch of probes '
          '"{}" vs "{}" in \n\ttransaction {}]\n\troute {}'.format(
            category, probe, counter.probe, txn.txnId, probes
          )
        )
      if counter:
        tsc = counter.tsc
        maxTsc = max(maxTsc, tsc)
        if not firstCounter:
          firstCounter = prevCounter = counter
        elif tsc:
          duration = cpuInfo.convertCyclesToTime(tsc - prevCounter.tsc)
          point = cpuInfo.convertCyclesToTime(prevCounter.tsc - firstCounter.tsc)
          # the time point is attributed to the *previous* probe (span from prev to here)
          timePoint = TimePoint(probes[i-1].name, point, duration, data=prevCounter.data)
          if len(counter.pmcs) < pmcCount:
            raise InvariantViloation(
              'category [{}] has transaction {} with counter {} '
              'missing pmc samples {}/{}'.format(
                category, txn.txnId, counter, len(counter.pmcs), pmcCount
              )
            )
          if pmcCount != 0:
            timePoint.pmcNames = pmcNames
            timePoint.deltaPmcs = []
            for k in range(pmcCount):
              # pmc deltas are only meaningful within one thread; NAN across threads,
              # and cross-thread deltas are excluded from the endpoint aggregate
              deltaPmc = counter.pmcs[k] - prevCounter.pmcs[k] if counter.threadId == prevCounter.threadId else NAN
              endpoint.deltaPmcs[k] += (deltaPmc if counter.threadId == prevCounter.threadId else 0)
              timePoint.deltaPmcs.append(deltaPmc)
              deltaSeriesRepo[pmcNames[k]][i-1].addDelta(deltaPmc)
            if topdownMetrics:
              counterMap = CounterMap(eventsMap, timePoint.deltaPmcs)
              timePoint.topdownValues = topdownMetrics.compute(counterMap)
              for td in timePoint.topdownValues:
                deltaSeriesRepo[td.name][i-1].addDelta(td.value)
          timeline.addTimePoint(timePoint)
          tscDeltaSeriesCollection[i-1].addDelta(duration)
          prevCounter = counter
        else:
          raise InvariantViloation(
            'category [{}] has transaction {} with missing tsc for probe {}/counter {}'.format(
              category, txn.txnId, probe, counter
            )
          )
      else:
        # BUG FIX: format args were (category, probe, txn.txnId), but the
        # placeholders read 'transaction {} with probe {}' - txnId must come first
        raise InvariantViloation(
          'category [{}] has transaction {} with probe {} missing counter data'.format(
            category, txn.txnId, probe
          )
        )
    if prevCounter:
      # close the timeline with a zero-duration point for the last probe
      point = cpuInfo.convertCyclesToTime(prevCounter.tsc - firstCounter.tsc)
      timeline.addTimePoint(TimePoint(probes[-1].name, point, 0, data=prevCounter.data))
      endpoint.duration = cpuInfo.convertCyclesToTime(maxTsc - firstCounter.tsc)
      if pmcCount != 0:
        endpoint.pmcNames = pmcNames
        for k, deltaPmc in enumerate(endpoint.deltaPmcs):
          deltaSeriesRepo[pmcNames[k]][-1].addDelta(deltaPmc)
        if topdownMetrics:
          counterMap = CounterMap(eventsMap, endpoint.deltaPmcs)
          endpoint.topdownValues = topdownMetrics.compute(counterMap)
          for td in endpoint.topdownValues:
            deltaSeriesRepo[td.name][-1].addDelta(td.value)
      timeline.endpoint = endpoint
      timelineCollection.append(timeline)
      tscDeltaSeriesCollection[-1].addDelta(endpoint.duration)
    elapsed = time.time() - begin
    if elapsed >= 5:
      # periodic progress report for long-running builds (at most once per 5s)
      LOGGER.completed(
        '\n\tprocessed %d out of %d transactions | %0.2f%% complete |',
        txnCount, totalTxnCount, float(100 * float(txnCount)/float(totalTxnCount))
      )
      begin = time.time()
    if not inceptionTsc:
      inceptionTsc = firstCounter.tsc
      timeline.inception = 0
    else:
      # offset of this timeline from the first one, scaled down by 1000
      # (presumably us -> ms given convertCyclesToTime - TODO confirm units)
      timeline.inception = int(cpuInfo.convertCyclesToTime(firstCounter.tsc - inceptionTsc) / 1000)
  return timelineStats