Code example #1
  def aggregateTxnsByRoutes(self, txnSubCollection, ancestry):
    """
    Aggregates transactions in a given subcollection to child collections
    in an ancestry node with conflatable routes

    :param txnSubCollection: Transaction subcollection to be aggregated
    :param ancestry: A node in the tree collection

    """
    from xpedite.types.route import conflateRoutes
    srcRouteMap = self.srcTree.getChildren(ancestry)
    routes = srcRouteMap.keys() if srcRouteMap else []
    routeMap = {}
    for txn in txnSubCollection:
      for dstRoute in routes:
        if conflateRoutes(txn.route, dstRoute):
          addTxn(routeMap, txnSubCollection, dstRoute, txn)
    return routeMap
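The call to addTxn above is not defined in this snippet; it is presumably a module level helper in the same file. A minimal sketch of what such a helper could do is shown below; the bare list is an assumption, and the real helper presumably builds a proper transaction subcollection (derived from txnSubCollection) for each route.

def addTxn(routeMap, txnSubCollection, route, txn):
  """Sketch only - groups txn under the given route in routeMap (real helper not shown)"""
  # txnSubCollection is kept in the signature to match the call site; a real
  # implementation could derive a new subcollection from it, this sketch ignores it
  if route not in routeMap:
    routeMap[route] = []
  routeMap[route].append(txn)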
Code example #2
File: conflator.py  Project: zeta1999/Xpedite
  def conflateTimelineStats(self, route, src, dst):
    """
    Conflates timelines from source timeline stats to destination timeline stats

    :param route: route used for conflation
    :param src: Source timeline stats
    :param dst: Destination timeline stats

    """
    routeIndices = conflateRoutes(src.route, route)
    if routeIndices:
      topdownMetrics = self.getTopdownMetrics(src.cpuInfo.cpuId, src.topdownKeys)
      self.conflateTimelineCollection(
        routeIndices, src.timelineCollection, dst.timelineCollection,
        src.buildEventsMap(), topdownMetrics
      )
      return True
    return None
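Route conflation drives both snippets above, so a rough illustration may help. The sketch below treats a route as a plain sequence of probe names and returns, for each probe of the shorter (destination) route, the index of its match in the longer (source) route, or None when the destination is not an ordered subset of the source. This mirrors how the return value is consumed in examples #2 and #4 (indices into the source route), but it is only an illustration; the real xpedite.types.route.conflateRoutes operates on Route objects and may apply additional rules. The name conflateRoutesSketch is made up for this example.

def conflateRoutesSketch(srcRoute, dstRoute):
  """Illustration only - indices of dstRoute's probes within srcRoute, in order, else None"""
  indices = []
  cursor = 0
  for probe in dstRoute:
    while cursor < len(srcRoute) and srcRoute[cursor] != probe:
      cursor += 1                 # skip source probes absent from the destination route
    if cursor == len(srcRoute):
      return None                 # probe never found - the routes don't conflate
    indices.append(cursor)
    cursor += 1
  return indices

# conflateRoutesSketch(['begin', 'parse', 'validate', 'end'], ['begin', 'validate', 'end'])
# -> [0, 2, 3]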
Code example #3
File: commands.py  Project: morganstanley/Xpedite
def diffTxn(lhs, rhs, profiles):
  """
  Compares duration/pmc values for a pair of transactions

  :param lhs: Transaction id (lhs value of comparison)
  :type lhs: int
  :param rhs: Transaction id (rhs value of comparison)
  :type rhs: int
  :param profiles: Transactions from the current profile session
  :type profiles: xpedite.report.profile.Profiles

  """
  from xpedite.analytics.timelineFilter   import locateTimeline
  from xpedite.report.diff                import DiffBuilder
  from xpedite.types.route                import conflateRoutes
  from xpedite.analytics.conflator        import Conflator
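  # display, HTML and ERROR_TEXT are module level names in commands.py that this
  # snippet does not show; display/HTML presumably come from IPython.display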

  timeline1 = locateTimeline(profiles, lhs)
  timeline2 = locateTimeline(profiles, rhs)
  if timeline1 and timeline2:
    lhs, rhs = (timeline1, timeline2) if len(timeline1) > len(timeline2) else (timeline2, timeline1)
    routeIndices = conflateRoutes(lhs.txn.route, rhs.txn.route)
    if not routeIndices:
      display(HTML(ERROR_TEXT.format('Transactions {} and {} are not comparable'.format(lhs.txnId, rhs.txnId))))
      return
    topdownMetrics = Conflator().getTopdownMetrics(profiles.cpuInfo.cpuId, profiles.topdownKeys)
    conflatedTimeline = Conflator().conflateTimeline(lhs, routeIndices, profiles.eventsMap, topdownMetrics)
    display(HTML(str(DiffBuilder().buildDiffTable(conflatedTimeline, rhs))))
  else:
    if not (timeline1 or timeline2):
      display(HTML(ERROR_TEXT.format(
        'Can\'t find transactions. Are these ({} and {}) valid txn ids?'.format(lhs, rhs)
      )))
    else:
      txnId = rhs if timeline1 else lhs
      display(HTML(ERROR_TEXT.format('Can\'t find transaction {}, is the txn id valid?'.format(txnId))))
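diffTxn is meant to be invoked from an interactive Xpedite (Jupyter) session. A hypothetical invocation might look like the line below; the transaction ids are placeholders, and profiles is assumed to be the Profiles object of the current session.

diffTxn(1024, 2048, profiles)   # renders a diff table if the two routes conflate, an error otherwise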
Code example #4
def buildTimelineStats(category, route, probes, txnSubCollection): # pylint: disable=too-many-locals
  """
  Builds timeline statistics from a subcollection of transactions

  :param probes: List of probes enabled for a profiling session
  :param txnSubCollection: A subcollection of transactions

  """
  from xpedite.types import InvariantViloation
  begin = time.time()
  cpuInfo = txnSubCollection.cpuInfo
  topdownMetrics = txnSubCollection.topdownMetrics
  timelineCollection = []
  topdownKeys = topdownMetrics.topdownKeys() if topdownMetrics else []
  deltaSeriesRepo = DeltaSeriesRepo(txnSubCollection.events, topdownKeys, probes)
  pmcNames = deltaSeriesRepo.pmcNames
  eventsMap = deltaSeriesRepo.buildEventsMap()
  timelineStats = TimelineStats(
    txnSubCollection.name, cpuInfo, category, route,
    probes, timelineCollection, deltaSeriesRepo
  )
  tscDeltaSeriesCollection = deltaSeriesRepo.getTscDeltaSeriesCollection()

  pmcCount = len(txnSubCollection.events) if txnSubCollection.events else 0
  inceptionTsc = None
  defaultIndices = range(len(route))

  totalTxnCount = len(txnSubCollection)
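  # walk every transaction, pairing each enabled probe with its sampled counter and
  # accumulating time/pmc deltas between consecutive probes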
  for txnCount, txn in enumerate(txnSubCollection):
    timeline = Timeline(txn)
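    # when the txn carries more counters than the route has probes, conflate to find
    # the counter indices matching the route; otherwise the identity mapping suffices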
    indices = conflateRoutes(txn.route, route) if len(txn) > len(route) else defaultIndices
    firstCounter = prevCounter = None
    maxTsc = 0
    i = -1
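    # the 'end' time point accumulates the whole-transaction duration and pmc deltas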
    endpoint = TimePoint('end', 0, deltaPmcs=([0] * pmcCount if pmcCount > 0 else None))
    for j in indices:
      i += 1
      probe = probes[i]
      counter = txn[j]
      if not compareProbes(probe, counter.probe):
        raise InvariantViloation('category [{}] has mismatch of probes '
          '"{}" vs "{}" in \n\ttransaction [{}]\n\troute {}'.format(
            category, probe, counter.probe, txn.txnId, probes
          )
        )

      if counter:
        tsc = counter.tsc
        maxTsc = max(maxTsc, tsc)
        if not firstCounter:
          firstCounter = prevCounter = counter
        elif tsc:
          duration = cpuInfo.convertCyclesToTime(tsc - prevCounter.tsc)
          point = cpuInfo.convertCyclesToTime(prevCounter.tsc - firstCounter.tsc)
          timePoint = TimePoint(probes[i-1].name, point, duration, data=prevCounter.data)

          if len(counter.pmcs) < pmcCount:
            raise InvariantViloation(
              'category [{}] has transaction {} with counter {} '
              'missing pmc samples {}/{}'.format(
                category, txn.txnId, counter, len(counter.pmcs), pmcCount
              )
            )
          if pmcCount != 0:
            timePoint.pmcNames = pmcNames
            timePoint.deltaPmcs = []
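            # pmc deltas are meaningful only when consecutive counters were sampled on
            # the same thread; otherwise record NAN and leave the endpoint total untouched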
            for k in range(pmcCount):
              deltaPmc = counter.pmcs[k] - prevCounter.pmcs[k] if counter.threadId == prevCounter.threadId else NAN
              endpoint.deltaPmcs[k] += (deltaPmc if counter.threadId == prevCounter.threadId else 0)
              timePoint.deltaPmcs.append(deltaPmc)
              deltaSeriesRepo[pmcNames[k]][i-1].addDelta(deltaPmc)
            if topdownMetrics:
              counterMap = CounterMap(eventsMap, timePoint.deltaPmcs)
              timePoint.topdownValues = topdownMetrics.compute(counterMap)
              for td in timePoint.topdownValues:
                deltaSeriesRepo[td.name][i-1].addDelta(td.value)
          timeline.addTimePoint(timePoint)
          tscDeltaSeriesCollection[i-1].addDelta(duration)
          prevCounter = counter
        else:
          raise InvariantViloation(
            'category [{}] has transaction {} with missing tsc for probe {}/counter {}'.format(
              category, txn.txnId, probe, counter
            )
          )
      else:
        raise InvariantViloation(
          'category [{}] has transaction {} with probe {} missing counter data'.format(
            category, probe, txn.txnId
          )
        )

    # close the timeline with a zero duration time point at the last visited probe
    if prevCounter:
      point = cpuInfo.convertCyclesToTime(prevCounter.tsc - firstCounter.tsc)
      timeline.addTimePoint(TimePoint(probes[-1].name, point, 0, data=prevCounter.data))

    endpoint.duration = cpuInfo.convertCyclesToTime(maxTsc - firstCounter.tsc)
    if pmcCount != 0:
      endpoint.pmcNames = pmcNames
      for k, deltaPmc in enumerate(endpoint.deltaPmcs):
        deltaSeriesRepo[pmcNames[k]][-1].addDelta(deltaPmc)
      if topdownMetrics:
        counterMap = CounterMap(eventsMap, endpoint.deltaPmcs)
        endpoint.topdownValues = topdownMetrics.compute(counterMap)
        for td in endpoint.topdownValues:
          deltaSeriesRepo[td.name][-1].addDelta(td.value)
    timeline.endpoint = endpoint

    timelineCollection.append(timeline)
    tscDeltaSeriesCollection[-1].addDelta(endpoint.duration)

    # log progress periodically (roughly every 5 seconds) for large collections
    elapsed = time.time() - begin
    if elapsed >= 5:
      LOGGER.completed(
        '\n\tprocessed %d out of %d transactions | %0.2f%% complete |',
        txnCount, totalTxnCount, 100.0 * txnCount / totalTxnCount
      )
      begin = time.time()

    # track each timeline's start (inception) relative to the first transaction's tsc
    if not inceptionTsc:
      inceptionTsc = firstCounter.tsc
      timeline.inception = 0
    else:
      timeline.inception = int(cpuInfo.convertCyclesToTime(firstCounter.tsc - inceptionTsc) / 1000)

  return timelineStats