Example #1
    def _queryTimeBounds(self, attrs):
        d = Stuff()

        # Determine query time span and granularity.
        d.span = attrs.get('span', 'hour')
        if d.span not in ('hour', 'day', 'week', 'month', 'year'):
            d.span = 'hour'

        d.end = (attrs.get('end', '').isdigit()
                 and int(attrs['end'])) or time.time()
        range = timeseries(d.span, d.end, d.end)[0]
        if d.end != range[0]:
            d.end = range[1]

        if 'start' in attrs and attrs['start'].isdigit():
            d.start = int(attrs['start'])
            if d.span == 'hour' and d.start < d.end - 336 * 3600:
                d.start = d.end - 336 * 3600
            elif d.span == 'day' and d.start < d.end - 365 * 86400:
                d.start = d.end - 365 * 86400
            elif d.span == 'week' and d.start < d.end - 312 * 7 * 86400:
                d.start = d.end - 312 * 7 * 86400
        elif d.span == 'hour':
            d.start = d.end - 96 * 3600
        elif d.span == 'day':
            d.start = d.end - 90 * 86400
        elif d.span == 'week':
            d.start = d.end - 52 * 7 * 86400
        else:
            d.start = 0

        d.start = max(d.start, time2gm(year=2004, month=6, day=1))
        d.start = timeseries(d.span, d.start, d.start)[0][0]
        d.querybin = ((d.span == 'hour' and 3600) or 86400)
        return d
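
Example #1 (and the examples below) relies on a few project helpers (Stuff, timeseries, time2gm) whose definitions are not shown on this page. Purely as a reading aid, here is a minimal sketch of what they might look like, inferred only from how they are called; the real implementations almost certainly differ (in particular, the real timeseries presumably handles the week/month/year spans properly rather than falling back to day-sized bins):

import calendar

class Stuff:
    # Assumed: a plain attribute bag used to carry the query options.
    pass

def time2gm(year, month, day):
    # Assumed: convert a UTC calendar date into a Unix timestamp.
    return calendar.timegm((year, month, day, 0, 0, 0, 0, 0, 0))

def timeseries(span, start, end):
    # Assumed: return a list of (bin_start, bin_end) pairs covering
    # [start, end], aligned to whole bins.  Only hour- and day-sized
    # bins are sketched here.
    width = 3600 if span == 'hour' else 86400
    lo = int(start) - int(start) % width
    series = []
    while lo <= end:
        series.append((lo, lo + width))
        lo += width
    return series
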
Example #2
  def _queryTimeBounds(self, attrs):
    d = Stuff()

    # Determine query time span and granularity.
    d.span = attrs.get('span', 'hour')
    if d.span not in ('hour', 'day', 'week', 'month', 'year'):
      d.span = 'hour'

    d.end = (attrs.get('end', '').isdigit() and int(attrs['end'])) or time.time()
    range = timeseries(d.span, d.end, d.end)[0]
    if d.end != range[0]:
      d.end = range[1]

    if 'start' in attrs and attrs['start'].isdigit():
      d.start = int(attrs['start'])
      if d.span == 'hour' and d.start < d.end-336*3600:
        d.start = d.end - 336*3600
      elif d.span == 'day' and d.start < d.end-365*86400:
        d.start = d.end - 365*86400
      elif d.span == 'week' and d.start < d.end-312*7*86400:
        d.start = d.end - 312*7*86400
    elif d.span == 'hour':
      d.start = d.end - 96*3600
    elif d.span == 'day':
      d.start = d.end - 90*86400
    elif d.span == 'week':
      d.start = d.end - 52*7*86400
    else:
      d.start = 0

    d.start = max(d.start, time2gm(year=2004, month=6, day=1))
    d.start = timeseries(d.span, d.start, d.start)[0][0]
    d.querybin = ((d.span == 'hour' and 3600) or 86400)
    return d
Example #3
  def _linkData(self, dbs, attrs):
    d = self._linkOptions(attrs)

    # Define the binning.
    series = timeseries(d.span, d.start, d.end)
    data = [{} for t in series]
    bins = numpy.zeros(len(series)+2)
    for i in xrange(0, len(series)):
      bins[i+1] = series[i][0]
    bins[-1] = series[-1][1]

    # Group raw database data as per request.
    d.allkeys = {}
    d.allnodes = {}
    self.lock.acquire()
    try:
      for db in dbs:
        for name, in self._queryData(db, 'nodes', 300, self._fetchNodes):
          d.allnodes.setdefault(name, {}).setdefault(db['def']['name'], 1)
        rows = self._queryTimeSeries(db, 'link-events', 300,
                                     self._fetchLinkEvents,
                                     d.querybin, d.start, d.end)
        if len(rows):
          binning = numpy.digitize([r[0] for r in rows], bins)
        for i in xrange(0, len(rows)):
          binidx = binning[i]
          if binidx == 1 or binidx == len(data)+2:
            continue

          row = rows[i]
          bin = row[0]
          dest = row[1]
          src = row[2]

          destmss = (dest.find("_MSS") >= 0)
          srcmss = (src.find("_MSS") >= 0)
          if d.links == 'mss':
            if d.qty == 'destination' and not destmss: continue
            if d.qty == 'source' and not srcmss: continue
            if d.qty == 'link' and not (srcmss or destmss): continue
          elif d.links == 'nonmss' and (srcmss or destmss): continue
          if d.srcfilter and not re.search(d.srcfilter, src): continue
          if d.destfilter and not re.search(d.destfilter, dest): continue

          (key, label) = d.keyfunc(db['def']['name'], dest, src)
          d.allkeys[key] = label

          bindata = data[binidx-2]
          if key in bindata:
            bindata[key] += row[3:]
          else:
            bindata[key] = numpy.array([float(x) for x in row[3:]])
      for db in dbs:
        self._queryPruneCache(db)
        db['db'] = None
    finally:
      self.lock.release()

    self._makeLegend(d)
    return (d, series, data)
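
The bin bookkeeping in _linkData is easier to see on a toy input. The fragment below uses invented numbers purely to show how numpy.digitize maps timestamps onto the padded bin edges: index 1 means the row falls before the requested range, index len(data)+2 means it falls after it, and anything else lands in data[binidx - 2]:

import numpy

# Invented three-bin series of (start, end) pairs.
series = [(100, 110), (110, 120), (120, 130)]
data = [{} for t in series]

bins = numpy.zeros(len(series) + 2)
for i in range(len(series)):
    bins[i + 1] = series[i][0]
bins[-1] = series[-1][1]
# bins is now [0, 100, 110, 120, 130]

timestamps = [50, 105, 119, 125, 500]
binning = numpy.digitize(timestamps, bins)
# binning is [1, 2, 3, 4, 5]: 50 falls before the range (index 1),
# 500 falls after it (index len(data)+2 == 5), and 105, 119, 125 map
# to data[binidx - 2], i.e. slots 0, 1 and 2 of the series.
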
Example #4
    def _blockLatencyData(self, dbs, attrs):
        d = self._linkOptions(attrs)

        # Define the binning.
        series = timeseries(d.span, d.start, d.end)
        data = [{} for t in series]
        bins = numpy.zeros(len(series) + 2)
        for i in xrange(0, len(series)):
            bins[i + 1] = series[i][0]
        bins[-1] = series[-1][1]

        # Group raw database data as per request.
        d.allkeys = {}
        d.allnodes = {}
        self.lock.acquire()
        try:
            for db in dbs:
                d.allnodes.update((name, 1) for (name, ) in self._queryData(
                    db, 'nodes', 300, self._fetchNodes))
                rows = self._queryData(db, 'block-latency', 300,
                                       self._fetchBlockLatency, d.start, d.end)
                if len(rows):
                    binning = numpy.digitize([r[0] for r in rows], bins)
                for i in xrange(0, len(rows)):
                    binidx = binning[i]
                    if binidx == 1 or binidx == len(data) + 2:
                        continue

                    row = rows[i]
                    tdone = row[0]
                    dest = row[1]
                    nbytes = row[2]
                    latency = row[3]

                    destmss = (dest.find("_MSS") >= 0)
                    if d.links == 'mss' and not destmss: continue
                    elif d.links == 'nonmss' and destmss: continue
                    if d.destfilter and not re.search(d.destfilter, dest):
                        continue

                    (key, label) = d.keyfunc(db['def']['name'], dest, dest)
                    d.allkeys[key] = label

                    data[binidx - 2].setdefault(key, []).append(
                        [float(nbytes), float(latency)])
            for db in dbs:
                self._queryPruneCache(db)
                db['db'] = None
        finally:
            self.lock.release()

        self._makeLegend(d)
        return (d, series, data)
Example #5
  def _blockLatencyData(self, dbs, attrs):
    d = self._linkOptions(attrs)

    # Define the binning.
    series = timeseries(d.span, d.start, d.end)
    data = [{} for t in series]
    bins = numpy.zeros(len(series)+2)
    for i in xrange(0, len(series)):
      bins[i+1] = series[i][0]
    bins[-1] = series[-1][1]

    # Group raw database data as per request.
    d.allkeys = {}
    d.allnodes = {}
    self.lock.acquire()
    try:
      for db in dbs:
        d.allnodes.update((name, 1) for (name,) in
                          self._queryData(db, 'nodes', 300, self._fetchNodes))
        rows = self._queryData(db, 'block-latency', 300,
                               self._fetchBlockLatency,
                               d.start, d.end)
        if len(rows):
          binning = numpy.digitize([r[0] for r in rows], bins)
        for i in xrange(0, len(rows)):
          binidx = binning[i]
          if binidx == 1 or binidx == len(data)+2:
            continue

          row = rows[i]
          tdone = row[0]
          dest = row[1]
          nbytes = row[2]
          latency = row[3]

          destmss = (dest.find("_MSS") >= 0)
          if d.links == 'mss' and not destmss: continue
          elif d.links == 'nonmss' and destmss: continue
          if d.destfilter and not re.search(d.destfilter, dest): continue

          (key, label) = d.keyfunc(db['def']['name'], dest, dest)
          d.allkeys[key] = label

          data[binidx-2].setdefault(key, []).append([float(nbytes), float(latency)])
      for db in dbs:
        self._queryPruneCache(db)
        db['db'] = None
    finally:
      self.lock.release()

    self._makeLegend(d)
    return (d, series, data)
Example #6
        def state(self, session):
            start = ''
            end = session.get('phedex.period.end', time.time())
            span = session.get('phedex.period.span', 'hour')
            ntime = session.get('phedex.period.time', '')
            end = timeseries(span, end, end)[0][1]
            if ntime != '':
                if span == 'hour':
                    start = end - 3600 * ntime
                elif span == 'day':
                    start = end - 86400 * ntime
                elif span == 'week':
                    start = end - 7 * 86400 * ntime
                else:
                    start = 0
                ts = timeseries(span, start, end)
                start = (ntime <= len(ts) and ts[-ntime][0]) or ts[0][0]
                end = ts[-1][1]

            return "{kind: '%s', version: %d, start: %s, end: %s, %s}" \
               % (self.plugin, time.time(), repr(start), repr(end),
            ",".join("'%s': %s" % (x.replace("phedex.", ""), repr(session[x]))
             for x in sorted(session.keys())
             if x.startswith("phedex.") and x != 'phedex.view'))
Example #7
    def state(self, session):
      start = ''
      end = session.get('phedex.period.end', time.time())
      span = session.get('phedex.period.span', 'hour')
      ntime = session.get('phedex.period.time', '')
      end = timeseries(span, end, end)[0][1]
      if ntime != '':
        if span == 'hour':
          start = end - 3600*ntime
        elif span == 'day':
          start = end - 86400*ntime
        elif span == 'week':
          start = end - 7*86400*ntime
        else:
          start = 0
        ts = timeseries(span, start, end)
        start = (ntime <= len(ts) and ts[-ntime][0]) or ts[0][0]
        end = ts[-1][1]

      return "{kind: '%s', version: %d, start: %s, end: %s, %s}" \
         % (self.plugin, time.time(), repr(start), repr(end),
	    ",".join("'%s': %s" % (x.replace("phedex.", ""), repr(session[x]))
		     for x in sorted(session.keys())
		     if x.startswith("phedex.") and x != 'phedex.view'))
Example #8
    def _linkData(self, dbs, attrs):
        d = self._linkOptions(attrs)

        # Define the binning.
        series = timeseries(d.span, d.start, d.end)
        data = [{} for t in series]
        bins = numpy.zeros(len(series) + 2)
        for i in xrange(0, len(series)):
            bins[i + 1] = series[i][0]
        bins[-1] = series[-1][1]

        # Group raw database data as per request.
        d.allkeys = {}
        d.allnodes = {}
        self.lock.acquire()
        try:
            for db in dbs:
                for name, in self._queryData(db, 'nodes', 300,
                                             self._fetchNodes):
                    d.allnodes.setdefault(name,
                                          {}).setdefault(db['def']['name'], 1)
                rows = self._queryTimeSeries(db, 'link-events', 300,
                                             self._fetchLinkEvents, d.querybin,
                                             d.start, d.end)
                if len(rows):
                    binning = numpy.digitize([r[0] for r in rows], bins)
                for i in xrange(0, len(rows)):
                    binidx = binning[i]
                    if binidx == 1 or binidx == len(data) + 2:
                        continue

                    row = rows[i]
                    bin = row[0]
                    dest = row[1]
                    src = row[2]

                    destmss = (dest.find("_MSS") >= 0)
                    srcmss = (src.find("_MSS") >= 0)
                    if d.links == 'mss':
                        if d.qty == 'destination' and not destmss: continue
                        if d.qty == 'source' and not srcmss: continue
                        if d.qty == 'link' and not (srcmss or destmss):
                            continue
                    elif d.links == 'nonmss' and (srcmss or destmss):
                        continue
                    if d.srcfilter and not re.search(d.srcfilter, src):
                        continue
                    if d.destfilter and not re.search(d.destfilter, dest):
                        continue

                    (key, label) = d.keyfunc(db['def']['name'], dest, src)
                    d.allkeys[key] = label

                    bindata = data[binidx - 2]
                    if key in bindata:
                        bindata[key] += row[3:]
                    else:
                        bindata[key] = numpy.array([float(x) for x in row[3:]])
            for db in dbs:
                self._queryPruneCache(db)
                db['db'] = None
        finally:
            self.lock.release()

        self._makeLegend(d)
        return (d, series, data)
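
One detail worth calling out in _linkData (Examples #3 and #8): the first row seen for a key is stored as a numpy array of floats, so every later row for the same key can be folded in elementwise with +=. A self-contained illustration with invented row values:

import numpy

bindata = {}
# Rows shaped like (bin, dest, src, <numeric counters...>); values invented.
rows = [(1200, 'T1_X_MSS', 'T2_Y', 5, 1024.0),
        (1200, 'T1_X_MSS', 'T2_Y', 3, 2048.0)]
key = 'T1_X_MSS'

for row in rows:
    if key in bindata:
        bindata[key] += row[3:]   # elementwise add onto the existing array
    else:
        bindata[key] = numpy.array([float(x) for x in row[3:]])

# bindata[key] is now array([8., 3072.])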