Code example #1
  def testEmittedSampleDatetime(self):
    key = "bogus-test-key"

    # Establish initial sample datetime

    result = metric_utils.establishLastEmittedSampleDatetime(key, 300)

    # Cleanup
    self.addCleanup(collectorsdb.engineFactory().execute,
      schema.emittedSampleTracker.delete().where(
        (schema.emittedSampleTracker.c.key == key)
      )
    )

    self.assertIsInstance(result, datetime)

    # Update latest emitted sample datetime to now

    now = datetime.utcnow().replace(microsecond=0)
    metric_utils.updateLastEmittedSampleDatetime(key, now)

    # Verify that it was updated

    lastEmittedSample = metric_utils.queryLastEmittedSampleDatetime(key)

    self.assertEqual(now, lastEmittedSample)
    self.assertLess(result, lastEmittedSample)
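For context, here is a minimal sketch (not the project's actual implementation) of what the three metric_utils helpers exercised above plausibly do. The table name, its key/sample_ts columns, and the in-memory SQLite engine are all assumptions standing in for schema.emittedSampleTracker and collectorsdb.engineFactory():

from datetime import datetime, timedelta

import sqlalchemy as sa  # SQLAlchemy 1.x-style select, matching the snippets above

metadata = sa.MetaData()

# Hypothetical stand-in for schema.emittedSampleTracker
emittedSampleTracker = sa.Table(
  "emitted_sample_tracker", metadata,
  sa.Column("key", sa.String(64), primary_key=True),
  sa.Column("sample_ts", sa.DateTime, nullable=False))

engine = sa.create_engine("sqlite://")  # stand-in for collectorsdb.engineFactory()
metadata.create_all(engine)


def establishLastEmittedSampleDatetime(key, aggSec):
  """Store an initial timestamp for key if absent; return the stored value."""
  with engine.begin() as conn:
    row = conn.execute(
      sa.select([emittedSampleTracker.c.sample_ts])
      .where(emittedSampleTracker.c.key == key)).first()
    if row is not None:
      return row[0]
    # Start one aggregation period in the past so the first real sample is
    # strictly newer than the established value
    initial = datetime.utcnow().replace(microsecond=0) - timedelta(seconds=aggSec)
    conn.execute(emittedSampleTracker.insert().values(key=key, sample_ts=initial))
    return initial


def updateLastEmittedSampleDatetime(key, sampleDatetime):
  """Overwrite the tracked timestamp for an existing key."""
  with engine.begin() as conn:
    conn.execute(emittedSampleTracker.update()
                 .where(emittedSampleTracker.c.key == key)
                 .values(sample_ts=sampleDatetime))


def queryLastEmittedSampleDatetime(key):
  """Return the tracked timestamp for key, or None if never established."""
  with engine.begin() as conn:
    return conn.execute(
      sa.select([emittedSampleTracker.c.sample_ts])
      .where(emittedSampleTracker.c.key == key)).scalar()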
Code example #2
    def testUpdateLastEmittedSampleDatetime(self, collectorsdbMock):
        collectorsdbMock.engineFactory.return_value = Mock(
            spec_set=sqlalchemy.engine.Engine)

        metric_utils.updateLastEmittedSampleDatetime("twitter-tweets-volume",
                                                     datetime.utcnow())

        args, _ = (collectorsdbMock.engineFactory.return_value.execute.
                   call_args_list[0])
        self.assertTrue(args)
        self.assertIsInstance(args[0], sqlalchemy.sql.dml.Update)
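The snippet above relies on a decorator, not shown, that injects collectorsdbMock. Below is a minimal sketch of that wiring with mock.patch; the patch target path is an assumption for illustration and must match wherever metric_utils actually imports collectorsdb from:

import unittest
from datetime import datetime

import sqlalchemy
import sqlalchemy.sql.dml
from mock import Mock, patch  # standalone mock package on Python 2

from taurus.metric_collectors import metric_utils


class MetricUtilsTest(unittest.TestCase):

    # Hypothetical patch target: replace collectorsdb as seen by metric_utils
    # so that no real database engine is ever created.
    @patch("taurus.metric_collectors.metric_utils.collectorsdb")
    def testUpdateLastEmittedSampleDatetime(self, collectorsdbMock):
        collectorsdbMock.engineFactory.return_value = Mock(
            spec_set=sqlalchemy.engine.Engine)

        metric_utils.updateLastEmittedSampleDatetime("twitter-tweets-volume",
                                                     datetime.utcnow())

        # The first positional argument of the first execute() call should be
        # the UPDATE statement built against the tracker table
        args, _ = (collectorsdbMock.engineFactory.return_value.execute
                   .call_args_list[0])
        self.assertTrue(args)
        self.assertIsInstance(args[0], sqlalchemy.sql.dml.Update)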
Code example #3
def _forwardNewsVolumeMetrics(metricSpecs,
                              lastEmittedAggTime,
                              stopDatetime,
                              periodSec,
                              metricDestAddr):
  """ Query news volume metrics since the given last emitted timestamp through
  stopDatetime and forward them to htmengine's Metric Listener. Update the
  datetime of the last successfully-emitted news volume metric batch in the
  database.

  NOTE: forwarding will be aborted upon failure to connect to Metric Listener. In
    this case, an error will be logged, and the function will return the UTC
    timestamp of the last successfully-emitted sample aggregation interval. Once
    Metric Listener comes online, a subsequent call to this function will catch
    up by forwarding the stored samples since last successful emission.

  :param metricSpecs: a sequence of NewsVolumeMetricSpec objects corresponding to
    the metrics to be emitted
  :param lastEmittedAggTime: UTC datetime of last successfully-emitted sample
    batch
  :param stopDatetime: non-inclusive upper bound UTC datetime for forwarding
  :param periodSec: aggregation period in seconds
  :param metricDestAddr: two-tuple (metricDestHost, metricDestPort)
  :returns: UTC timestamp of the last successfully-emitted sample batch.
  :rtype: datetime.datetime
  """
  periodTimedelta = timedelta(seconds=periodSec)
  aggStartDatetime = lastEmittedAggTime + periodTimedelta
  while aggStartDatetime < stopDatetime:
    # Get News Volume metrics for one aggregation interval
    aggStopDatetime = aggStartDatetime + periodTimedelta
    symbolToNewsVolumeMap = defaultdict(
      int,
      _queryNewsVolumes(aggStartDatetime, aggStopDatetime))

    # Generate metric samples
    epochTimestamp = date_time_utils.epochFromNaiveUTCDatetime(aggStartDatetime)
    samples = tuple(
      dict(
        metricName=spec.metric,
        value=symbolToNewsVolumeMap[spec.symbol],
        epochTimestamp=epochTimestamp)
      for spec in metricSpecs
    )

    # Emit samples to Metric Listener
    try:
      with metric_utils.metricDataBatchWrite(log=g_log) as putSample:
        for sample in samples:
          putSample(**sample)
    except Exception:
      g_log.exception("Failure while emitting metric data for agg=%s "
                      "containing numSamples=%d",
                      aggStartDatetime, len(samples))
      return lastEmittedAggTime
    else:
      g_log.info("Forwarded numSamples=%d for agg=%s",
                 len(samples), aggStartDatetime)

    # Update db with last successfully-emitted datetime
    metric_utils.updateLastEmittedSampleDatetime(
      key=_EMITTED_NEWS_VOLUME_SAMPLE_TRACKER_KEY,
      sampleDatetime=aggStartDatetime)

    # Set up for next iteration
    lastEmittedAggTime = aggStartDatetime
    aggStartDatetime = aggStopDatetime

  return lastEmittedAggTime
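One plausible way a collector service could drive the function above; the forwardingLoop wrapper is illustrative (not the project's agent code), but it shows the catch-up behavior described in the docstring: after a Metric Listener outage, the next pass re-forwards everything since the last successfully-emitted interval.

import time
from datetime import datetime


def forwardingLoop(metricSpecs, periodSec, metricDestAddr):
  # Resume from the persisted cursor (or establish one on the first run)
  lastEmittedAggTime = metric_utils.establishLastEmittedSampleDatetime(
    key=_EMITTED_NEWS_VOLUME_SAMPLE_TRACKER_KEY, aggSec=periodSec)

  while True:
    # Forward all complete aggregation intervals accumulated so far; if the
    # Metric Listener is down, the returned cursor is unchanged and the next
    # pass retries from the same point
    lastEmittedAggTime = _forwardNewsVolumeMetrics(
      metricSpecs=metricSpecs,
      lastEmittedAggTime=lastEmittedAggTime,
      stopDatetime=datetime.utcnow(),
      periodSec=periodSec,
      metricDestAddr=metricDestAddr)
    time.sleep(periodSec)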
Code example #4
def main():
    """
  NOTE: main also serves as entry point for "console script" generated by setup
  """
    logging_support.LoggingSupport().initTool()

    try:
        options = _parseArgs()

        g_log.info("Verifying that agents are in hot_standby mode")
        for section in config.sections():
            assert (config.get(section, "opmode") ==
                    ApplicationConfig.OP_MODE_HOT_STANDBY)

        g_log.info("Verifying that the old symbol has been removed from the " "metrics configuration")
        for stockData in metric_utils.getMetricsConfiguration().itervalues():
            assert stockData["symbol"] != options.old_symbol

        if options.twitter and (not options.stocks):
            g_log.info(
                "Migrating ONLY twitter data from old-symbol=%s "
                "to new-symbol=%s",
                options.old_symbol,
                options.new_symbol,
            )
        elif options.stocks and (not options.twitter):
            g_log.info(
                "Migrating ONLY xignite stock data from old-symbol=%s "
                "to new-symbol=%s",
                options.old_symbol,
                options.new_symbol,
            )
            raise NotImplementedError
        else:
            g_log.info(
                "Migrating BOTH twitter and xignite stock data from "
                "old-symbol=%s to new-symbol=%s",
                options.old_symbol,
                options.new_symbol,
            )
            raise NotImplementedError

        oldSymbolTweetPrefix = "TWITTER.TWEET.HANDLE.{symbol}.".format(symbol=options.old_symbol)
        newSymbolTweetPrefix = "TWITTER.TWEET.HANDLE.{symbol}.".format(symbol=options.new_symbol)
        oldSymbolTweetMetricsList = []

        with collectorsdb.engineFactory().begin() as conn:

            g_log.info("Renaming metrics to new symbol")
            if options.twitter:
                oldSymbolTweetsQuery = sql.select([tweetSamplesSchema]).where(
                    tweetSamplesSchema.c.metric.contains(oldSymbolTweetPrefix)
                )
                oldSymbolTweets = conn.execute(oldSymbolTweetsQuery)
                for tweetSample in oldSymbolTweets:
                    newMetricName = "{newPrefix}{metric}".format(
                        newPrefix=newSymbolTweetPrefix, metric=tweetSample.metric[len(oldSymbolTweetPrefix) :]
                    )
                    if tweetSample.metric not in oldSymbolTweetMetricsList:
                        oldSymbolTweetMetricsList.append(tweetSample.metric)

                    updateSampleQuery = (
                        tweetSamplesSchema.update()
                        .where(tweetSamplesSchema.c.seq == tweetSample.seq)
                        .values(metric=newMetricName)
                    )

                    conn.execute(updateSampleQuery)

            g_log.info("Forwarding new twitter metric data to Taurus engine...")
            if options.twitter:
                oldestRecordTs = conn.execute(
                    sql.select([tweetSamplesSchema.c.agg_ts], order_by=tweetSamplesSchema.c.agg_ts.asc())
                ).first()[0]
                lastEmittedAggTime = metric_utils.establishLastEmittedSampleDatetime(
                    key=_EMITTED_TWEET_VOLUME_SAMPLE_TRACKER_KEY, aggSec=options.aggPeriod
                )
                aggOffset = (
                    math.ceil(
                        (epochFromNaiveUTCDatetime(lastEmittedAggTime) - epochFromNaiveUTCDatetime(oldestRecordTs))
                        / options.aggPeriod
                    )
                    * options.aggPeriod
                )
                aggStartDatetime = (
                    lastEmittedAggTime - timedelta(seconds=aggOffset) - timedelta(seconds=options.aggPeriod)
                )

                metric_utils.updateLastEmittedSampleDatetime(
                    key=_EMITTED_TWEET_VOLUME_SAMPLE_TRACKER_KEY, sampleDatetime=aggStartDatetime
                )

                MetricDataForwarder.runInThread(
                    metricSpecs=loadMetricSpecs(),
                    aggSec=options.aggPeriod,
                    symbolList=[options.new_symbol],
                    forwardOnlyBacklog=True,
                )

                metric_utils.updateLastEmittedSampleDatetime(
                    key=_EMITTED_TWEET_VOLUME_SAMPLE_TRACKER_KEY, sampleDatetime=lastEmittedAggTime
                )

        g_log.info("Forwarding metrics to dynamodb using new symbol...")
        if options.twitter:
            migrate_tweets_to_dynamodb.main(symbolList=[options.new_symbol])

        g_log.info("Unmonitoring and deleting existing metrics associated with " "symbol=%s", options.old_symbol)
        oldModels = metric_utils.getSymbolModels(options.htmServer, options.apikey, options.old_symbol)
        for model in oldModels:
            metric_utils.unmonitorMetric(options.htmServer, options.apikey, model.uid)
            metric_utils.deleteMetric(options.htmServer, options.apikey, model.name)
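A worked example (with illustrative values) of the rewind arithmetic above: aggOffset rounds the span between the last-emitted cursor and the oldest stored sample up to the aggregation grid, and subtracting one extra period places the cursor a full interval before the oldest sample, so the forwarder replays the entire backlog for the new symbol.

import math
from datetime import datetime, timedelta

aggPeriod = 300  # seconds; illustrative stand-in for options.aggPeriod
oldestRecordTs = datetime(2015, 1, 1, 0, 0, 7)
lastEmittedAggTime = datetime(2015, 1, 1, 1, 0, 0)

spanSec = (lastEmittedAggTime - oldestRecordTs).total_seconds()  # 3593.0
aggOffset = math.ceil(spanSec / aggPeriod) * aggPeriod           # 3600.0

# Rewinding by aggOffset plus one extra period lands the emission cursor on
# the aggregation grid, at least one full interval before the oldest sample
aggStartDatetime = (lastEmittedAggTime
                    - timedelta(seconds=aggOffset)
                    - timedelta(seconds=aggPeriod))
print(aggStartDatetime)  # 2014-12-31 23:55:00, before 2015-01-01 00:00:07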