Code example #1
  def testPathwayToDynamoDB(self):
    """ Test metric data pathway to dynamodb
    """

    metricName = "TEST." + "".join(random.sample(string.ascii_letters, 16))

    nativeMetric = {
      "modelParams": {
        "minResolution": 0.2,
        "min": 0.0,
        "max": 10000.0,
      },
      "datasource": "custom",
      "metricSpec": {
        "metric": metricName,
        "resource": "Test",
        "userInfo": {
          "symbol": "TEST",
          "metricType": "TwitterVolume",
          "metricTypeName": "Twitter Volume",
        }
      }
    }
    metricName = nativeMetric["metricSpec"]["metric"]
    instanceName = nativeMetric["metricSpec"]["resource"]
    userInfo = nativeMetric["metricSpec"]["userInfo"]

    now = datetime.datetime.utcnow().replace(minute=0, second=0, microsecond=0)

    data = [
      (5000.0, now - datetime.timedelta(minutes=10)),
      (6000.0, now - datetime.timedelta(minutes=5)),
      (7000.0, now),
    ]

    # We'll be explicitly deleting the metric below, but we need to add a
    # cleanup step that runs in case there is some other failure that prevents
    # that part of the test from being reached.

    def gracefulDelete():
      try:
        self._deleteMetric(metricName)
      except ObjectNotFoundError:
        pass

    self.addCleanup(gracefulDelete)

    # Add custom metric data
    sock = socket.socket()
    sock.connect(("localhost", self.plaintextPort))
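    # Each sample is a "<metricName> <value> <epochSeconds>\n" line, the
    # plaintext format accepted on self.plaintextPort.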
    for metricValue, ts in data:
      sock.sendall("%s %r %s\n" % (metricName,
                                   metricValue,
                                   epochFromNaiveUTCDatetime(ts)))

    self.gracefullyCloseSocket(sock)

    uid = self.checkMetricCreated(metricName)

    # Save the uid for later
    LOGGER.info("Metric %s has uid: %s", metricName, uid)

    # Send model creation request
    model = self._createModel(nativeMetric)
    parameters = json.loads(model.parameters)
    self.assertEqual(parameters["metricSpec"]["userInfo"], userInfo)

    for _ in xrange(60):
      with self.engine.begin() as conn:
        metric = repository.getMetric(conn, uid)

      if metric.status == MetricStatus.ACTIVE:
        break
      LOGGER.info("Model=%s not ready. Sleeping 1 second...", uid)
      time.sleep(1)
    else:
      self.fail("Model results not available within 5 minutes")

    # Check that the data all got processed
    self.checkModelResultsSize(uid, 3)

    # Now check that the data was published to dynamodb...
    dynamodb = DynamoDBService.connectDynamoDB()

    metricTable = Table(MetricDynamoDBDefinition().tableName,
                        connection=dynamodb)
    metricItem = metricTable.lookup(uid)
    self.assertEqual(metricItem["uid"], uid)
    self.assertEqual(metricItem["name"], metricName)
    self.assertEqual(metricItem["metricType"], "TwitterVolume")
    self.assertEqual(metricItem["metricTypeName"], "Twitter Volume")
    self.assertEqual(metricItem["symbol"], "TEST")

    metricDataTable = Table(MetricDataDynamoDBDefinition().tableName,
                            connection=dynamodb)
    instanceDataAnomalyScores = {}
    for metricValue, ts in data:
      metricDataItem = _RETRY_ON_ITEM_NOT_FOUND_DYNAMODB_ERROR(
        metricDataTable.lookup
      )(uid, ts.isoformat())
      # There is no server-side cleanup for metric data, so remove it here for
      # now to avoid accumulating test data
      self.addCleanup(metricDataItem.delete)
      self.assertEqual(metricValue, metricDataItem["metric_value"])
      dt = datetime.datetime.strptime(metricDataItem["timestamp"],
                                      "%Y-%m-%dT%H:%M:%S")
      self.assertEqual(ts, dt)
      ts = ts.replace(minute=0, second=0, microsecond=0)
      date = ts.strftime("%Y-%m-%d")
      hour = ts.strftime("%H")
      key = (date, hour)
      maxVal = instanceDataAnomalyScores.get(key, 0.0)
      instanceDataAnomalyScores[key] = max(
          maxVal, metricDataItem["anomaly_score"])

    # And check that the aggregated instance data is updated
    instanceDataHourlyTable = Table(
        InstanceDataHourlyDynamoDBDefinition().tableName, connection=dynamodb)
    for key, anomalyScore in instanceDataAnomalyScores.iteritems():
      date, hour = key
      instanceDataHourlyItem = _RETRY_ON_ITEM_NOT_FOUND_DYNAMODB_ERROR(
        instanceDataHourlyTable.lookup
      )(instanceName, "%sT%s" % (date, hour))
      self.addCleanup(instanceDataHourlyItem.delete)
      self.assertAlmostEqual(
          anomalyScore,
          float(instanceDataHourlyItem["anomaly_score"]["TwitterVolume"]))
      self.assertEqual(date, instanceDataHourlyItem["date"])
      self.assertEqual(hour, instanceDataHourlyItem["hour"])

    # Now send some twitter data and validate that it made it to dynamodb

    twitterData = [
      {
        "metric_name": metricName,
        "tweet_uid": uid,
        "created_at": "2015-02-19T19:43:24.870109",
        "agg_ts": "2015-02-19T19:43:24.870118",
        "text": "Tweet text",
        "userid": "10",
        "username": "******",
        "retweet_count": "0"
      }
    ]

    with MessageBusConnector() as messageBus:
      messageBus.publishExg(
        exchange=self.config.get("non_metric_data", "exchange_name"),
        routingKey=(
          self.config.get("non_metric_data", "exchange_name") + ".twitter"),
        body=json.dumps(twitterData)
      )


    metricTweetsTable = Table(MetricTweetsDynamoDBDefinition().tableName,
                              connection=dynamodb)
    for _ in range(30):
      try:
        metricTweetItem = metricTweetsTable.lookup(
          twitterData[0]["text"],
          twitterData[0]["agg_ts"]
        )
        break
      except ItemNotFound:
        # Reads are eventually consistent; give the item time to show up
        time.sleep(1)
        continue
    else:
      self.fail("Tweet item not found in dynamodb within a reasonable amount"
                " of time (~30s)")

    # There is no server-side cleanup for tweet data, so remove it here for
    # now to avoid accumulating test data
    self.addCleanup(metricTweetItem.delete)
    self.assertEqual(metricTweetItem["username"], twitterData[0]["username"])
    self.assertEqual(metricTweetItem["tweet_uid"], twitterData[0]["tweet_uid"])
    self.assertEqual(metricTweetItem["created_at"], twitterData[0]["created_at"])
    self.assertEqual(metricTweetItem["agg_ts"], twitterData[0]["agg_ts"])
    self.assertEqual(metricTweetItem["text"], twitterData[0]["text"])
    self.assertEqual(metricTweetItem["userid"], twitterData[0]["userid"])
    self.assertEqual(metricTweetItem["username"], twitterData[0]["username"])
    self.assertEqual(metricTweetItem["retweet_count"], twitterData[0]["retweet_count"])
    self.assertEqual(metricTweetItem["copy_count"], 0)

    sort_key = twitterData[0]["agg_ts"]

    ts = (epochFromNaiveUTCDatetime(
      datetime.datetime.strptime(twitterData[0]["agg_ts"].partition(".")[0],
                                 "%Y-%m-%dT%H:%M:%S")) * 1e5)
    queryResult = metricTweetsTable.query_2(
      metric_name__eq=metricName,
      sort_key__gte=ts,
      index="taurus.metric_data-metric_name_index")
    queriedMetricTweetItem = next(queryResult)

    self.assertEqual(queriedMetricTweetItem["username"], twitterData[0]["username"])
    self.assertEqual(queriedMetricTweetItem["tweet_uid"], twitterData[0]["tweet_uid"])
    self.assertEqual(queriedMetricTweetItem["created_at"], twitterData[0]["created_at"])
    self.assertEqual(queriedMetricTweetItem["agg_ts"], twitterData[0]["agg_ts"])
    self.assertEqual(queriedMetricTweetItem["text"], twitterData[0]["text"])
    self.assertEqual(queriedMetricTweetItem["userid"], twitterData[0]["userid"])
    self.assertEqual(queriedMetricTweetItem["username"], twitterData[0]["username"])
    self.assertEqual(queriedMetricTweetItem["retweet_count"], twitterData[0]["retweet_count"])
    self.assertEqual(queriedMetricTweetItem["copy_count"], 0)
    self.assertEqual(queriedMetricTweetItem["sort_key"], ts)

    duplicatedTwitterData = [
      {
        "metric_name": "copy of " + metricName,
        "tweet_uid": "copy of " + uid,
        "created_at": "2015-02-19T19:45:24.870109",
        "agg_ts": "2015-02-19T19:43:24.870118", # Same agg_ts!
        "text": "Tweet text", # Same text!
        "userid": "20",
        "username": "******",
        "retweet_count": "0"
      }
    ]

    with MessageBusConnector() as messageBus:
      messageBus.publishExg(
        exchange=self.config.get("non_metric_data", "exchange_name"),
        routingKey=(
          self.config.get("non_metric_data", "exchange_name") + ".twitter"),
        body=json.dumps(duplicatedTwitterData)
      )

    for _ in range(30):
      metricTweetItem = metricTweetsTable.lookup(
        twitterData[0]["text"],
        twitterData[0]["agg_ts"]
      )

      if metricTweetItem["copy_count"] != 1:
        time.sleep(1)
        continue

      # Assert same as original, except for copy_count, which should be 1

      self.assertEqual(metricTweetItem["username"], twitterData[0]["username"])
      self.assertEqual(metricTweetItem["tweet_uid"], twitterData[0]["tweet_uid"])
      self.assertEqual(metricTweetItem["created_at"], twitterData[0]["created_at"])
      self.assertEqual(metricTweetItem["agg_ts"], twitterData[0]["agg_ts"])
      self.assertEqual(metricTweetItem["text"], twitterData[0]["text"])
      self.assertEqual(metricTweetItem["userid"], twitterData[0]["userid"])
      self.assertEqual(metricTweetItem["username"], twitterData[0]["username"])
      self.assertEqual(metricTweetItem["retweet_count"], twitterData[0]["retweet_count"])
      self.assertEqual(metricTweetItem["sort_key"], ts + 1)

      break
    else:
      self.fail("copy_count of original tweet not updated within reasonable"
                " amount of time (~30s) for duplicated tweet.")

    # Delete metric and ensure metric is deleted from dynamodb, too
    self._deleteMetric(metricName)

    for _ in xrange(60):
      time.sleep(1)
      try:
        metricTable.lookup(uid)
      except ItemNotFound:
        break
    else:
      self.fail("Metric not deleted from dynamodb")
Code example #2
    def testPathwayToDynamoDB(self):
        """ Test metric data pathway to dynamodb
    """

        metricName = "TEST." + "".join(random.sample(string.ascii_letters, 16))

        nativeMetric = {
            "modelParams": {
                "minResolution": 0.2,
                "min": 0.0,
                "max": 10000.0,
            },
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName,
                "resource": "Test",
                "userInfo": {
                    "symbol": "TEST",
                    "metricType": "TwitterVolume",
                    "metricTypeName": "Twitter Volume",
                }
            }
        }
        metricName = nativeMetric["metricSpec"]["metric"]
        instanceName = nativeMetric["metricSpec"]["resource"]
        userInfo = nativeMetric["metricSpec"]["userInfo"]

        now = datetime.datetime.utcnow().replace(minute=0,
                                                 second=0,
                                                 microsecond=0)

        data = [
            (5000.0, now - datetime.timedelta(minutes=10)),
            (6000.0, now - datetime.timedelta(minutes=5)),
            (7000.0, now),
        ]

        # We'll be explicitly deleting the metric below, but we need to add a
        # cleanup step that runs in case there is some other failure that prevents
        # that part of the test from being reached.

        def gracefulDelete():
            try:
                self._deleteMetric(metricName)
            except ObjectNotFoundError:
                pass

        self.addCleanup(gracefulDelete)

        # Add custom metric data
        sock = socket.socket()
        sock.connect(("localhost", self.plaintextPort))
        for metricValue, ts in data:
            sock.sendall(
                "%s %r %s\n" %
                (metricName, metricValue, epochFromNaiveUTCDatetime(ts)))

        self.gracefullyCloseSocket(sock)

        uid = self.checkMetricCreated(metricName)

        # Save the uid for later
        LOGGER.info("Metric %s has uid: %s", metricName, uid)

        # Send model creation request
        model = self._createModel(nativeMetric)
        parameters = json.loads(model.parameters)
        self.assertEqual(parameters["metricSpec"]["userInfo"], userInfo)

        for _ in xrange(60):
            with self.engine.begin() as conn:
                metric = repository.getMetric(conn, uid)

            if metric.status == MetricStatus.ACTIVE:
                break
            LOGGER.info("Model=%s not ready. Sleeping 1 second...", uid)
            time.sleep(1)
        else:
            self.fail("Model results not available within 5 minutes")

        # Check that the data all got processed
        self.checkModelResultsSize(uid, 3)

        # Now check that the data was published to dynamodb...
        dynamodb = DynamoDBService.connectDynamoDB()

        metricTable = Table(MetricDynamoDBDefinition().tableName,
                            connection=dynamodb)
        metricItem = metricTable.lookup(uid)
        self.assertEqual(metricItem["uid"], uid)
        self.assertEqual(metricItem["name"], metricName)
        self.assertEqual(metricItem["metricType"], "TwitterVolume")
        self.assertEqual(metricItem["metricTypeName"], "Twitter Volume")
        self.assertEqual(metricItem["symbol"], "TEST")

        metricDataTable = Table(MetricDataDynamoDBDefinition().tableName,
                                connection=dynamodb)
        instanceDataAnomalyScores = {}
        for metricValue, ts in data:
            metricDataItem = _RETRY_ON_ITEM_NOT_FOUND_DYNAMODB_ERROR(
                metricDataTable.lookup)(uid, ts.isoformat())
            # There is no server-side cleanup for metric data, so remove it here for
            # now to avoid accumulating test data
            self.addCleanup(metricDataItem.delete)
            self.assertEqual(metricValue, metricDataItem["metric_value"])
            dt = datetime.datetime.strptime(metricDataItem["timestamp"],
                                            "%Y-%m-%dT%H:%M:%S")
            self.assertEqual(ts, dt)
            ts = ts.replace(minute=0, second=0, microsecond=0)
            date = ts.strftime("%Y-%m-%d")
            hour = ts.strftime("%H")
            key = (date, hour)
            maxVal = instanceDataAnomalyScores.get(key, 0.0)
            instanceDataAnomalyScores[key] = max(
                maxVal, metricDataItem["anomaly_score"])

        # And check that the aggregated instance data is updated
        instanceDataHourlyTable = Table(
            InstanceDataHourlyDynamoDBDefinition().tableName,
            connection=dynamodb)
        for key, anomalyScore in instanceDataAnomalyScores.iteritems():
            date, hour = key
            instanceDataHourlyItem = _RETRY_ON_ITEM_NOT_FOUND_DYNAMODB_ERROR(
                instanceDataHourlyTable.lookup)(instanceName,
                                                "%sT%s" % (date, hour))
            self.addCleanup(instanceDataHourlyItem.delete)
            self.assertAlmostEqual(
                anomalyScore,
                float(
                    instanceDataHourlyItem["anomaly_score"]["TwitterVolume"]))
            self.assertEqual(date, instanceDataHourlyItem["date"])
            self.assertEqual(hour, instanceDataHourlyItem["hour"])

        # Now send some twitter data and validate that it made it to dynamodb

        twitterData = [{
            "metric_name": metricName,
            "tweet_uid": uid,
            "created_at": "2015-02-19T19:43:24.870109",
            "agg_ts": "2015-02-19T19:43:24.870118",
            "text": "Tweet text",
            "userid": "10",
            "username": "******",
            "retweet_count": "0"
        }]

        with MessageBusConnector() as messageBus:
            messageBus.publishExg(
                exchange=self.config.get("non_metric_data", "exchange_name"),
                routingKey=(
                    self.config.get("non_metric_data", "exchange_name") +
                    ".twitter"),
                body=json.dumps(twitterData))

        metricTweetsTable = Table(MetricTweetsDynamoDBDefinition().tableName,
                                  connection=dynamodb)
        metricTweetItem = metricTweetsTable.lookup(
            "-".join((metricName, uid)), "2015-02-19T19:43:24.870118")
        # There is no server-side cleanup for tweet data, so remove it here for
        # now to avoid accumulating test data
        self.addCleanup(metricTweetItem.delete)
        self.assertEqual(metricTweetItem["username"],
                         twitterData[0]["username"])
        self.assertEqual(metricTweetItem["tweet_uid"],
                         twitterData[0]["tweet_uid"])
        self.assertEqual(metricTweetItem["created_at"],
                         twitterData[0]["created_at"])
        self.assertEqual(metricTweetItem["agg_ts"], twitterData[0]["agg_ts"])
        self.assertEqual(metricTweetItem["text"], twitterData[0]["text"])
        self.assertEqual(metricTweetItem["userid"], twitterData[0]["userid"])
        self.assertEqual(metricTweetItem["username"],
                         twitterData[0]["username"])
        self.assertEqual(metricTweetItem["retweet_count"],
                         twitterData[0]["retweet_count"])

        queryResult = metricTweetsTable.query_2(
            metric_name__eq=metricName,
            agg_ts__eq=twitterData[0]["agg_ts"],
            index="taurus.metric_data-metric_name_index")
        queriedMetricTweetItem = next(queryResult)

        self.assertEqual(queriedMetricTweetItem["username"],
                         twitterData[0]["username"])
        self.assertEqual(queriedMetricTweetItem["tweet_uid"],
                         twitterData[0]["tweet_uid"])
        self.assertEqual(queriedMetricTweetItem["created_at"],
                         twitterData[0]["created_at"])
        self.assertEqual(queriedMetricTweetItem["agg_ts"],
                         twitterData[0]["agg_ts"])
        self.assertEqual(queriedMetricTweetItem["text"],
                         twitterData[0]["text"])
        self.assertEqual(queriedMetricTweetItem["userid"],
                         twitterData[0]["userid"])
        self.assertEqual(queriedMetricTweetItem["username"],
                         twitterData[0]["username"])
        self.assertEqual(queriedMetricTweetItem["retweet_count"],
                         twitterData[0]["retweet_count"])

        # Delete metric and ensure metric is deleted from dynamodb, too
        self._deleteMetric(metricName)

        for _ in xrange(60):
            time.sleep(1)
            try:
                metricTable.lookup(uid)
            except ItemNotFound:
                break
        else:
            self.fail("Metric not deleted from dynamodb")