Example 1
    def testMonitorMetricCompleteModelParamsNoInferenceArgs(self):
        """ Test monitorMetric() raises ValueError with completeModelParams but
    not inferenceArgs. """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        fileName = "custom_datasource_adapter_test_model_config.json"
        with self._openTestDataFile(fileName) as modelConfigFile:
            modelConfig = json.load(modelConfigFile)

        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "completeModelParams": {
                "modelConfig": modelConfig,
                "timestampFieldName": "jon",
                "valueFieldName": "snow"
            }
        }

        with self.assertRaises(ValueError) as excCtx:
            adapter.monitorMetric(modelSpec)

        excArgZero = excCtx.exception.args[0]
        initialMsg = excArgZero[
            0:len(scalar_metric_utils._NO_INFERENCE_ARGS_MSG)]
        self.assertEqual(initialMsg,
                         scalar_metric_utils._NO_INFERENCE_ARGS_MSG)
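Note: the assertion above compares only the leading substring of the exception
message. A minimal sketch of that convention, using a hypothetical message text
in place of the real scalar_metric_utils._NO_INFERENCE_ARGS_MSG:

# Sketch only; the prefix text below is a stand-in, not htmengine's actual
# constant.
_NO_INFERENCE_ARGS_MSG = "completeModelParams requires inferenceArgs; "

def validateCompleteModelParams(completeModelParams):
    """Raise ValueError with a stable message prefix if inferenceArgs is missing."""
    if "inferenceArgs" not in completeModelParams:
        raise ValueError(_NO_INFERENCE_ARGS_MSG +
                         "got keys=%s" % (sorted(completeModelParams),))

try:
    validateCompleteModelParams({"modelConfig": {}, "valueFieldName": "snow"})
except ValueError as exc:
    # Matching on the prefix lets the message carry extra diagnostic detail.
    assert exc.args[0][:len(_NO_INFERENCE_ARGS_MSG)] == _NO_INFERENCE_ARGS_MSG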
Example 2
    def testUnmonitorMetricPendingData(self):
        """ Test unmonitorMetric on metric in PENDING_DATA state """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            }
        }

        adapter.monitorMetric(modelSpec)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(
                conn,
                metricId,
                fields=[schema.metric.c.parameters, schema.metric.c.status])
        self.assertEqual(metricObj.status, MetricStatus.PENDING_DATA)
        self.assertEqual(json.loads(metricObj.parameters), modelSpec)

        self._validateModelSpec(json.loads(metricObj.parameters))

        # Turn off monitoring
        adapter.unmonitorMetric(metricId)

        self.checkMetricUnmonitoredById(metricId)
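Note: PENDING_DATA means monitoring is on but no data has arrived, so model
creation is deferred. A toy sketch of that decision, with an assumed threshold
value (the real one is scalar_metric_utils.MODEL_CREATION_RECORD_THRESHOLD,
exercised in a later example):

PENDING_DATA = "PENDING_DATA"
CREATE_PENDING = "CREATE_PENDING"

def statusAfterMonitor(numDataRows, creationThreshold=1000):
    """Too few rows: wait for data; enough rows: start building the model."""
    return CREATE_PENDING if numDataRows >= creationThreshold else PENDING_DATA

assert statusAfterMonitor(0) == PENDING_DATA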
Example 3
    def testUnmonitorMetricWithModel(self):
        """ Test unmonitorMetric on metric with active model """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric: name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "modelParams": {
                "min": 0,  # optional
                "max": 100  # optional
            }
        }

        adapter.monitorMetric(modelSpec)

        g_log.info("Waiting for model to become active")
        self.checkModelIsActive(metricId)

        # Turn off monitoring
        g_log.info(
            "Unmonitoring htmengine custom metric with active model: "
            "name=%s", metricName)
        adapter.unmonitorMetric(metricId)
        self.checkMetricUnmonitoredById(metricId)
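Note: checkModelIsActive and checkMetricUnmonitoredById are base-class helpers
not shown in these examples. Because activation is asynchronous, such checks
are typically polling loops; a generic sketch with assumed timeout values:

import time

def waitFor(predicate, timeout=30.0, interval=0.5):
    """Poll predicate() until it returns truthy or the timeout elapses."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if predicate():
            return
        time.sleep(interval)
    raise AssertionError("condition not met within %.1f seconds" % timeout)

A caller might write waitFor(lambda: getMetricStatus(metricId) ==
MetricStatus.ACTIVE), where getMetricStatus is a stand-in for a repository
lookup.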
Example 4
    def testMonitorMetricCompleteModelParamsNoValueFieldName(self):
        """ Test monitorMetric() raises ValueError with completeModelParams but
    not valueFieldName. """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        fileName = "custom_datasource_adapter_test_model_config.json"
        with self._openTestDataFile(fileName) as modelConfigFile:
            modelConfig = json.load(modelConfigFile)

        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "completeModelParams": {
                "modelConfig": modelConfig,
                "inferenceArgs": {
                    "predictionSteps": [1],
                    "predictedField": "bachman",
                    "inputPredictedField": "auto"
                },
                "timestampFieldName": "erlich"
            }
        }

        with self.assertRaises(ValueError) as excCtx:
            adapter.monitorMetric(modelSpec)

        excArgZero = excCtx.exception.args[0]
        initialMsg = excArgZero[
            0:len(scalar_metric_utils._NO_VALUE_FIELD_NAME_MSG)]
        self.assertEqual(initialMsg,
                         scalar_metric_utils._NO_VALUE_FIELD_NAME_MSG)
Example 5
  def testMonitorMetricWithResource(self):
    """Test monitorMetric that includes an explicit resource string."""
    metricName = "test-" + uuid.uuid1().hex
    resource = "Test Resource"

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",

      "metricSpec": {
        "metric": metricName,
        "resource": resource,
      }
    }

    adapter.monitorMetric(modelSpec)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.parameters,
                                               schema.metric.c.status,
                                               schema.metric.c.server])

    self.assertEqual(metricObj.status, MetricStatus.PENDING_DATA)
    self.assertEqual(json.loads(metricObj.parameters), modelSpec)
    self.assertEqual(metricObj.server, resource)

    self._validateModelSpec(json.loads(metricObj.parameters))
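Note: this test pins down that an optional metricSpec["resource"] is stored in
the metric row's server column. The implied mapping, as a sketch:

def serverFromModelSpec(modelSpec, default=None):
    """Sketch: metricSpec["resource"], if present, becomes the stored server."""
    return modelSpec["metricSpec"].get("resource", default)

assert serverFromModelSpec(
    {"metricSpec": {"metric": "m", "resource": "Test Resource"}}) == "Test Resource"
assert serverFromModelSpec({"metricSpec": {"metric": "m"}}) is None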
Example 6
def _addMetric(engine, metricName):
    """Add the new metric to the database."""
    if metricName in gCustomMetrics:
        try:
            # Attempt to reload the metric
            metricId = gCustomMetrics[metricName][0].uid
            with engine.connect() as conn:
                gCustomMetrics[metricName][0] = repository.getMetric(
                    conn, metricId)
            return
        except htmengine.exceptions.ObjectNotFoundError:
            # Do nothing; we will create a new metric and update the cache below
            pass

    # Use the adapter to create the metric
    try:
        metricId = createCustomDatasourceAdapter().createMetric(metricName)
    except htmengine.exceptions.MetricAlreadyExists as e:
        metricId = e.uid

    with engine.connect() as conn:
        metric = repository.getMetric(conn, metricId)

    # Add it to our cache
    gCustomMetrics[metricName] = [metric, datetime.datetime.utcnow()]

    _trimMetricCache()
Example 7
  def testMonitorMetricNameMismatch(self):
    """ Test monitorMetric() raises ValueError when inferenceArgs-predictedField
    doesn't match valueFieldName. """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    fileName = "custom_datasource_adapter_test_model_config.json"
    with self._openTestDataFile(fileName) as modelConfigFile:
      modelConfig = json.load(modelConfigFile)

    modelSpec = {
      "datasource": "custom",
      "metricSpec": {
        "metric": metricName
      },
      "completeModelParams": {
        "modelConfig": modelConfig,
        "inferenceArgs": {"predictionSteps": [1], "predictedField": "baz",
                          "inputPredictedField": "auto"},
        "timestampFieldName": "snorf",
        "valueFieldName": "bar"
      }
    }

    with self.assertRaises(ValueError) as excCtx:
      adapter.monitorMetric(modelSpec)

    excArgZero = excCtx.exception.args[0]
    initialMsg = excArgZero[0: len(
      scalar_metric_utils._INCONSISTENT_PREDICTED_FIELD_NAME_MSG)]
    self.assertEqual(initialMsg,
                     scalar_metric_utils._INCONSISTENT_PREDICTED_FIELD_NAME_MSG)
Example 8
  def testMonitorMetricCompleteModelParamsNoInferenceArgs(self):
    """ Test monitorMetric() raises ValueError with completeModelParams but
    not inferenceArgs. """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    fileName = "custom_datasource_adapter_test_model_config.json"
    with self._openTestDataFile(fileName) as modelConfigFile:
      modelConfig = json.load(modelConfigFile)

    modelSpec = {
      "datasource": "custom",
      "metricSpec": {
        "metric": metricName
      },
      "completeModelParams": {
        "modelConfig": modelConfig,
        "timestampFieldName": "jon",
        "valueFieldName": "snow"
      }
    }

    with self.assertRaises(ValueError) as excCtx:
      adapter.monitorMetric(modelSpec)

    excArgZero = excCtx.exception.args[0]
    initialMsg = excArgZero[0: len(
      scalar_metric_utils._NO_INFERENCE_ARGS_MSG)]
    self.assertEqual(initialMsg,
                     scalar_metric_utils._NO_INFERENCE_ARGS_MSG)
Example 9
  def testUnmonitorMetricPendingData(self):
    """ Test unmonitorMetric on metric in PENDING_DATA state """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",

      "metricSpec": {
        "metric": metricName
      }
    }

    adapter.monitorMetric(modelSpec)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.parameters,
                                               schema.metric.c.status])
    self.assertEqual(metricObj.status, MetricStatus.PENDING_DATA)
    self.assertEqual(json.loads(metricObj.parameters), modelSpec)

    self._validateModelSpec(json.loads(metricObj.parameters))

    # Turn off monitoring
    adapter.unmonitorMetric(metricId)

    self.checkMetricUnmonitoredById(metricId)
Example 10
  def testMonitorMetricThatIsAlreadyMonitored(self):
    """ monitorMetric should raise if already monitored """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",

      "metricSpec": {
        "metric": metricName
      }
    }

    modelId = adapter.monitorMetric(modelSpec)

    with self.assertRaises(app_exceptions.MetricAlreadyMonitored) as cm:
      adapter.monitorMetric(modelSpec)

    self.assertEqual(cm.exception.uid, modelId)
Example 11
  def testUnmonitorMetricWithModel(self):
    """ Test unmonitorMetric on metric with active model """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric: name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",

      "metricSpec": {
        "metric": metricName
      },

      "modelParams": {
        "min": 0,  # optional
        "max": 100  # optional
      }
    }

    adapter.monitorMetric(modelSpec)

    g_log.info("Waiting for model to become active")
    self.checkModelIsActive(metricId)

    # Turn off monitoring
    g_log.info("Unmonitoring htmengine custom metric with active model: "
               "name=%s",
               metricName)
    adapter.unmonitorMetric(metricId)
    self.checkMetricUnmonitoredById(metricId)
Example 12
def _addMetric(engine, metricName):
  """Add the new metric to the database."""
  if metricName in gCustomMetrics:
    try:
      # Attempt to reload the metric
      metricId = gCustomMetrics[metricName][0].uid
      with engine.connect() as conn:
        gCustomMetrics[metricName][0] = repository.getMetric(conn, metricId)
      return
    except htmengine.exceptions.ObjectNotFoundError:
      # Do nothing; we will create a new metric and update the cache below
      pass

  # Use the adapter to create the metric
  try:
    metricId = createCustomDatasourceAdapter().createMetric(metricName)
  except htmengine.exceptions.MetricAlreadyExists as e:
    metricId = e.uid

  with engine.connect() as conn:
    metric = repository.getMetric(conn, metricId)

  # Add it to our cache
  gCustomMetrics[metricName] = [metric, datetime.datetime.utcnow()]

  _trimMetricCache()
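Note: _trimMetricCache is called here but not shown. A plausible sketch,
assuming it evicts entries whose timestamp (the second element of each
gCustomMetrics value, as set above) is older than some TTL; the TTL value and
the explicit cache parameter are choices made to keep the sketch self-contained:

import datetime

_METRIC_CACHE_TTL = datetime.timedelta(hours=1)  # hypothetical value

def trimMetricCache(gCustomMetrics, now=None):
    """Drop cache entries older than _METRIC_CACHE_TTL."""
    now = now or datetime.datetime.utcnow()
    for name in list(gCustomMetrics):
        if now - gCustomMetrics[name][1] > _METRIC_CACHE_TTL:
            del gCustomMetrics[name]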
Example 13
    def testExportImport(self):
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Add some data
        # NOTE: we discard the fractional part because it gets eliminated
        # in the database, and we will want to compare against retrieved
        # items later.
        now = datetime.datetime.utcnow().replace(microsecond=0)
        data = [(0, now - datetime.timedelta(minutes=5)), (100, now)]

        with self.engine.connect() as conn:
            repository.addMetricData(conn, metricId, data)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
        }

        adapter.monitorMetric(modelSpec)

        def checkExportSpec(exportSpec):
            self.assertEqual(exportSpec["datasource"], modelSpec["datasource"])
            self.assertEqual(exportSpec["metricSpec"], modelSpec["metricSpec"])
            self.assertSequenceEqual(exportSpec["data"], data)

        # Export
        exportSpec = adapter.exportModel(metricId)
        checkExportSpec(exportSpec)

        # Delete metric
        adapter.deleteMetricByName(metricName)
        self.checkModelDeleted(metricId)

        # Import
        metricId = adapter.importModel(
            htmengine.utils.jsonDecode(htmengine.utils.jsonEncode(exportSpec)))

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(
                conn,
                metricId,
                fields=[schema.metric.c.parameters, schema.metric.c.status])
        self.assertEqual(metricObj.status, MetricStatus.PENDING_DATA)
        self.assertEqual(json.loads(metricObj.parameters), modelSpec)

        self._validateModelSpec(json.loads(metricObj.parameters))

        # Export again
        exportSpec = adapter.exportModel(metricId)
        checkExportSpec(exportSpec)
Example 14
  def testActivateModelClassifierEnabled(self):
    """ Test activateModel with classifier enabled in model spec. """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",
      "metricSpec": {
        "metric": metricName
      },
      "modelParams": {
        "enableClassifier": True
      }
    }

    adapter.monitorMetric(modelSpec)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.status])
    self.assertEqual(metricObj.status, MetricStatus.PENDING_DATA)

    # Add some data
    data = [
      (0, datetime.datetime.utcnow() - datetime.timedelta(minutes=5)),
      (100, datetime.datetime.utcnow())
    ]
    with self.engine.connect() as conn:
      repository.addMetricData(conn, metricId, data)

    # Activate model
    adapter.activateModel(metricId)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.status,
                                               schema.metric.c.model_params])
    self.assertIn(metricObj.status, (MetricStatus.CREATE_PENDING,
                                     MetricStatus.ACTIVE))

    self._assertClassifierStatusInModelParams(metricObj.model_params,
                                              classifierEnabled=True)

    g_log.info("Waiting for model to become active")
    self.checkModelIsActive(metricId)

    g_log.info("Waiting at least one model result")
    self.checkModelResultsSize(metricId, 1, atLeast=True)
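Note: _assertClassifierStatusInModelParams digs the classifier flag out of the
stored model-params JSON. A purely illustrative sketch; the key path below is
hypothetical, not htmengine's actual model-params schema:

import json

def assertClassifierStatus(modelParamsJson, classifierEnabled):
    """Check a boolean classifier flag inside serialized model params."""
    modelParams = json.loads(modelParamsJson)
    assert modelParams["classifierEnabled"] is classifierEnabled  # hypothetical key

assertClassifierStatus(json.dumps({"classifierEnabled": True}),
                       classifierEnabled=True)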
Example 15
    def testActivateModelClassifierEnabled(self):
        """ Test activateModel with classifier enabled in model spec. """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "modelParams": {
                "enableClassifier": True
            }
        }

        adapter.monitorMetric(modelSpec)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(conn,
                                             metricId,
                                             fields=[schema.metric.c.status])
        self.assertEqual(metricObj.status, MetricStatus.PENDING_DATA)

        # Add some data
        data = [(0,
                 datetime.datetime.utcnow() - datetime.timedelta(minutes=5)),
                (100, datetime.datetime.utcnow())]
        with self.engine.connect() as conn:
            repository.addMetricData(conn, metricId, data)

        # Activate model
        adapter.activateModel(metricId)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(
                conn,
                metricId,
                fields=[schema.metric.c.status, schema.metric.c.model_params])
        self.assertIn(metricObj.status,
                      (MetricStatus.CREATE_PENDING, MetricStatus.ACTIVE))

        self._assertClassifierStatusInModelParams(metricObj.model_params,
                                                  classifierEnabled=True)

        g_log.info("Waiting for model to become active")
        self.checkModelIsActive(metricId)

        g_log.info("Waiting at least one model result")
        self.checkModelResultsSize(metricId, 1, atLeast=True)
Example 16
  def testDeleteMetricByNameUnmonitored(self):
    """ Test deletion of unmonitored metric """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating grok custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)

    g_log.info("Deleteing unmonitored grok custom metric; name=%s", metricName)
    adapter.deleteMetricByName(metricName)
    g_log.info("Waiting for model to complete deletion")
    self.checkModelDeleted(metricId)
Example 17
    def testDeleteMetricByNameUnmonitored(self):
        """ Test deletion of unmonitored metric """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)

        g_log.info("Deleteing unmonitored htmengine custom metric; name=%s",
                   metricName)
        adapter.deleteMetricByName(metricName)
        g_log.info("Waiting for model to complete deletion")
        self.checkModelDeleted(metricId)
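Note: checkModelDeleted must poll as well, since deletion is asynchronous; it
presumably succeeds once the metric lookup raises ObjectNotFoundError, the
exception _addMetric catches in Example 6. A sketch with a local stand-in
exception:

class ObjectNotFoundError(Exception):
    """Stand-in for htmengine.exceptions.ObjectNotFoundError."""

def isMetricDeleted(lookupMetric, metricId):
    """Return True once lookupMetric raises ObjectNotFoundError for metricId."""
    try:
        lookupMetric(metricId)
    except ObjectNotFoundError:
        return True
    return False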
Example 18
    def testCreateMetricThatAlreadyExists(self):
        """ Creating a custom metric with name that already exists should raise """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        with self.assertRaises(app_exceptions.MetricAlreadyExists) as cm:
            adapter.createMetric(metricName)

        self.assertEqual(cm.exception.uid, metricId)
Example 19
  def testCreateMetricThatAlreadyExists(self):
    """ Creating a custom metric with name that already exists should raise """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    with self.assertRaises(app_exceptions.MetricAlreadyExists) as cm:
      adapter.createMetric(metricName)

    self.assertEqual(cm.exception.uid, metricId)
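Note: both versions of this test, like _addMetric in Example 6, rely on
MetricAlreadyExists carrying the uid of the existing metric row. A minimal
sketch of that exception convention:

class MetricAlreadyExists(Exception):
    """Sketch: carries the existing metric's uid so callers can recover it."""

    def __init__(self, uid, *args):
        super(MetricAlreadyExists, self).__init__(*args)
        self.uid = uid

try:
    raise MetricAlreadyExists("abc123", "metric already exists")
except MetricAlreadyExists as e:
    assert e.uid == "abc123"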
Example 20
    def testMonitorMetricWithCompleteModelParams(self):
        """ Test monitorMetric with complete set of user-provided model parameters
    that activates a model """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        fileName = "custom_datasource_adapter_test_model_config.json"
        with self._openTestDataFile(fileName) as modelConfigFile:
            modelConfig = json.load(modelConfigFile)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "completeModelParams": {
                "modelConfig": modelConfig,
                "inferenceArgs": {
                    "predictionSteps": [1],
                    "predictedField": "bar",
                    "inputPredictedField": "auto"
                },
                "timestampFieldName": "foo",
                "valueFieldName": "bar"
            }
        }

        adapter.monitorMetric(modelSpec)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(
                conn,
                metricId,
                fields=[schema.metric.c.status, schema.metric.c.parameters])

        self._validateModelSpec(json.loads(metricObj.parameters))

        self.assertIn(metricObj.status,
                      (MetricStatus.CREATE_PENDING, MetricStatus.ACTIVE))
        self.assertEqual(json.loads(metricObj.parameters), modelSpec)

        g_log.info("Waiting for model to become active")
        self.checkModelIsActive(metricId)
Example 21
  def testMonitorMetricWithEnoughDataForStats(self):
    """ monitorMetric should create a model when there is enough data rows """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Add enough data to force activation of model
    data = [
      (offset, datetime.datetime.utcnow() + datetime.timedelta(minutes=offset))
      for offset in xrange(
        0,
        scalar_metric_utils.MODEL_CREATION_RECORD_THRESHOLD * 5,
        5)
    ]
    self.assertEqual(len(data),
                     scalar_metric_utils.MODEL_CREATION_RECORD_THRESHOLD)

    with self.engine.connect() as conn:
      repository.addMetricData(conn, metricId, data)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",

      "metricSpec": {
        "metric": metricName
      },
    }

    adapter.monitorMetric(modelSpec)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.status])

    self.assertIn(metricObj.status, (MetricStatus.CREATE_PENDING,
                                     MetricStatus.ACTIVE))

    g_log.info("Waiting for model to become active")
    self.checkModelIsActive(metricId)

    g_log.info("Waiting at least one model result")
    self.checkModelResultsSize(metricId, 1, atLeast=True)
Example 22
  def testMonitorMetricWithMinResolution(self):
    """
    Test monitorMetric with user-provided min/max and minResolution
    that activates a model.
    Make sure resolution doesn't drop below minResolution.
    """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",

      "metricSpec": {
        "metric": metricName
      },

      "modelParams": {
        "min": 0,  # optional
        "max": 1,  # optional
        "minResolution": 0.5 # optional
      }
    }

    adapter.monitorMetric(modelSpec)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.status,
                                               schema.metric.c.parameters])
    self.assertIn(metricObj.status, (MetricStatus.CREATE_PENDING,
                                     MetricStatus.ACTIVE))
    self.assertEqual(json.loads(metricObj.parameters), modelSpec)

    self._validateModelSpec(json.loads(metricObj.parameters))

    g_log.info("Waiting for model to become active")
    self.checkModelIsActive(metricId)
    self.checkEncoderResolution(metricId, 0, 1, minResolution=0.5)
Example 23
  def testMonitorMetricWithCompleteModelParams(self):
    """ Test monitorMetric with complete set of user-provided model parameters
    that activates a model """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    fileName = "custom_datasource_adapter_test_model_config.json"
    with self._openTestDataFile(fileName) as modelConfigFile:
      modelConfig = json.load(modelConfigFile)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",
      "metricSpec": {
        "metric": metricName
      },
      "completeModelParams": {
        "modelConfig": modelConfig,
        "inferenceArgs": {"predictionSteps": [1], "predictedField": "bar",
                          "inputPredictedField": "auto"},
        "timestampFieldName": "foo",
        "valueFieldName": "bar"
      }
    }

    adapter.monitorMetric(modelSpec)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.status,
                                               schema.metric.c.parameters])

    self._validateModelSpec(json.loads(metricObj.parameters))

    self.assertIn(metricObj.status, (MetricStatus.CREATE_PENDING,
                                     MetricStatus.ACTIVE))
    self.assertEqual(json.loads(metricObj.parameters), modelSpec)

    g_log.info("Waiting for model to become active")
    self.checkModelIsActive(metricId)
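Note: together with testMonitorMetricNameMismatch (Example 7), this pins down a
consistency rule: inferenceArgs["predictedField"] must equal valueFieldName
("bar" matches "bar" here, while "baz" vs "bar" fails there). A sketch of the
check, with a stand-in message text for
scalar_metric_utils._INCONSISTENT_PREDICTED_FIELD_NAME_MSG:

_INCONSISTENT_PREDICTED_FIELD_NAME_MSG = (
    "predictedField must match valueFieldName; ")  # stand-in text

def checkPredictedFieldConsistency(completeModelParams):
    """Raise ValueError if the predicted field disagrees with valueFieldName."""
    predicted = completeModelParams["inferenceArgs"]["predictedField"]
    value = completeModelParams["valueFieldName"]
    if predicted != value:
        raise ValueError(_INCONSISTENT_PREDICTED_FIELD_NAME_MSG +
                         "%r != %r" % (predicted, value))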
Example 24
    def testMonitorMetricWithEnoughDataForStats(self):
        """ monitorMetric should create a model when there is enough data rows """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Add enough data to force activation of model
        data = [
            (offset,
             datetime.datetime.utcnow() + datetime.timedelta(minutes=offset))
            for offset in xrange(
                0, scalar_metric_utils.MODEL_CREATION_RECORD_THRESHOLD * 5, 5)
        ]
        self.assertEqual(len(data),
                         scalar_metric_utils.MODEL_CREATION_RECORD_THRESHOLD)

        with self.engine.connect() as conn:
            repository.addMetricData(conn, metricId, data)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
        }

        adapter.monitorMetric(modelSpec)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(conn,
                                             metricId,
                                             fields=[schema.metric.c.status])

        self.assertIn(metricObj.status,
                      (MetricStatus.CREATE_PENDING, MetricStatus.ACTIVE))

        g_log.info("Waiting for model to become active")
        self.checkModelIsActive(metricId)

        g_log.info("Waiting at least one model result")
        self.checkModelResultsSize(metricId, 1, atLeast=True)
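Note: the length assertion holds because range(0, N * 5, 5) yields exactly N
offsets (the examples use Python 2's xrange, which behaves the same way). A
quick check with a stand-in threshold:

MODEL_CREATION_RECORD_THRESHOLD = 1000  # stand-in; real value lives in scalar_metric_utils

offsets = range(0, MODEL_CREATION_RECORD_THRESHOLD * 5, 5)
assert len(offsets) == MODEL_CREATION_RECORD_THRESHOLD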
Example 25
    def testMonitorMetricClassifierEnabled(self):
        """ Test monitorMetric with request for enabled classifier in model
    params """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "modelParams": {
                "min": 0,  # optional
                "max": 100,  # optional
                "enableClassifier": True
            }
        }

        adapter.monitorMetric(modelSpec)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(conn,
                                             metricId,
                                             fields=[
                                                 schema.metric.c.status,
                                                 schema.metric.c.parameters,
                                                 schema.metric.c.model_params
                                             ])
        self.assertEqual(metricObj.status, MetricStatus.CREATE_PENDING)
        self.assertEqual(json.loads(metricObj.parameters), modelSpec)

        self._assertClassifierStatusInModelParams(metricObj.model_params,
                                                  classifierEnabled=True)

        self._validateModelSpec(json.loads(metricObj.parameters))

        g_log.info("Waiting for model to become active")
        self.checkModelIsActive(metricId)
        self.checkEncoderResolution(metricId, 0, 100)
Example 26
  def testMonitorMetricClassifierEnabled(self):
    """ Test monitorMetric with request for enabled classifier in model
    params """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",

      "metricSpec": {
        "metric": metricName
      },

      "modelParams": {
        "min": 0,  # optional
        "max": 100,  # optional
        "enableClassifier": True
      }
    }

    adapter.monitorMetric(modelSpec)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.status,
                                               schema.metric.c.parameters,
                                               schema.metric.c.model_params])
    self.assertEqual(metricObj.status, MetricStatus.CREATE_PENDING)
    self.assertEqual(json.loads(metricObj.parameters), modelSpec)

    self._assertClassifierStatusInModelParams(metricObj.model_params,
                                              classifierEnabled=True)

    self._validateModelSpec(json.loads(metricObj.parameters))

    g_log.info("Waiting for model to become active")
    self.checkModelIsActive(metricId)
    self.checkEncoderResolution(metricId, 0, 100)
Example 27
    def testMonitorMetricWithMinResolution(self):
        """
    Test monitorMetric with user-provided min/max and minResolution
    that activates a model.
    Make sure resolution doesn't drop below minResolution.
    """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "modelParams": {
                "min": 0,  # optional
                "max": 1,  # optional
                "minResolution": 0.5  # optional
            }
        }

        adapter.monitorMetric(modelSpec)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(
                conn,
                metricId,
                fields=[schema.metric.c.status, schema.metric.c.parameters])
        self.assertIn(metricObj.status,
                      (MetricStatus.CREATE_PENDING, MetricStatus.ACTIVE))
        self.assertEqual(json.loads(metricObj.parameters), modelSpec)

        self._validateModelSpec(json.loads(metricObj.parameters))

        g_log.info("Waiting for model to become active")
        self.checkModelIsActive(metricId)
        self.checkEncoderResolution(metricId, 0, 1, minResolution=0.5)
Example 28
  def testCreateMetric(self):
    """ Test creation of custom metric """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.name,
                                               schema.metric.c.datasource,
                                               schema.metric.c.status])

    self.assertEqual(metricObj.name, metricName)
    self.assertEqual(metricObj.datasource, "custom")
    self.assertEqual(metricObj.status, MetricStatus.UNMONITORED)
Example 29
    def testCreateMetric(self):
        """ Test creation of custom metric """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(conn,
                                             metricId,
                                             fields=[
                                                 schema.metric.c.name,
                                                 schema.metric.c.datasource,
                                                 schema.metric.c.status
                                             ])

        self.assertEqual(metricObj.name, metricName)
        self.assertEqual(metricObj.datasource, "custom")
        self.assertEqual(metricObj.status, MetricStatus.UNMONITORED)
Example 30
    def testMonitorMetricModelParamsAndCompleteModelParams(self):
        """ Test monitorMetric() raises ValueError for mutually exclusive model
     params input options. """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        fileName = "custom_datasource_adapter_test_model_config.json"
        with self._openTestDataFile(fileName) as modelConfigFile:
            modelConfig = json.load(modelConfigFile)

        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "completeModelParams": {
                "modelConfig": modelConfig,
                "inferenceArgs": {
                    "predictionSteps": [1],
                    "predictedField": "bar",
                    "inputPredictedField": "auto"
                },
                "timestampFieldName": "foo",
                "valueFieldName": "bar"
            },
            "modelParams": {
                "min": 0,
                "max": 100
            }
        }

        with self.assertRaises(ValueError) as excCtx:
            adapter.monitorMetric(modelSpec)

        excArgZero = excCtx.exception.args[0]
        initialMsg = excArgZero[
            0:len(scalar_metric_utils._MUTEX_MODEL_SPEC_MSG)]
        self.assertEqual(initialMsg, scalar_metric_utils._MUTEX_MODEL_SPEC_MSG)
Example 31
  def testDeleteMetricWithModel(self):
    """ Test monitorMetric with user-provided min/max that activates a model """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric: name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    try:
      # Turn on monitoring
      modelSpec = {
        "datasource": "custom",

        "metricSpec": {
          "metric": metricName
        },

        "modelParams": {
          "min": 0,  # optional
          "max": 100  # optional
        }
      }

      adapter.monitorMetric(modelSpec)

      g_log.info("Waiting for model to become active")
      self.checkModelIsActive(metricId)

      g_log.info("Deleteing htmengine custom metric with active model: "
                 "name=%s",
                 metricName)
      adapter.deleteMetricByName(metricName)
      g_log.info("Waiting for model to complete deletion")
      self.checkModelDeleted(metricId)

    except:  # pylint: disable=W0702
      g_log.exception("Something went wrong")
      adapter.deleteMetricByName(metricName)
      raise  # re-raise so the failure still surfaces
Example 32
  def testMonitorMetricModelParamsAndCompleteModelParams(self):
    """ Test monitorMetric() raises ValueError for mutually exclusive model
     params input options. """
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    fileName = "custom_datasource_adapter_test_model_config.json"
    with self._openTestDataFile(fileName) as modelConfigFile:
      modelConfig = json.load(modelConfigFile)

    modelSpec = {
      "datasource": "custom",
      "metricSpec": {
        "metric": metricName
      },
      "completeModelParams": {
        "modelConfig": modelConfig,
        "inferenceArgs": {"predictionSteps": [1], "predictedField": "bar",
                          "inputPredictedField": "auto"},
        "timestampFieldName": "foo",
        "valueFieldName": "bar"
      },
      "modelParams": {
        "min": 0,
        "max": 100
      }
    }

    with self.assertRaises(ValueError) as excCtx:
      adapter.monitorMetric(modelSpec)

    excArgZero = excCtx.exception.args[0]
    initialMsg = excArgZero[0: len(
      scalar_metric_utils._MUTEX_MODEL_SPEC_MSG)]
    self.assertEqual(initialMsg,
                     scalar_metric_utils._MUTEX_MODEL_SPEC_MSG)
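Note: the two versions of this test pin down that modelParams and
completeModelParams are mutually exclusive inputs to monitorMetric. A sketch of
the rule, with a stand-in message text for
scalar_metric_utils._MUTEX_MODEL_SPEC_MSG:

_MUTEX_MODEL_SPEC_MSG = (
    "modelParams and completeModelParams are mutually exclusive; ")  # stand-in text

def checkMutuallyExclusiveParams(modelSpec):
    """Raise ValueError if both model-parameter options are supplied."""
    if "modelParams" in modelSpec and "completeModelParams" in modelSpec:
        raise ValueError(_MUTEX_MODEL_SPEC_MSG + "supply only one of them")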
Example 33
    def testMonitorMetricWithUserInfo(self):
        """Test monitorMetric that includes an explicit userInfo property in
    metricSpec.
    """
        metricName = "test-" + uuid.uuid1().hex
        userInfo = {"symbol": "test-user-info"}

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName,
                "userInfo": userInfo
            }
        }

        adapter.monitorMetric(modelSpec)

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(conn,
                                             metricId,
                                             fields=[
                                                 schema.metric.c.parameters,
                                                 schema.metric.c.status,
                                                 schema.metric.c.server
                                             ])

        self.assertEqual(metricObj.status, MetricStatus.PENDING_DATA)
        self.assertEqual(json.loads(metricObj.parameters), modelSpec)

        self._validateModelSpec(json.loads(metricObj.parameters))
Example 34
    def testDeleteMetricWithModel(self):
        """ Test monitorMetric with user-provided min/max that activates a model """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric: name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        try:
            # Turn on monitoring
            modelSpec = {
                "datasource": "custom",
                "metricSpec": {
                    "metric": metricName
                },
                "modelParams": {
                    "min": 0,  # optional
                    "max": 100  # optional
                }
            }

            adapter.monitorMetric(modelSpec)

            g_log.info("Waiting for model to become active")
            self.checkModelIsActive(metricId)

            g_log.info(
                "Deleting htmengine custom metric with active model: "
                "name=%s", metricName)
            adapter.deleteMetricByName(metricName)
            g_log.info("Waiting for model to complete deletion")
            self.checkModelDeleted(metricId)

        except:  # pylint: disable=W0702
            g_log.exception("Something went wrong")
            adapter.deleteMetricByName(metricName)
            raise  # re-raise so the failure still surfaces
Example 35
    def testMonitorMetricThatIsAlreadyMonitored(self):
        """ monitorMetric should raise if already monitored """
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            }
        }

        modelId = adapter.monitorMetric(modelSpec)

        with self.assertRaises(app_exceptions.MetricAlreadyMonitored) as cm:
            adapter.monitorMetric(modelSpec)

        self.assertEqual(cm.exception.uid, modelId)
Example 36
  def testExportImport(self):
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Add some data
    # NOTE: we discard the fractional part because it gets eliminated
    # in the database, and we will want to compare against retrieved
    # items later.
    now = datetime.datetime.utcnow().replace(microsecond=0)
    data = [
      (0, now - datetime.timedelta(minutes=5)),
      (100, now)
    ]

    with self.engine.connect() as conn:
      repository.addMetricData(conn, metricId, data)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",

      "metricSpec": {
        "metric": metricName
      },
    }

    adapter.monitorMetric(modelSpec)

    def checkExportSpec(exportSpec):
      self.assertEqual(exportSpec["datasource"], modelSpec["datasource"])
      self.assertEqual(exportSpec["metricSpec"], modelSpec["metricSpec"])
      self.assertSequenceEqual(exportSpec["data"], data)

    # Export
    exportSpec = adapter.exportModel(metricId)
    checkExportSpec(exportSpec)

    # Delete metric
    adapter.deleteMetricByName(metricName)
    self.checkModelDeleted(metricId)

    # Import
    metricId = adapter.importModel(
      htmengine.utils.jsonDecode(htmengine.utils.jsonEncode(exportSpec)))

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.parameters,
                                               schema.metric.c.status])
    self.assertEqual(metricObj.status, MetricStatus.PENDING_DATA)
    self.assertEqual(json.loads(metricObj.parameters), modelSpec)

    self._validateModelSpec(json.loads(metricObj.parameters))

    # Export again
    exportSpec = adapter.exportModel(metricId)
    checkExportSpec(exportSpec)
Example 37
  def testExportImportCompleteModelParams(self):
    metricName = "test-" + uuid.uuid1().hex

    adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

    g_log.info("Creating htmengine custom metric; name=%s", metricName)
    metricId = adapter.createMetric(metricName)
    self.addCleanup(adapter.deleteMetricByName, metricName)

    # Add some data
    # NOTE: we discard the fractional part because it gets eliminated
    # in the database, and we will want to compare against retrieved
    # items later.
    now = datetime.datetime.utcnow().replace(microsecond=0)
    data = [
      (0, now - datetime.timedelta(minutes=5)),
      (100, now)
    ]

    with self.engine.connect() as conn:
      repository.addMetricData(conn, metricId, data)

    fileName = "custom_datasource_adapter_test_model_config.json"
    with self._openTestDataFile(fileName) as modelConfigFile:
      modelConfig = json.load(modelConfigFile)

    # Turn on monitoring
    modelSpec = {
      "datasource": "custom",
      "metricSpec": {
        "metric": metricName
      },
      "completeModelParams": {
        "modelConfig": modelConfig,
        "inferenceArgs": {"predictionSteps": [1], "predictedField": "bar",
                          "inputPredictedField": "auto"},
        "timestampFieldName": "foo",
        "valueFieldName": "bar"
      }
    }

    adapter.monitorMetric(modelSpec)

    def checkExportSpec(exportSpec):
      self.assertEqual(exportSpec["datasource"], modelSpec["datasource"])
      self.assertEqual(exportSpec["metricSpec"], modelSpec["metricSpec"])
      self.assertSequenceEqual(exportSpec["data"], data)

    # Export
    exportSpec = adapter.exportModel(metricId)
    checkExportSpec(exportSpec)

    # Delete metric
    adapter.deleteMetricByName(metricName)
    self.checkModelDeleted(metricId)

    # Import
    metricId = adapter.importModel(
      htmengine.utils.jsonDecode(htmengine.utils.jsonEncode(exportSpec)))

    with self.engine.connect() as conn:
      metricObj = repository.getMetric(conn,
                                       metricId,
                                       fields=[schema.metric.c.parameters,
                                               schema.metric.c.status])
    self.assertIn(metricObj.status, (MetricStatus.CREATE_PENDING,
                                     MetricStatus.ACTIVE))
    self.assertEqual(json.loads(metricObj.parameters), modelSpec)

    self._validateModelSpec(json.loads(metricObj.parameters))

    # Export again
    exportSpec = adapter.exportModel(metricId)
    checkExportSpec(exportSpec)
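Note: both export/import tests push exportSpec through
htmengine.utils.jsonEncode and jsonDecode before importing, mimicking transport
across a process boundary. Assuming those are thin wrappers over the stdlib
json module, the round trip amounts to:

import json

def roundTrip(obj):
    """Serialize to JSON and back; tuples come back as lists, keys as text."""
    return json.loads(json.dumps(obj))

spec = {"datasource": "custom", "metricSpec": {"metric": "m"}}
assert roundTrip(spec) == spec

That tuples-to-lists behavior is likely why checkExportSpec compares the data
rows with assertSequenceEqual rather than assertEqual.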
Example 38
    def testExportImportCompleteModelParams(self):
        metricName = "test-" + uuid.uuid1().hex

        adapter = datasource_adapter_factory.createCustomDatasourceAdapter()

        g_log.info("Creating htmengine custom metric; name=%s", metricName)
        metricId = adapter.createMetric(metricName)
        self.addCleanup(adapter.deleteMetricByName, metricName)

        # Add some data
        # NOTE: we discard the fractional part because it gets eliminated
        # in the database, and we will want to compare against retrieved
        # items later.
        now = datetime.datetime.utcnow().replace(microsecond=0)
        data = [(0, now - datetime.timedelta(minutes=5)), (100, now)]

        with self.engine.connect() as conn:
            repository.addMetricData(conn, metricId, data)

        fileName = "custom_datasource_adapter_test_model_config.json"
        with self._openTestDataFile(fileName) as modelConfigFile:
            modelConfig = json.load(modelConfigFile)

        # Turn on monitoring
        modelSpec = {
            "datasource": "custom",
            "metricSpec": {
                "metric": metricName
            },
            "completeModelParams": {
                "modelConfig": modelConfig,
                "inferenceArgs": {
                    "predictionSteps": [1],
                    "predictedField": "bar",
                    "inputPredictedField": "auto"
                },
                "timestampFieldName": "foo",
                "valueFieldName": "bar"
            }
        }

        adapter.monitorMetric(modelSpec)

        def checkExportSpec(exportSpec):
            self.assertEqual(exportSpec["datasource"], modelSpec["datasource"])
            self.assertEqual(exportSpec["metricSpec"], modelSpec["metricSpec"])
            self.assertSequenceEqual(exportSpec["data"], data)

        # Export
        exportSpec = adapter.exportModel(metricId)
        checkExportSpec(exportSpec)

        # Delete metric
        adapter.deleteMetricByName(metricName)
        self.checkModelDeleted(metricId)

        # Import
        metricId = adapter.importModel(
            htmengine.utils.jsonDecode(htmengine.utils.jsonEncode(exportSpec)))

        with self.engine.connect() as conn:
            metricObj = repository.getMetric(
                conn,
                metricId,
                fields=[schema.metric.c.parameters, schema.metric.c.status])
        self.assertIn(metricObj.status,
                      (MetricStatus.CREATE_PENDING, MetricStatus.ACTIVE))
        self.assertEqual(json.loads(metricObj.parameters), modelSpec)

        self._validateModelSpec(json.loads(metricObj.parameters))

        # Export again
        exportSpec = adapter.exportModel(metricId)
        checkExportSpec(exportSpec)