def testCreateAllModels(self):

    host = os.environ.get("TAURUS_HTM_SERVER", "127.0.0.1")
    apikey = os.environ.get("TAURUS_APIKEY", "taurus")

    # Shrink the metrics configuration down to a small random sample so as not
    # to overload the system under test; we only need to verify that everything
    # returned goes through the right channels.

    metrics = {
      key:value
      for (key, value)
      in random.sample(metric_utils.getMetricsConfiguration().items(), 3)
    }

    with patch("taurus.metric_collectors.metric_utils.getMetricsConfiguration",
               return_value=metrics,
               spec_set=metric_utils.getMetricsConfiguration):
      createdModels = metric_utils.createAllModels(host, apikey)

    allModels = metric_utils.getAllModels(host, apikey)

    for model in createdModels:
      self.addCleanup(requests.delete,
                      "https://%s/_metrics/custom/%s" % (host, model["name"]),
                      auth=(apikey, ""),
                      verify=False)
      remoteModel = metric_utils.getOneModel(host, apikey, model["uid"])
      self.assertDictEqual(remoteModel, model)
      self.assertIn(model, allModels)
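
# The down-sampling above assumes getMetricsConfiguration() returns a plain
# dict that random.sample() can work with. A tiny, self-contained illustration
# of that step; the keys and nesting below are made up, not the real Taurus
# metrics configuration.
import random

fakeMetricsConfiguration = {
  "FOOBAR": {"metrics": {"XIGNITE.FOOBAR.CLOSINGPRICE": {}}},
  "DOLITTLE": {"metrics": {"XIGNITE.DOLITTLE.VOLUME": {}}},
  "KNOWLITTLE": {"metrics": {"TWITTER.TWEET.HANDLE.KNOWLITTLE.VOLUME": {}}},
  "GOTNOTHING": {"metrics": {}},
}

# Down-sample to 3 entries, as the test does before patching
# getMetricsConfiguration(); list() keeps this working on Python 3, where
# dict.items() is not a sequence.
sampledMetrics = dict(random.sample(list(fakeMetricsConfiguration.items()), 3))
print(sorted(sampledMetrics))
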
  def testGetAllModels(self, requestsMock):

    # Mock out only the minimal response needed to satisfy
    # metric_utils.getAllModels(), then assert that the returned result is the
    # json-decoded response from the mocked-out API.
    requestsMock.get.return_value = Mock(status_code=200,
                                         text='[{"parameters":"True"}]')

    result = metric_utils.getAllModels("localhost", "taurus")

    requestsMock.get.assert_called_once_with("https://localhost/_models",
                                             verify=ANY, auth=("taurus", ""))

    self.assertSequenceEqual(result, ({"parameters":"True"},))
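
# The requestsMock argument above implies a patch decorator that is not shown
# in this snippet. A minimal, hypothetical sketch of that wiring, assuming
# metric_utils imports the requests package at module level (the patch target
# and the test-case class name are assumptions, not taken from the source):
import unittest

from mock import Mock, patch  # unittest.mock on Python 3

from taurus.metric_collectors import metric_utils


class GetAllModelsTestCase(unittest.TestCase):

  @patch("taurus.metric_collectors.metric_utils.requests", autospec=True)
  def testGetAllModels(self, requestsMock):
    # patch() injects the mocked requests module as requestsMock, so
    # metric_utils.getAllModels() never touches the network.
    requestsMock.get.return_value = Mock(status_code=200,
                                         text='[{"parameters":"True"}]')

    result = metric_utils.getAllModels("localhost", "taurus")

    self.assertSequenceEqual(result, ({"parameters": "True"},))
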
def main():
  logging_support.LoggingSupport.initTool()

  try:
    options = _parseArgs()
    g_log.info("Running %s with options=%r", sys.argv[0], options)

    if options["unmonitorAll"]:
      models = metric_utils.getAllModels(
        host=options["htmServer"],
        apiKey=options["apiKey"])
    else:
      models = tuple(
        metric_utils.getOneModel(
          host=options["htmServer"],
          apiKey=options["apiKey"],
          modelId=modelId)
        for modelId in options["modelIds"]
      )

    # Save model objects to file for use by monitor_metrics
    with open(options["modelsFilePath"], "w") as outFile:
      json.dump(models, outFile, indent=4)

    if not models:
      g_log.info("No models to unmonitor")
      return

    g_log.info("Unmonitoring %d models", len(models))

    for i, model in enumerate(models, 1):
      modelId = model["uid"]
      metric_utils.unmonitorMetric(
        host=options["htmServer"],
        apiKey=options["apiKey"],
        modelId=modelId)
      g_log.info("Unmonitored metric=%s (%d of %d)",
                 modelId, i, len(models))

    g_log.info("Unmonitored %d models", len(models))
  except SystemExit as e:
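    # _parseArgs() may exit via SystemExit (e.g., for --help or invalid
    # arguments); a zero exit code is not a failure, so log only non-zero exits.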
    if e.code != 0:
      g_log.exception("unmonitor_metrics failed")
    raise
  except Exception:
    g_log.exception("unmonitor_metrics failed")
    raise
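
# main() above subscripts options with the keys htmServer, apiKey, unmonitorAll,
# modelIds and modelsFilePath. A hypothetical sketch of a _parseArgs() that
# would produce that shape; the flag names and defaults below are assumptions
# and may differ from the real tool's command-line interface.
import argparse


def _parseArgs():
  """Hypothetical option parsing implied by main(); returns a dict of options."""
  parser = argparse.ArgumentParser(
    description="Unmonitor Taurus metric models")
  parser.add_argument("--server", dest="htmServer", default="127.0.0.1",
                      help="Hostname of the Taurus HTM engine")
  parser.add_argument("--apikey", dest="apiKey", default="taurus",
                      help="API key of the Taurus HTM engine")
  parser.add_argument("--modelsout", dest="modelsFilePath", required=True,
                      help="Path of the JSON file to save model objects to")

  group = parser.add_mutually_exclusive_group(required=True)
  group.add_argument("--all", dest="unmonitorAll", action="store_true",
                     help="Unmonitor all models")
  group.add_argument("--modelid", dest="modelIds", action="append", default=[],
                     help="uid of a model to unmonitor; may be repeated")

  # main() indexes options like a dict (e.g. options["htmServer"]), so return one
  return vars(parser.parse_args())
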
  def testDeleteCompanies(self):
    host = os.environ.get("TAURUS_HTM_SERVER", "127.0.0.1")
    apiKey = os.environ.get("TAURUS_APIKEY", "taurus")

    # We have four target stock ticker symbols here:
    #  FOOBAR: has both metrics and an xignite_security symbol
    #  DOLITTLE: has metrics, but no xignite_security symbol
    #  KNOWLITTLE: has no metrics, but has an xignite_security symbol
    #  GOTNOTHING: has neither metrics, nor xignite_security symbol

    negatives = set([
      "{uuid}.ZZZZZZ.CLOSINGPRICE".format(uuid=uuid.uuid1().hex),
      "{uuid}.FOOBAR.ZZZZZZ.VOLUME".format(uuid=uuid.uuid1().hex),
      "FOOBAR.{uuid}".format(uuid=uuid.uuid1().hex),
      ".FOOBAR.{uuid}".format(uuid=uuid.uuid1().hex),
      "{uuid}.FOOBAR.".format(uuid=uuid.uuid1().hex),
      "{uuid}FOOBARCLOSINGPRICE".format(uuid=uuid.uuid1().hex),
    ])

    positives = set([
      "{uuid}.FOOBAR.CLOSINGPRICE".format(uuid=uuid.uuid1().hex),
      "{uuid}.FOOBAR.VOLUME".format(uuid=uuid.uuid1().hex),
      "{uuid}.TWEET.HANDLE.FOOBAR.VOLUME".format(uuid=uuid.uuid1().hex),
      "{uuid}.NEWS.FOOBAR.VOLUME".format(uuid=uuid.uuid1().hex),
      "{uuid}.DOLITTLE.CLOSINGPRICE".format(uuid=uuid.uuid1().hex),
      "{uuid}.DOLITTLE.VOLUME".format(uuid=uuid.uuid1().hex),
      "{uuid}.TWEET.HANDLE.DOLITTLE.VOLUME".format(uuid=uuid.uuid1().hex),
    ])

    allTestMetricNames = negatives.union(positives)

    # Register cleanup actions
    for metric in allTestMetricNames:
      self.addCleanup(_safeDeleteMetric,
                      host=host,
                      apiKey=apiKey,
                      metricName=metric)

    # Create custom models. They will be created in the "pending data" state,
    # since we're providing neither data nor min/max; thus we don't need to wait
    # for them to enter the "active" model state.
    for metric in allTestMetricNames:
      metric_utils.createCustomHtmModel(host=host,
                                        apiKey=apiKey,
                                        metricName=metric,
                                        resourceName=metric,
                                        userInfo=dict(),
                                        modelParams=dict())

    # Verify that all metrics got created in Taurus Engine now
    remoteMetricNames = set(obj["name"] for obj in
                            metric_utils.getAllModels(host, apiKey))
    self.assertTrue(allTestMetricNames.issubset(remoteMetricNames),
                    "Some models didn't get created: {metrics}".format(
                      metrics=allTestMetricNames.difference(remoteMetricNames)))

    # Add FOOBAR and KNOWLITTLE to xignite_security table
    def securityExists(symbol):
      security = collectorsdb.engineFactory().execute(
        sql.select([schema.xigniteSecurity.c.symbol])
        .where(schema.xigniteSecurity.c.symbol == symbol)
      ).scalar()

      if security is not None:
        self.assertEqual(security, symbol)
        return True

      return False

    def addSecurity(symbol):
      self.addCleanup(_deleteSecurity, symbol)
      xignite_agent_utils.insertSecurity(
        engine=collectorsdb.engineFactory(),
        xigniteSecurity={
          "Symbol": symbol,
          "CIK": "CIK",
          "CUSIP": "CUSIP",
          "ISIN": "ISIN",
          "Valoren": "Valoren",
          "Name": "{sym} Inc.".format(sym=symbol),
          "Market": "Market",
          "MarketIdentificationCode": "mic1",
          "MostLiquidExchange": True,
          "CategoryOrIndustry": "CategoryOrIndustry"
        })

      self.assertTrue(securityExists(symbol),
                      "inserted {symbol} not found".format(symbol=symbol))

    addSecurity("FOOBAR")
    addSecurity("KNOWLITTLE")

    # Delete companies corresponding to our target ticker symbols
    delete_companies.deleteCompanies(
      tickerSymbols=["FOOBAR", "DOLITTLE", "KNOWLITTLE", "GOTNOTHING"],
      engineServer=host,
      engineApiKey=apiKey,
      warnAboutDestructiveAction=False)

    # Verify that positives got deleted and negatives didn't
    remoteMetricNames = set(obj["name"] for obj in
                            metric_utils.getAllModels(host, apiKey))
    self.assertTrue(positives.isdisjoint(remoteMetricNames),
                    "Some positives didn't get deleted: {metrics}".format(
                      metrics=positives.intersection(remoteMetricNames)))

    self.assertTrue(negatives.issubset(remoteMetricNames),
                    "Some negatives got deleted: {metrics}".format(
                      metrics=negatives.difference(remoteMetricNames)))

    # Verify that FOOBAR and KNOWLITTLE got deleted from xignite_security table
    self.assertFalse(securityExists("FOOBAR"),
                     "FOOBAR not deleted from xignite_security")
    self.assertFalse(securityExists("KNOWLITTLE"),
                     "KNOWLITTLE not deleted from xignite_security")
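
# _safeDeleteMetric is registered as a cleanup above but is not part of this
# snippet. A hypothetical sketch of such a helper, reusing the custom-metric
# DELETE endpoint seen in testCreateAllModels; "safe" is taken to mean
# tolerating a metric that was never created or has already been deleted.
import requests


def _safeDeleteMetric(host, apiKey, metricName):
  response = requests.delete(
    "https://%s/_metrics/custom/%s" % (host, metricName),
    auth=(apiKey, ""),
    verify=False)

  # During cleanup a missing metric (404) is fine; any other error should
  # still surface.
  if response.status_code != 404:
    response.raise_for_status()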