Example #1
def getBaseConnectionArgsDict():
    """Return a dictonary of common database connection arguments."""
    return {
        "host": config.get("repository", "host"),
        "port": config.getint("repository", "port"),
        "user": config.get("repository", "user"),
        "passwd": config.get("repository", "passwd"),
        "charset": "utf8",
        "use_unicode": True,
    }
Example #2
def getBaseConnectionArgsDict():
  """Return a dictonary of common database connection arguments."""
  return {
    "host": config.get("repository", "host"),
    "port": config.getint("repository", "port"),
    "user": config.get("repository", "user"),
    "passwd": config.get("repository", "passwd"),
    "charset": "utf8",
    "use_unicode": True,
  }
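Examples #1 and #2 are the same helper with 4-space and 2-space indentation, respectively. The key names in the returned dict (passwd, charset, use_unicode) match the connection keywords of the legacy MySQLdb (mysql-python) driver, so the dict can plausibly be splatted straight into its connect() call. A minimal usage sketch under that assumption, assuming the config object shown above is importable:

# Usage sketch (assumption: the arguments target the MySQLdb driver,
# whose connect() accepts host/port/user/passwd/charset/use_unicode).
import contextlib
import MySQLdb

connection = MySQLdb.connect(**getBaseConnectionArgsDict())
with contextlib.closing(connection):
    cursor = connection.cursor()
    cursor.execute("SELECT VERSION()")
    print cursor.fetchone()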
Example #3
def reset(offline=False, **kwargs):
    """
  Reset the taurus database; upon successful completion, the necessary schema
  are created, but the tables are not populated

  :param offline: False to execute SQL commands; True to just dump SQL commands
    to stdout for offline mode or debugging
  :param bool suppressPromptAndContinueWithDeletion: kwarg only! When passed
    with the value of True, proceeds to drop the Taurus Engine database without
    prompting. Without this arg or if it's False, will prompt the user via
    terminal and expect a specific string to be entered

  :returns: 0 if reset was completed successfully; 1 if user doesn't confirm the
    request
  """
    # Make sure we have the latest version of configuration
    config.loadConfig()
    dbName = config.get("repository", "db")
    dbHost = config.get("repository", "host")

    if not kwargs.get("suppressPromptAndContinueWithDeletion"):
        answer = raw_input(
            "\n"
            "Attention!  You are about to do something irreversible, and potentially"
            " dangerous.\n"
            "\n"
            "To back out immediately without making any changes, feel free to type "
            "anything but \"Yes\" in the prompt below, and press return.\n"
            "\n"
            "Should you choose to continue, the database \"%s\" on \"%s\" will be"
            "permanently deleted.\n"
            "\n"
            "Are you sure you want to continue? " % (dbName, dbHost))

        if answer.strip() != "Yes":
            print "Wise choice, my friend.  Bye."
            return 1

    resetDatabaseSQL = ("DROP DATABASE IF EXISTS %(database)s; "
                        "CREATE DATABASE %(database)s;" % {
                            "database": dbName
                        })
    statements = resetDatabaseSQL.split(";")

    engine = getUnaffiliatedEngine()
    with engine.connect() as connection:
        for s in statements:
            if s.strip():
                connection.execute(s)

    migrate(offline=offline)

    return 0
Example #4
def reset(offline=False, **kwargs):
  """
  Reset the Taurus database; upon successful completion, the necessary schema
  are created, but the tables are not populated.

  :param offline: False to execute SQL commands; True to just dump SQL
    commands to stdout for offline mode or debugging
  :param bool suppressPromptAndContinueWithDeletion: kwarg only! When passed
    with the value of True, proceeds to drop the Taurus Engine database
    without prompting. Without this arg, or if it's False, the user is
    prompted via the terminal and expected to enter a specific string.

  :returns: 0 if the reset completed successfully; 1 if the user doesn't
    confirm the request
  """
  # Make sure we have the latest version of configuration
  config.loadConfig()
  dbName = config.get("repository", "db")
  dbHost = config.get("repository", "host")

  if not kwargs.get("suppressPromptAndContinueWithDeletion"):
    answer = raw_input(
      "\n"
      "Attention!  You are about to do something irreversible, and potentially"
      " dangerous.\n"
      "\n"
      "To back out immediately without making any changes, feel free to type "
      "anything but \"Yes\" in the prompt below, and press return.\n"
      "\n"
      "Should you choose to continue, the database \"%s\" on \"%s\" will be"
      "permanently deleted.\n"
      "\n"
      "Are you sure you want to continue? " % (dbName, dbHost))

    if answer.strip() != "Yes":
      print "Wise choice, my friend.  Bye."
      return 1

  resetDatabaseSQL = (
    "DROP DATABASE IF EXISTS %(database)s; "
    "CREATE DATABASE %(database)s;" % {"database": dbName})
  statements = resetDatabaseSQL.split(";")

  engine = getUnaffiliatedEngine()
  with engine.connect() as connection:
    for s in statements:
      if s.strip():
        connection.execute(s)

  migrate(offline=offline)

  return 0
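In the two variants above, the confirmation override travels as a keyword argument and the outcome is reported through the return code, which makes the function easy to drive from other code. A hedged usage sketch (the keyword name is taken from the docstring; the calling context is hypothetical):

# Hypothetical caller: reset the database non-interactively, e.g. from a
# test fixture, and fail loudly if the reset did not complete.
returnCode = reset(offline=False,
                   suppressPromptAndContinueWithDeletion=True)
assert returnCode == 0, "reset() was aborted or did not complete"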
Example #5
def reset(offline=False):
    """
  Reset the taurus database; upon successful completion, the necessary schema
  are created, but the tables are not populated

  :param offline: False to execute SQL commands; True to just dump SQL commands
    to stdout for offline mode or debugging
  """
    # Make sure we have the latest version of configuration
    config.loadConfig()
    dbName = config.get("repository", "db")
    dbHost = config.get("repository", "host")

    if "--suppress-prompt-and-continue-with-deletion" not in sys.argv:
        answer = raw_input(
            "Attention!  You are about to do something irreversible, and potentially"
            " dangerous.\n"
            "\n"
            "To back out immediately without making any changes, feel free to type "
            "anything but \"Yes\" in the prompt below, and press return.\n"
            "\n"
            "Should you choose to continue the database \"%s\" on \"%s\" will be"
            "permanently deleted.  If you do not wish to see this message again, "
            "you can pass --suppress-prompt-and-continue-with-deletion as an "
            "argument to this command.\n"
            "\n"
            "Are you sure you want to continue? " % (dbName, dbHost))

        if answer.strip() != "Yes":
            print "Wise choice, my friend.  Bye."
            return

    resetDatabaseSQL = ("DROP DATABASE IF EXISTS %(database)s; "
                        "CREATE DATABASE %(database)s;" % {
                            "database": dbName
                        })
    statements = resetDatabaseSQL.split(";")

    engine = getUnaffiliatedEngine()
    with engine.connect() as connection:
        for s in statements:
            if s.strip():
                connection.execute(s)

    migrate(offline=offline)
Example #6
def reset(offline=False):
  """
  Reset the Taurus database; upon successful completion, the necessary schema
  are created, but the tables are not populated.

  :param offline: False to execute SQL commands; True to just dump SQL commands
    to stdout for offline mode or debugging
  """
  # Make sure we have the latest version of configuration
  config.loadConfig()
  dbName = config.get("repository", "db")
  dbHost = config.get("repository", "host")

  if "--suppress-prompt-and-continue-with-deletion" not in sys.argv:
    answer = raw_input(
      "Attention!  You are about to do something irreversible, and potentially"
      " dangerous.\n"
      "\n"
      "To back out immediately without making any changes, feel free to type "
      "anything but \"Yes\" in the prompt below, and press return.\n"
      "\n"
      "Should you choose to continue the database \"%s\" on \"%s\" will be"
      "permanently deleted.  If you do not wish to see this message again, "
      "you can pass --suppress-prompt-and-continue-with-deletion as an "
      "argument to this command.\n"
      "\n"
      "Are you sure you want to continue? " % (dbName, dbHost))

    if answer.strip() != "Yes":
      print "Wise choice, my friend.  Bye."
      return

  resetDatabaseSQL = (
      "DROP DATABASE IF EXISTS %(database)s; "
      "CREATE DATABASE %(database)s;" % {"database": dbName})
  statements = resetDatabaseSQL.split(";")

  engine = getUnaffiliatedEngine()
  with engine.connect() as connection:
    for s in statements:
      if s.strip():
        connection.execute(s)

  migrate(offline=offline)
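Examples #5 and #6 are older revisions of the same function: the override is read from sys.argv instead of a keyword argument, and the function returns None rather than a status code. A minimal sketch of exercising this variant programmatically, where appending to sys.argv stands in for passing the flag on the command line:

import sys

# Simulate --suppress-prompt-and-continue-with-deletion having been passed
# on the command line, then reset without the interactive prompt.
sys.argv.append("--suppress-prompt-and-continue-with-deletion")
reset(offline=False)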
Example #7
def replayMetricDataToModelResultsExchange(messageBus,
                                           chunksize=DEFAULT_CHUNKSIZE):
    """ Reads metric data and synthesizes model inference result messages to the
  "model results" exchange, simulating the end result of the AnomalyService.
  This will afford the dynamodb service an opportunity to backfill older data
  :param messageBus: message bus connection
  :type messageBus: nta.utils.message_bus_connector.MessageBusConnector
  """
    engine = repository.engineFactory()

    twoWeeksAgo = datetime.datetime.utcnow() - datetime.timedelta(days=14)

    # Properties for publishing model command results on RabbitMQ exchange
    # (same as AnomalyService)
    modelCommandResultProperties = MessageProperties(
        deliveryMode=amqp.constants.AMQPDeliveryModes.PERSISTENT_MESSAGE,
        headers=dict(dataType="model-cmd-result"))

    # Properties for publishing model inference results on RabbitMQ exchange
    # (same as AnomalyService)
    modelInferenceResultProperties = MessageProperties(
        deliveryMode=amqp.constants.AMQPDeliveryModes.PERSISTENT_MESSAGE)

    g_log.info("Getting metric data...")
    result = repository.getMetricData(
        engine,
        score=0,
        fromTimestamp=twoWeeksAgo,
        sort=[metric_data.c.uid, metric_data.c.rowid.asc()])
    numMetricDataRows = result.rowcount
    g_log.info("Got %d rows", numMetricDataRows)

    numModels = 0
    for uid, group in groupby(result, key=lambda x: x.uid):

        @retryOnTransientErrors
        def _getMetric():
            return repository.getMetric(engine, uid)

        metricObj = _getMetric()

        # Send defineModel command to ensure that the metric table entry is created
        numModels += 1
        modelCommandResult = {
            "status": htmengineerrno.SUCCESS,
            "method": "defineModel",
            "modelId": uid,
            "modelInfo": {
                "metricName": metricObj.name,
                "resource": metricObj.server,
                "modelSpec": json.loads(metricObj.parameters)
            }
        }

        # Serialize
        payload = anomaly_service.AnomalyService._serializeModelResult(
            modelCommandResult)

        g_log.info("Sending `defineModel` command: %r",
                   repr(modelCommandResult))
        messageBus.publishExg(exchange=config.get("metric_streamer",
                                                  "results_exchange_name"),
                              routingKey="",
                              body=payload,
                              properties=modelCommandResultProperties)

        metricInfo = dict(uid=metricObj.uid,
                          name=metricObj.name,
                          description=metricObj.description,
                          resource=metricObj.server,
                          location=metricObj.location,
                          datasource=metricObj.datasource,
                          spec=json.loads(metricObj.parameters)["metricSpec"])

        args = [iter(group)] * chunksize
        for num, chunk in enumerate(izip_longest(fillvalue=None, *args)):
            # Create
            inferenceResultsMessage = dict(
                metric=metricInfo,
                results=[
                    dict(rowid=row.rowid,
                         ts=epochFromNaiveUTCDatetime(row.timestamp),
                         value=row.metric_value,
                         rawAnomaly=row.raw_anomaly_score,
                         anomaly=row.anomaly_score) for row in chunk
                    if row is not None
                ])

            # Serialize
            payload = anomaly_service.AnomalyService._serializeModelResult(
                inferenceResultsMessage)

            g_log.info(
                "uid=%s chunk=%d rows=%d payload_size=%d bytes from %s to %s",
                uid, num, len(inferenceResultsMessage["results"]),
                sys.getsizeof(payload),
                datetime.datetime.utcfromtimestamp(
                    inferenceResultsMessage["results"][0]["ts"]),
                datetime.datetime.utcfromtimestamp(
                    inferenceResultsMessage["results"][-1]["ts"]))

            messageBus.publishExg(exchange=config.get("metric_streamer",
                                                      "results_exchange_name"),
                                  routingKey="",
                                  body=payload,
                                  properties=modelInferenceResultProperties)

    g_log.info("Done! numMetricDataRows=%d; numModels=%d", numMetricDataRows,
               numModels)
Example #8
def replayMetricDataToModelResultsExchange(messageBus, chunksize=DEFAULT_CHUNKSIZE):
    """ Reads metric data and synthesizes model inference result messages to the
  "model results" exchange, simulating the end result of the AnomalyService.
  This will afford the dynamodb service an opportunity to backfill older data
  :param messageBus: message bus connection
  :type messageBus: nta.utils.message_bus_connector.MessageBusConnector
  """
    engine = repository.engineFactory()

    twoWeeksAgo = datetime.datetime.utcnow() - datetime.timedelta(days=14)

    # Properties for publishing model command results on RabbitMQ exchange
    # (same as AnomalyService)
    modelCommandResultProperties = MessageProperties(
        deliveryMode=amqp.constants.AMQPDeliveryModes.PERSISTENT_MESSAGE, headers=dict(dataType="model-cmd-result")
    )

    # Properties for publishing model inference results on RabbitMQ exchange
    # (same as AnomalyService)
    modelInferenceResultProperties = MessageProperties(deliveryMode=amqp.constants.AMQPDeliveryModes.PERSISTENT_MESSAGE)

    g_log.info("Getting metric data...")
    result = repository.getMetricData(
        engine, score=0, fromTimestamp=twoWeeksAgo, sort=[metric_data.c.uid, metric_data.c.rowid.asc()]
    )
    numMetricDataRows = result.rowcount
    g_log.info("Got %d rows", numMetricDataRows)

    numModels = 0
    for uid, group in groupby(result, key=lambda x: x.uid):

        @retryOnTransientErrors
        def _getMetric():
            return repository.getMetric(engine, uid)

        metricObj = _getMetric()

        # Send defineModel command to ensure that the metric table entry is created
        numModels += 1
        modelCommandResult = {
            "status": htmengineerrno.SUCCESS,
            "method": "defineModel",
            "modelId": uid,
            "modelInfo": {
                "metricName": metricObj.name,
                "resource": metricObj.server,
                "modelSpec": json.loads(metricObj.parameters),
            },
        }

        # Serialize
        payload = anomaly_service.AnomalyService._serializeModelResult(modelCommandResult)

        g_log.info("Sending `defineModel` command: %r", repr(modelCommandResult))
        messageBus.publishExg(
            exchange=config.get("metric_streamer", "results_exchange_name"),
            routingKey="",
            body=payload,
            properties=modelCommandResultProperties,
        )

        metricInfo = dict(
            uid=metricObj.uid,
            name=metricObj.name,
            description=metricObj.description,
            resource=metricObj.server,
            location=metricObj.location,
            datasource=metricObj.datasource,
            spec=json.loads(metricObj.parameters)["metricSpec"],
        )

        args = [iter(group)] * chunksize
        for num, chunk in enumerate(izip_longest(fillvalue=None, *args)):
            # Create
            inferenceResultsMessage = dict(
                metric=metricInfo,
                results=[
                    dict(
                        rowid=row.rowid,
                        ts=epochFromNaiveUTCDatetime(row.timestamp),
                        value=row.metric_value,
                        rawAnomaly=row.raw_anomaly_score,
                        anomaly=row.anomaly_score,
                    )
                    for row in chunk
                    if row is not None
                ],
            )

            # Serialize
            payload = anomaly_service.AnomalyService._serializeModelResult(inferenceResultsMessage)

            g_log.info(
                "uid=%s chunk=%d rows=%d payload_size=%d bytes from %s to %s",
                uid,
                num,
                len(inferenceResultsMessage["results"]),
                sys.getsizeof(payload),
                datetime.datetime.utcfromtimestamp(inferenceResultsMessage["results"][0]["ts"]),
                datetime.datetime.utcfromtimestamp(inferenceResultsMessage["results"][-1]["ts"]),
            )

            messageBus.publishExg(
                exchange=config.get("metric_streamer", "results_exchange_name"),
                routingKey="",
                body=payload,
                properties=modelInferenceResultProperties,
            )

    g_log.info("Done! numMetricDataRows=%d; numModels=%d", numMetricDataRows, numModels)