Example #1
  def GET(self):
    """Return all custom metrics as a JSON list of metric dicts."""
    with web.ctx.connFactory() as conn:
      metrics = repository.getCustomMetrics(conn, getMetricDisplayFields(conn))
    convertedMetrics = [convertMetricRowToMetricDict(metric)
                        for metric in metrics]
    self.addStandardHeaders()
    return json.dumps(convertedMetrics)
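
The handler above follows the standard web.py convention: the framework instantiates the class mapped to the request URL and calls its GET method, and the string that method returns becomes the response body. Below is a minimal, self-contained sketch of that wiring; the MetricsHandler name, the /metrics URL pattern, and the hard-coded payload are illustrative assumptions, not part of the original application, which builds the payload from repository.getCustomMetrics() as shown above.

import json
import web

# Hypothetical URL map: requests to /metrics are dispatched to MetricsHandler.
urls = ("/metrics", "MetricsHandler")

class MetricsHandler(object):
  def GET(self):
    # Stand-in payload; the real handler converts rows returned by
    # repository.getCustomMetrics() into dicts before serializing.
    metrics = [{"name": "demo.metric", "last_rowid": 0}]
    web.header("Content-Type", "application/json; charset=UTF-8")
    return json.dumps(metrics)

if __name__ == "__main__":
  web.application(urls, globals()).run()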
Example #2
def runServer():
  # Get the current list of custom metrics
  appConfig = Config("application.conf",
                     os.environ.get("APPLICATION_CONFIG_PATH"))

  engine = repository.engineFactory(appConfig)
  global gCustomMetrics
  now = datetime.datetime.utcnow()

  with engine.connect() as conn:
    gCustomMetrics = dict(
      (m.name, [m, now]) for m in repository.getCustomMetrics(conn))

  queueName = appConfig.get("metric_listener", "queue_name")

  global gProfiling
  gProfiling = (appConfig.getboolean("debugging", "profiling") or
                LOGGER.isEnabledFor(logging.DEBUG))
  del appConfig

  metricStreamer = MetricStreamer()
  modelSwapper = ModelSwapperInterface()

  with MessageBusConnector() as bus:
    if not bus.isMessageQeueuePresent(queueName):
      bus.createMessageQueue(mqName=queueName, durable=True)
    LOGGER.info("Waiting for messages. To exit, press CTRL+C")
    with bus.consume(queueName) as consumer:
      messages = []
      messageRxTimes = []
      while True:
        message = consumer.pollOneMessage()
        if message is not None:
          messages.append(message)
          if gProfiling:
            messageRxTimes.append(time.time())

        if message is None or len(messages) >= MAX_MESSAGES_PER_BATCH:
          if messages:
            # Process the batch
            try:
              _handleBatch(engine,
                           messages,
                           messageRxTimes,
                           metricStreamer,
                           modelSwapper)
            except Exception:  # pylint: disable=W0703
              LOGGER.exception("Unknown failure in processing messages.")
              # Make sure that we ack messages when there is an unexpected error
              # to avoid getting hung forever on one bad record.

            # Ack all the messages
            messages[-1].ack(multiple=True)
            # Clear the message buffer
            messages = []
            messageRxTimes = []
          else:
            # Queue is empty, wait before retrying
            time.sleep(POLL_DELAY_SEC)
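
The consumer loop above implements a simple poll-and-batch pattern: messages accumulate until the queue momentarily runs dry or MAX_MESSAGES_PER_BATCH is reached, the whole batch is handed to _handleBatch, and only the last message is acknowledged with multiple=True, which covers every earlier message in the batch in a single ack. The sketch below distills just that control flow without the message-bus and model-swapper dependencies; drainInBatches and the constant values are illustrative assumptions.

import time

POLL_DELAY_SEC = 1.0           # assumed value, standing in for the constant above
MAX_MESSAGES_PER_BATCH = 200   # assumed value, standing in for the constant above

def drainInBatches(pollOneMessage, handleBatch):
  """Accumulate polled messages and process them one batch at a time.

  pollOneMessage: callable returning the next message, or None if the queue
    is currently empty (same contract as consumer.pollOneMessage above).
  handleBatch: callable that processes a list of messages.
  """
  messages = []
  while True:
    message = pollOneMessage()
    if message is not None:
      messages.append(message)

    # Flush when the queue momentarily runs dry or the batch is full.
    if message is None or len(messages) >= MAX_MESSAGES_PER_BATCH:
      if messages:
        handleBatch(messages)
        messages = []
      else:
        time.sleep(POLL_DELAY_SEC)  # nothing pending; back off before polling again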
Example #3
def runServer():
  # Get the current list of custom metrics
  appConfig = Config("application.conf",
                     os.environ["APPLICATION_CONFIG_PATH"])

  engine = repository.engineFactory(appConfig)
  global gCustomMetrics
  now = datetime.datetime.utcnow()

  with engine.connect() as conn:
    gCustomMetrics = dict(
      (m.name, [m, now]) for m in repository.getCustomMetrics(conn))

  queueName = appConfig.get("metric_listener", "queue_name")

  global gProfiling
  gProfiling = (appConfig.getboolean("debugging", "profiling") or
                LOGGER.isEnabledFor(logging.DEBUG))
  del appConfig

  metricStreamer = MetricStreamer()
  modelSwapper = ModelSwapperInterface()

  with MessageBusConnector() as bus:
    if not bus.isMessageQeueuePresent(queueName):
      bus.createMessageQueue(mqName=queueName, durable=True)
    LOGGER.info("Waiting for messages. To exit, press CTRL+C")
    with bus.consume(queueName) as consumer:
      messages = []
      messageRxTimes = []
      while True:
        message = consumer.pollOneMessage()
        if message is not None:
          messages.append(message)
          if gProfiling:
            messageRxTimes.append(time.time())

        if message is None or len(messages) >= MAX_MESSAGES_PER_BATCH:
          if messages:
            # Process the batch
            try:
              _handleBatch(engine,
                           messages,
                           messageRxTimes,
                           metricStreamer,
                           modelSwapper)
            except Exception:  # pylint: disable=W0703
              LOGGER.exception("Unknown failure in processing messages.")
              # Make sure that we ack messages when there is an unexpected error
              # to avoid getting hung forever on one bad record.

            # Ack all the messages
            messages[-1].ack(multiple=True)
            # Clear the message buffer
            messages = []
            messageRxTimes = []
          else:
            # Queue is empty, wait before retrying
            time.sleep(POLL_DELAY_SEC)
Example #4
  def checkMetricCreated(self, metricName, numRecords=None):
    """Check that the new metrics show up in custom metrics list.

    :param metricName: metric name to check
    :param numRecords: optional number of records to wait for
    """
    engine = repository.engineFactory(config=self.__config)

    with engine.begin() as conn:
      metrics = repository.getCustomMetrics(conn)

    for metric in metrics:
      if metric.name == metricName:
        if numRecords:
          self.assertGreaterEqual(metric.last_rowid, numRecords)
        return metric.uid

    raise AssertionError("Metric not created!")
Example #5
    def checkMetricCreated(self, metricName, numRecords=None):
        """Check that the new metrics show up in custom metrics list.

    :param metricName: metric name to check
    :param numRecords: optional number of records to wait for
    """
        engine = repository.engineFactory(config=self.__config)

        with engine.begin() as conn:
            metrics = repository.getCustomMetrics(conn)

        for metric in metrics:
            if metric.name == metricName:
                if numRecords:
                    self.assertGreaterEqual(metric.last_rowid, numRecords)
                return metric.uid

        raise AssertionError("Metric not created!")