def start(self, api_key=None, flush_interval=10, roll_up_interval=10, host=None,
          device=None, api_host=None, use_ec2_instance_ids=False, flush_in_thread=True,
          flush_in_greenlet=False, disabled=False, statsd=False,
          statsd_host='localhost', statsd_port=8125):
    """
    Configure the DogStatsApi instance and optionally begin auto-flushing metrics.

    :param api_key: Your DataDog API key.
    :param flush_interval: The number of seconds to wait between flushes.
    :param roll_up_interval: The number of seconds over which metrics are
        aggregated in process before flushing (ignored when `statsd` is True).
    :param host: The hostname to tag metrics with. Defaults to the local
        hostname reported by `socket.gethostname()`.
    :param device: The device name to tag metrics with.
    :param api_host: The DataDog API endpoint to report metrics to.
    :param use_ec2_instance_ids: If True, use the EC2 instance id as the host,
        overriding any explicit `host` value.
    :param flush_in_thread: True if you'd like to spawn a thread to flush metrics.
        It will run every `flush_interval` seconds.
    :param flush_in_greenlet: Set to true if you'd like to flush in a gevent
        greenlet. Takes precedence over `flush_in_thread` when both are set.
    :param disabled: If True, metrics are never flushed.
    :param statsd: If True, forward metrics over UDP to a statsd instance
        instead of aggregating them in process.
    :param statsd_host: Hostname of the statsd instance (when `statsd` is True).
    :param statsd_port: UDP port of the statsd instance (when `statsd` is True).
    """
    self.flush_interval = flush_interval
    self.roll_up_interval = roll_up_interval
    self.device = device
    self._disabled = disabled

    self.host = host or socket.gethostname()
    if use_ec2_instance_ids:
        # EC2 instance ids take precedence over any explicitly provided host.
        self.host = get_ec2_instance_id()

    self._is_auto_flushing = False
    if statsd:
        # If we're configured to send to a statsd instance, use an aggregator
        # which forwards packets over UDP. statsd does its own roll-up, so no
        # in-process flush is required.
        # Lazy %-args defer formatting until the record is actually emitted.
        log.info("Initializing dog api to use statsd: %s, %s", statsd_host, statsd_port)
        self._needs_flush = False
        self._aggregator = StatsdAggregator(statsd_host, statsd_port)
    else:
        # Otherwise create an aggregator that rolls up metrics in process.
        self._needs_flush = True
        self._aggregator = MetricsAggregator(self.roll_up_interval)

    # The reporter is responsible for sending metrics off to their final destination.
    # It's abstracted to support easy unit testing and in the near future, forwarding
    # to the datadog agent.
    self.reporter = HttpReporter(api_key=api_key, api_host=api_host)

    self._is_flush_in_progress = False
    self.flush_count = 0
    if self._disabled:
        log.info("dogapi is disabled. No metrics will flush.")
    else:
        # Greenlet flushing wins over thread flushing when both are requested.
        if flush_in_greenlet:
            self._start_flush_greenlet()
        elif flush_in_thread:
            self._start_flush_thread()