def test_defaults(self):
    """A Transmission built with no arguments exposes the documented defaults."""
    tx = transmission.Transmission()
    # default worker concurrency
    self.assertEqual(tx.max_concurrent_batches, 10)
    # both internal queues are plain stdlib queues
    self.assertIsInstance(tx.pending, queue.Queue)
    self.assertIsInstance(tx.responses, queue.Queue)
    # non-blocking by default on both the send and response sides
    self.assertEqual(tx.block_on_send, False)
    self.assertEqual(tx.block_on_response, False)
def test_args(self):
    """Constructor keyword arguments are stored on the instance.

    The transmission is closed in a ``finally`` so that its worker
    threads do not leak if one of the assertions fails — the original
    version skipped ``close()`` on any assertion failure.
    """
    t = transmission.Transmission(
        max_concurrent_batches=4,
        block_on_send=True,
        block_on_response=True)
    t.start()
    try:
        self.assertEqual(t.max_concurrent_batches, 4)
        self.assertEqual(t.block_on_send, True)
        self.assertEqual(t.block_on_response, True)
    finally:
        # always tear down the started worker threads
        t.close()
def test_user_agent_addition(self):
    """ensure user_agent_addition is included in the User-Agent header"""
    with mock.patch('transmission.requests.Session') as m_session:
        transmission.Transmission(user_agent_addition='foo/1.0')
        want = "libhoney-py/" + libhoney.version.VERSION + " foo/1.0"
        # the addition is appended after the standard libhoney-py token
        m_session.return_value.headers.update.assert_called_once_with(
            {'User-Agent': want})
def init(writekey="", dataset="", sample_rate=1,
         api_host="https://api.honeycomb.io",
         max_concurrent_batches=10, block_on_send=False,
         block_on_response=False, send_interval=0):
    '''initialize libhoney and prepare it to send events to Honeycomb

    writekey: the authorization key for your team on Honeycomb. Find your team
            write key at https://ui.honeycomb.io/account
    dataset: the name of the default dataset to which to write
    sample_rate: the default sample rate. 1 / sample_rate events will be sent.
    max_concurrent_batches: the number of threads to spin up to send events
    block_on_send: if true, block when send queue fills. If false, drop
            events until there's room in the queue
    block_on_response: if true, block when the response queue fills. if
            false, drop response objects.
    send_interval: how long to wait before sending events

    Side effects: replaces the module-level transmission (_xmit) and the
    g_* configuration globals; any previously created transmission is
    simply dropped, not closed.
    '''
    global _xmit, g_writekey, g_dataset, g_api_host, g_sample_rate, g_responses
    global g_block_on_response
    # NOTE(review): these arguments are passed positionally, so this is
    # only correct if Transmission's first four parameters are
    # (max_concurrent_batches, block_on_send, block_on_response, <interval>).
    # Elsewhere in this file Transmission is constructed with a
    # send_frequency= keyword — TODO confirm the fourth positional
    # parameter really is the send interval.
    _xmit = transmission.Transmission(max_concurrent_batches, block_on_send,
                                      block_on_response, send_interval)
    g_writekey = writekey
    g_dataset = dataset
    g_api_host = api_host
    g_sample_rate = sample_rate
    # expose the transmission's response queue so callers can consume
    # send results via the module-level global
    g_responses = _xmit.get_response_queue()
    g_block_on_response = block_on_response
def test_flush_after_timeout(self):
    """A lone event is flushed by the send timer even though the batch
    never fills up."""
    libhoney.init()
    with requests_mock.Mocker() as m:
        m.post("http://urlme/1/batch/dataset",
               text=json.dumps(100 * [{"status": 202}]),
               status_code=200,
               request_headers={"X-Honeycomb-Team": "writeme"})
        tx = transmission.Transmission(max_concurrent_batches=1,
                                       send_frequency=0.1)
        tx.start()
        event = libhoney.Event()
        event.writekey = "writeme"
        event.dataset = "dataset"
        event.add_field("key", "value")
        event.api_host = "http://urlme/"
        tx.send(event)
        # sleep past at least one send_frequency tick so the timer flushes
        time.sleep(0.2)
        response = tx.responses.get()
        assert response["status_code"] == 202
        tx.close()
def test_send(self):
    """Overflowing the pending queue must not raise.

    With pending capped at 2 and responses capped at 1: the third send
    overflows pending and its drop-notification lands on the response
    queue; the fourth send finds the response queue full too and is
    dropped entirely.  The original test asserted nothing — it only
    verified "no exception" — so we additionally pin the resulting
    queue depths.
    """
    t = transmission.Transmission()
    t.pending = queue.Queue(maxsize=2)
    t.responses = queue.Queue(maxsize=1)
    t.send(FakeEvent())
    t.send(FakeEvent())
    t.send(FakeEvent())  # should overflow sending and land on response
    t.send(FakeEvent())  # shouldn't throw exception when response is full
    # two events queued, exactly one drop-notification recorded
    self.assertEqual(t.pending.qsize(), 2)
    self.assertEqual(t.responses.qsize(), 1)
def test_grouping(self):
    """Events for different datasets are batched into separate POSTs."""
    libhoney.init()
    with requests_mock.Mocker() as m:
        # one mock endpoint per dataset
        for ds in ("dataset", "alt_dataset"):
            m.post("http://urlme/1/batch/" + ds,
                   text=json.dumps(100 * [{"status": 202}]),
                   status_code=200,
                   request_headers={"X-Honeycomb-Team": "writeme"})
        tx = transmission.Transmission(max_concurrent_batches=1,
                                       gzip_enabled=False)
        tx.start()
        builder = libhoney.Builder()
        builder.writekey = "writeme"
        builder.api_host = "http://urlme/"
        # 100 events to each dataset, in dataset order
        for ds in ("dataset", "alt_dataset"):
            builder.dataset = ds
            for n in range(100):
                event = builder.new_event()
                event.created_at = datetime.datetime(2013, 1, 1, 11, 11, 11)
                event.add_field("key", n)
                tx.send(event)
        tx.close()
        acked = 0
        while not tx.responses.empty():
            response = tx.responses.get()
            if response is None:
                break
            assert response["status_code"] == 202
            acked += 1
        assert acked == 200
        # exactly the two per-dataset batch URLs were hit
        assert ({h.url for h in m.request_history} == {
            "http://urlme/1/batch/dataset",
            "http://urlme/1/batch/alt_dataset",
        })
def test_send(self):
    """Exercise send() against fully mocked queues and stats client.

    Covers three paths in sequence: the default non-blocking put, the
    blocking put when block_on_send is set, and the drop path when the
    pending queue raises queue.Full.
    """
    t = transmission.Transmission()
    t.sd = mock.Mock()
    depth = 4
    t.pending.qsize = mock.Mock(return_value=depth)
    t.pending.put = mock.Mock()
    t.pending.put_nowait = mock.Mock()
    t.responses.put = mock.Mock()
    t.responses.put_nowait = mock.Mock()
    ev = FakeEvent()
    ev.metadata = None

    # non-blocking path: put_nowait is used and stats are recorded
    t.send(ev)
    t.sd.gauge.assert_called_with("queue_length", 4)
    t.pending.put_nowait.assert_called_with(ev)
    t.pending.put.assert_not_called()
    t.sd.incr.assert_called_with("messages_queued")
    t.pending.put.reset_mock()
    t.pending.put_nowait.reset_mock()
    t.sd.reset_mock()

    # blocking path: the blocking put is used instead of put_nowait
    t.block_on_send = True
    t.send(ev)
    t.pending.put.assert_called_with(ev)
    t.pending.put_nowait.assert_not_called()
    t.sd.incr.assert_called_with("messages_queued")
    t.sd.reset_mock()

    # overflow path: queue.Full is swallowed and a drop response is queued
    t.block_on_send = False
    t.pending.put_nowait = mock.Mock(side_effect=queue.Full())
    t.send(ev)
    t.sd.incr.assert_called_with("queue_overflow")
    t.responses.put_nowait.assert_called_with({
        "status_code": 0,
        "duration": 0,
        "metadata": None,
        "body": "",
        "error": "event dropped; queue overflow",
    })
def test_send_gzip(self):
    """A sent event arrives gzip-encoded and decompresses to the
    expected batch payload."""
    libhoney.init()
    with requests_mock.Mocker() as m:
        m.post("http://urlme/1/batch/datame",
               text=json.dumps([{"status": 202}]),
               status_code=200,
               request_headers={"X-Honeycomb-Team": "writeme"})
        tx = transmission.Transmission(block_on_send=True)
        tx.start()
        event = libhoney.Event()
        event.writekey = "writeme"
        event.dataset = "datame"
        event.api_host = "http://urlme/"
        event.metadata = "metadaaata"
        event.sample_rate = 3
        event.created_at = datetime.datetime(2013, 1, 1, 11, 11, 11)
        event.add_field("key", "asdf")
        tx.send(event)
        # sending is async even with the mock, so block until the
        # response actually arrives
        while True:
            response = tx.responses.get()
            if response is None:
                break
            self.assertEqual(response["status_code"], 202)
            self.assertEqual(response["metadata"], "metadaaata")
            break
        for req in m.request_history:
            # verify gzip payload is sane by decompressing and checking
            # its contents
            self.assertEqual(req.headers['Content-Encoding'], 'gzip',
                             "content encoding should be gzip")
            gz = gzip.GzipFile(fileobj=io.BytesIO(req.body), mode='rb')
            # json.load in python 3.5 doesn't like binary files, so read
            # and decode the stream before parsing
            payload = json.loads(gz.read().decode())
            self.assertEqual(payload[0]['samplerate'], 3)
            self.assertEqual(payload[0]['data']['key'], 'asdf')
def test_batching(self):
    """300 events are split across batch POSTs and all are acknowledged."""
    libhoney.init()
    with requests_mock.Mocker() as m:
        m.post("http://urlme/1/batch/datame",
               text=json.dumps(200 * [{"status": 202}]),
               status_code=200,
               request_headers={"X-Honeycomb-Team": "writeme"})
        tx = transmission.Transmission()
        tx.start()
        for n in range(300):
            event = libhoney.Event()
            event.writekey = "writeme"
            event.dataset = "datame"
            event.api_host = "http://urlme/"
            event.metadata = "metadaaata"
            event.sample_rate = 3
            event.created_at = datetime.datetime(2013, 1, 1, 11, 11, 11)
            event.add_field("key", n)
            tx.send(event)
        tx.close()
        acked = 0
        while not tx.responses.empty():
            response = tx.responses.get()
            if response is None:
                break
            assert response["status_code"] == 202
            assert response["metadata"] == "metadaaata"
            acked += 1
        assert acked == 300
        # every serialized event carries the fixed timestamp and sample rate
        for req in m.request_history:
            for posted in req.json():
                assert posted["time"] == "2013-01-01T11:11:11Z"
                assert posted["samplerate"] == 3