def close_final_trace(
    trace_id,
    span_id,
    parent_context_data,
    start_time=None,
):
    # start_time must be a datetime; despite the None default it is
    # dereferenced below when computing the span duration.
    logging.info(f"Closing Overall Trace, ID={trace_id} SPAN={span_id}")
    logging.info(f"Step Function Started: {start_time.isoformat()}")

    # One-off client used to emit the final root-span event.
    hc = libhoney.Client(writekey=WRITEKEY, dataset=DATASET,
                         max_concurrent_batches=1)
    ev = hc.new_event()
    ev.add_field("service_name", SERVICE_NAME)
    ev.add_field("trace.trace_id", trace_id)
    ev.add_field("trace.span_id", span_id)
    ev.add_field("name", SERVICE_NAME)
    ev.created_at = start_time

    end_time = datetime.utcnow()
    ev.add_field("end_time", end_time.isoformat())
    ev.add(parent_context_data)
    ev.add_field("duration_ms", (end_time - start_time).total_seconds() * 1000.0)
    ev.send()
def __init__(self, writekey='', dataset='', service_name='',
             api_host='https://api.honeycomb.io'):
    if not writekey:
        writekey = os.environ.get('HONEYCOMB_WRITEKEY', '')
    if not dataset:
        dataset = os.environ.get('HONEYCOMB_DATASET', '')
    if not service_name:
        service_name = os.environ.get('HONEYCOMB_SERVICE', dataset)

    transmission_impl = libhoney.transmission.Transmission(
        user_agent_addition=USER_AGENT_ADDITION,
    )

    request = Session.request
    if getattr(Session.request, "opentelemetry_ext_requests_applied", False):
        request = Session.request.__wrapped__  # pylint:disable=no-member
    # Bind session.request for this object to the non-instrumented version.
    transmission_impl.session.request = types.MethodType(
        request, transmission_impl.session)

    self.client = libhoney.Client(
        writekey=writekey,
        dataset=dataset,
        api_host=api_host,
        transmission_impl=transmission_impl,
    )
    self.client.add_field('service_name', service_name)
    self.client.add_field('meta.otel_exporter_version', VERSION)
    self.client.add_field('meta.local_hostname', socket.gethostname())
def main():
    global g_hc
    g_hc = libhoney.Client(writekey="abcabc123123defdef456456",
                           dataset="factorial.tornado",
                           transmission_impl=TornadoTransmission())
    ioloop.IOLoop.current().spawn_callback(event_routine)
    # Runs as a Tornado coroutine: each yield pulls the next response
    # off the libhoney response queue as it arrives.
    while True:
        r = yield g_hc.responses().get()
        print("Got response: %s" % r)
def _init(self, app):
    self.outlets.clear()  # In case of multiple init
    self.outlets.append(LogfmtOutlet(app.name))

    libhoney_key = app.config.get('EVENTS_HONEYCOMB_KEY')
    libhoney_dataset = app.config.get('EVENTS_HONEYCOMB_DATASET', app.name)
    if libhoney_key:
        libhoney_client = libhoney.Client(
            writekey=libhoney_key,
            dataset=libhoney_dataset,
            user_agent_addition='flask-events/%s' % __version__,
        )
        self.outlets.append(LibhoneyOutlet(libhoney_client))
def create_libhoney_client(writekey, dataset, honeycomb_api):
    client = libhoney.Client(
        writekey=writekey,
        dataset=dataset,
        block_on_send=True,
        user_agent_addition='honeyflare/%s' % __version__,
        api_host=honeycomb_api,
    )
    client.add_field('MetaProcessor', 'honeyflare/%s' % __version__)
    # Consume Honeycomb API responses in a background thread.
    thread = threading.Thread(target=read_honeycomb_responses,
                              args=(client.responses(), dataset))
    thread.start()
    return client
def read_responses(resp_queue):
    '''read responses from the libhoney queue, print them out.'''
    while True:
        resp = resp_queue.get()
        # libhoney will enqueue a None value after we call libhoney.close()
        if resp is None:
            break
        status = "sending event with metadata {} took {}ms and got response code {} with message \"{}\" and error message \"{}\"".format(
            resp["metadata"], resp["duration"], resp["status_code"],
            resp["body"].rstrip(), resp["error"])
        print(status)


if __name__ == "__main__":
    hc = libhoney.Client(writekey=writekey, dataset=dataset,
                         max_concurrent_batches=1)
    resps = hc.responses()
    t = threading.Thread(target=read_responses, args=(resps,))
    # Mark this thread as a daemon so we don't wait for this thread to exit
    # before shutting down. Alternatively, to be sure you read all the
    # responses before exiting, omit this line and explicitly call
    # libhoney.close() at the end of the script.
    t.daemon = True
    t.start()

    # Attach fields to top-level instance
    hc.add_field("version", "3.4.5")
    hc.add_dynamic_field(num_threads)
        if found:
            continue
        modified = True
        print(f"updating BigQuery schema with new field {newname}")
        fields.append(
            bigquery.SchemaField(newname, "STRING", mode="NULLABLE"))
    if modified:
        newschema[n] = bigquery.SchemaField('data', 'RECORD', fields=fields)
        break

if modified:
    bqtable.schema = newschema
    bqtable = bq.update_table(bqtable, ["schema"])

honey = libhoney.Client(writekey=config['HONEYCOMB_WRITEKEY'],
                        dataset=config['HONEYCOMB_CLIENT_DATASET'])


def submit_event(log, extra={}):
    bqrow = {}
    bqrow["event_name"] = log["event"]
    bqrow["time"] = log["time"]
    if 'rebble.user' in extra and 'rebble.noident' not in extra:
        bqrow['rebble_user'] = extra['rebble.user']
    if 'rebble.subscribed' in extra and 'rebble.noident' not in extra:
        bqrow['rebble_subscribed'] = extra['rebble.subscribed']
    bqrow["data"] = {}
    ev = honey.new_event()