def get_public_images(self):
    """Return the ids of all public images visible on this IaaS cloud.

    Reports timing both globally and per-cloud under the 'phantomweb'
    statsd prefix.
    """
    connection = self.get_iaas_compute_con()
    overall_timer = statsd.Timer('phantomweb')
    per_cloud_timer = statsd.Timer('phantomweb')
    overall_timer.start()
    per_cloud_timer.start()
    images = connection.get_all_images()
    overall_timer.stop('get_all_images.timing')
    per_cloud_timer.stop('get_all_images.%s.timing' % self.cloudname)
    return [image.id for image in images if image.is_public]
def test_timer_total(self, mock_client):
    """stop() without a subname reports under '<name>.total'; with one,
    under '<name>.<subname>'."""
    total_timer = statsd.Timer('timer4')
    total_timer.start()
    total_timer.stop()
    assert self.get_time(mock_client, 'timer4.total') == 123.4, \
        'This test must execute within 2ms'

    named_timer = statsd.Timer('timer5')
    named_timer.start()
    named_timer.stop('test')
    assert self.get_time(mock_client, 'timer5.test') == 123.4, \
        'This test must execute within 2ms'
def test_timer_zero(self, mock_client):
    """With min_send_threshold=0, zero-length timings are not sent."""
    zero_timer = statsd.Timer('timer8', min_send_threshold=0)
    zero_timer.start()
    zero_timer.stop()
    assert mock_client._send.call_args is None, \
        '0 timings shouldnt be sent'

    named_zero_timer = statsd.Timer('timer9', min_send_threshold=0)
    named_zero_timer.start()
    named_zero_timer.stop('test')
    assert mock_client._send.call_args is None, \
        '0 timings shouldnt be sent'
def get_keys(self):
    """Return the names of all key pairs on this IaaS cloud.

    Best-effort: on a connection/service error the failure is logged and an
    empty list is returned.

    Returns:
        list of key pair name strings (empty on error).
    """
    connection = self.get_iaas_compute_con()
    keyname_list = []
    try:
        timer = statsd.Timer('phantomweb')
        timer_cloud = statsd.Timer('phantomweb')
        timer.start()
        timer_cloud.start()
        keypairs = connection.get_all_key_pairs()
        timer.stop('get_all_key_pairs.timing')
        timer_cloud.stop('get_all_key_pairs.%s.timing' % self.cloudname)
        keyname_list = [k.name for k in keypairs]
    except Exception as boto_ex:
        # Python 3 compatible form of the legacy `except Exception, boto_ex`.
        log.error("Error connecting to the service %s" % (str(boto_ex)))
    # Previously the collected names were never returned (dead result).
    return keyname_list
def time(self, key, time):
    """Timer metric: record `time` under `key`, caching one Timer per key."""
    check_key(key)
    assert isinstance(time, Number)
    # setdefault keeps an existing Timer, creating one only on first use.
    timer = self._timers.setdefault(key, statsd.Timer(''))
    timer.send(key, time)
def get_user_images(self):
    """Return the ids of this account's own images that are not public."""
    connection = self.get_iaas_compute_con()
    request_timer = statsd.Timer('phantomweb')
    request_timer.start()
    owned_images = connection.get_all_images(owners=['self'])
    request_timer.stop('get_all_images.timing')
    return [image.id for image in owned_images if not image.is_public]
def test_context_manager(self, mock_client):
    """Using a Timer as a context manager reports under '<name>.total'."""
    cm_timer = statsd.Timer('cm')
    with cm_timer:
        pass  # Do something here
    assert self.get_time(mock_client, 'cm.total') == 123.4, \
        'This test must execute within 2ms'
def test_timer_intermediate(self, mock_client):
    """intermediate() reports a lap time without stopping the timer."""
    lap_timer = statsd.Timer('timer6')
    lap_timer.start()
    lap_timer.intermediate('extras')
    assert self.get_time(mock_client, 'timer6.extras') == 123.4, \
        'This test must execute within 2ms'
    lap_timer.stop()
    assert self.get_time(mock_client, 'timer6.total') == 370.2, \
        'This test must execute within 2ms'

    named_lap_timer = statsd.Timer('timer7')
    named_lap_timer.start()
    named_lap_timer.intermediate('extras')
    assert self.get_time(mock_client, 'timer7.extras') == 123.4, \
        'This test must execute within 2ms'
    named_lap_timer.stop('test')
    assert self.get_time(mock_client, 'timer7.test') == 370.2, \
        'This test must execute within 2ms'
def post_event_to_webhook_ee(self: Task, event: Dict[str, Any], team_id: int, site_url: str) -> None:
    # Fire REST hooks (Zapier) and incoming-webhook messages for every action
    # matched by this event. A short-lived Postgres Event row is created only
    # so action filters can run against it; it is always deleted in `finally`.
    if not site_url:
        site_url = settings.SITE_URL
    timer = statsd.Timer("%s_posthog_cloud" % (settings.STATSD_PREFIX,))
    timer.start()
    team = Team.objects.select_related("organization").get(pk=team_id)
    elements_list = chain_to_elements(event.get("elements_chain", ""))
    # Only pass timestamp/elements through when they are non-empty.
    ephemeral_postgres_event = Event.objects.create(
        event=event["event"],
        distinct_id=event["distinct_id"],
        properties=event["properties"],
        team=team,
        site_url=site_url,
        **({"timestamp": event["timestamp"]} if event["timestamp"] else {}),
        **({"elements": elements_list})
    )
    try:
        is_zapier_available = team.organization.is_feature_available("zapier")
        actionFilters = {"team_id": team_id}
        if not is_zapier_available:
            if not team.slack_incoming_webhook:
                return  # Exit this task if neither Zapier nor webhook URL are available
        else:
            actionFilters["post_to_slack"] = True  # We only need to fire for actions that are posted to webhook URL
        for action in cast(Sequence[Action], Action.objects.filter(**actionFilters).all()):
            # Does this single ephemeral event match the action's filters?
            qs = Event.objects.filter(pk=ephemeral_postgres_event.pk).query_db_by_action(action)
            if not qs:
                continue
            # REST hooks
            if is_zapier_available:
                action.on_perform(ephemeral_postgres_event)
            # webhooks
            if team.slack_incoming_webhook and action.post_to_slack:
                message_text, message_markdown = get_formatted_message(action, ephemeral_postgres_event, site_url)
                if determine_webhook_type(team) == "slack":
                    message = {
                        "text": message_text,
                        "blocks": [{"type": "section", "text": {"type": "mrkdwn", "text": message_markdown}}],
                    }
                else:
                    message = {
                        "text": message_markdown,
                    }
                statsd.Counter("%s_posthog_cloud_hooks_web_fired" % (settings.STATSD_PREFIX)).increment()
                # NOTE(review): verify=False disables TLS certificate checks on
                # the outgoing webhook request — confirm this is intentional.
                requests.post(team.slack_incoming_webhook, verify=False, json=message)
    except:
        raise
    finally:
        # Timing is recorded and the ephemeral row removed on every path,
        # including early return and re-raised exceptions.
        timer.stop("hooks_processed_for_event")
        ephemeral_postgres_event.delete()
def time(self, key, time):
    """Timer metric: expand `key` to its full name and send `time` for it."""
    check_key(key)
    assert isinstance(time, Number)
    full_key = get_full_key_name(key)
    statsd.Timer('').send(full_key, time)
def statsd_timing_task(slug, seconds_taken=1.0, **kwargs):
    """Report an externally-measured duration to statsd.

    The timing is not taken with ``timer.start``/``.stop`` here: this runs
    as a task, likely out of process and with network overhead, so the
    caller measures in-process for accuracy and this task only talks to the
    statsd backend.
    """
    conn = get_statsd_conn()
    statsd.Timer(slug, connection=conn).send('total', seconds_taken)
def wrapper(*args, **kwargs):
    """Call `func`, timing it under `self.counter_name` when statsd is on."""
    # No statsd host configured: call straight through, no timing.
    if not ig_conf.STATSD_HOST:
        return func(*args, **kwargs)
    stats_timer = statsd.Timer(ig_conf.STATSD_PREFIX)
    stats_timer.start()
    result = None
    try:
        result = func(*args, **kwargs)
    finally:
        # Record elapsed time whether or not func raised.
        stats_timer.stop(self.counter_name)
    return result
def wrapper(*args, **kwargs):
    """Call `func`, reporting its duration to console/statsd/CloudWatch
    according to the ig_conf switches."""
    started = time.time()
    result = func(*args, **kwargs)
    tdiff = time.time() - started
    if ig_conf.PRINT_CONSOLE_METRICS:
        print(f"\t{self.counter_name}:\t{tdiff*1000}ms")
    if ig_conf.STATSD_HOST:
        statsd.Timer(ig_conf.STATSD_PREFIX).send(self.counter_name, tdiff)
    if ig_conf.SEND_CLOUDWATCH_METRICS:
        # CloudWatch expects milliseconds.
        _send_cloudwatch_metric(self.counter_name, tdiff * 1000, unit="Milliseconds")
    return result
def terminate_domain_instance(username, domain_id, instance_id):
    """Terminate a single IaaS instance belonging to one of the user's domains.

    Parameters:
        username: owner of the domain.
        domain_id: domain holding the instance.
        instance_id: Phantom-side id of the instance to terminate.

    Raises:
        PhantomWebException: if the instance is unknown, lacks an IaaS id,
            or lacks a cloud name.
    """
    user_obj = get_user_object(username)
    instance_to_terminate = get_domain_instance(username, domain_id, instance_id)
    if instance_to_terminate is None:
        raise PhantomWebException("No instance %s available to terminate" % instance_id)

    instance_iaas_id = instance_to_terminate.get('iaas_instance_id')
    if instance_iaas_id is None:
        raise PhantomWebException("Instance %s has no iaas ID" % instance_id)

    cloud_name = instance_to_terminate.get('cloud')
    if cloud_name is None:
        # Guard: a missing 'cloud' entry previously crashed with
        # AttributeError on the .split() below.
        raise PhantomWebException("Instance %s has no cloud" % instance_id)
    cloud_name = cloud_name.split("/")[-1]

    iaas_cloud = user_obj.get_cloud(cloud_name)
    iaas_connection = iaas_cloud.get_iaas_compute_con()
    log.debug("User %s terminating the instance %s on %s" % (username, instance_iaas_id, cloud_name))

    timer = statsd.Timer('phantomweb')
    timer.start()
    timer_cloud = statsd.Timer('phantomweb')
    timer_cloud.start()
    try:
        iaas_connection.terminate_instances(instance_ids=[instance_iaas_id, ])
    except Exception:
        # Best-effort: log the failure but still record the timings below.
        log.exception("Couldn't terminate %s" % instance_iaas_id)
    timer.stop('terminate_instances.timing')
    timer_cloud.stop('terminate_instances.%s.timing' % cloud_name)
    return
def setUp(self):
    """Create the timer under test and freeze time.time() so elapsed
    values are deterministic rather than system-load dependent."""
    self.timer = statsd.Timer('cm')
    self._time_patch = mock.patch('time.time')
    patched_time = self._time_patch.start()

    def fake_clock():
        # Each call to time.time() advances by exactly 0.1234 seconds.
        tick = 0.0
        while True:
            tick += 0.1234
            yield tick

    patched_time.side_effect = fake_clock()
def process_event_ee(
    distinct_id: str,
    ip: str,
    site_url: str,
    data: dict,
    team_id: int,
    now: datetime.datetime,
    sent_at: Optional[datetime.datetime],
    event_uuid: UUIDT,
) -> None:
    # Normalise properties, resolve identify/alias, then store either a
    # session-recording event or a regular captured event.
    timer = statsd.Timer("%s_posthog_cloud" % (settings.STATSD_PREFIX, ))
    timer.start()
    properties = data.get("properties", {})
    # $set / $set_once ride along inside the event's properties.
    if data.get("$set"):
        properties["$set"] = data["$set"]
    if data.get("$set_once"):
        properties["$set_once"] = data["$set_once"]

    person_uuid = UUIDT()

    ts = handle_timestamp(data, now, sent_at)
    handle_identify_or_alias(data["event"], properties, distinct_id, team_id)

    if data["event"] == "$snapshot":
        create_session_recording_event(
            uuid=event_uuid,
            team_id=team_id,
            distinct_id=distinct_id,
            session_id=properties["$session_id"],
            snapshot_data=properties["$snapshot_data"],
            timestamp=ts,
        )
        # NOTE(review): this early return skips timer.stop() below, so no
        # "process_event_ee" timing is reported for $snapshot events —
        # confirm whether that is intentional.
        return

    _capture_ee(
        event_uuid=event_uuid,
        person_uuid=person_uuid,
        ip=ip,
        site_url=site_url,
        team_id=team_id,
        event=data["event"],
        distinct_id=distinct_id,
        properties=properties,
        timestamp=ts,
    )
    timer.stop("process_event_ee")
def get_event(request):
    # Capture endpoint: validate payload and API token, then route each event
    # to Kafka (EE path) or to a Celery task for asynchronous processing.
    timer = statsd.Timer("%s_posthog_cloud" % (settings.STATSD_PREFIX, ))
    timer.start()
    now = timezone.now()
    try:
        data_from_request = load_data_from_request(request)
        data = data_from_request["data"]
    except TypeError:
        return cors_response(
            request,
            JsonResponse(
                {
                    "code": "validation",
                    "message": "Malformed request data. Make sure you're sending valid JSON.",
                },
                status=400,
            ),
        )
    if not data:
        return cors_response(
            request,
            JsonResponse(
                {
                    "code": "validation",
                    "message": "No data found. Make sure to use a POST request when sending the payload in the body of the request.",
                },
                status=400,
            ),
        )
    sent_at = _get_sent_at(data, request)
    token = _get_token(data, request)
    if not token:
        return cors_response(
            request,
            JsonResponse(
                {
                    "code": "validation",
                    "message": "API key not provided. You can find your project API key in PostHog project settings.",
                },
                status=401,
            ),
        )
    team = Team.objects.get_team_from_token(token)
    if team is None:
        # Token was not a project API key; fall back to a personal API key
        # plus an explicit project id.
        try:
            project_id = _get_project_id(data, request)
        except:
            return cors_response(
                request,
                JsonResponse(
                    {
                        "code": "validation",
                        "message": "Invalid project ID.",
                    },
                    status=400,
                ),
            )
        if not project_id:
            return cors_response(
                request,
                JsonResponse(
                    {
                        "code": "validation",
                        "message": "Project API key invalid. You can find your project API key in PostHog project settings.",
                    },
                    status=401,
                ),
            )
        user = User.objects.get_from_personal_api_key(token)
        if user is None:
            return cors_response(
                request,
                JsonResponse(
                    {
                        "code": "validation",
                        "message": "Personal API key invalid.",
                    },
                    status=401,
                ),
            )
        team = user.teams.get(id=project_id)
    if isinstance(data, dict):
        if data.get("batch"):  # posthog-python and posthog-ruby
            data = data["batch"]
            assert data is not None
        elif "engage" in request.path_info:  # JS identify call
            data["event"] = "$identify"  # make sure it has an event name
    if isinstance(data, list):
        events = data
    else:
        events = [data]
    for event in events:
        try:
            distinct_id = _get_distinct_id(event)
        except KeyError:
            return cors_response(
                request,
                JsonResponse(
                    {
                        "code": "validation",
                        "message": "You need to set user distinct ID field `distinct_id`.",
                        "item": event,
                    },
                    status=400,
                ),
            )
        if not event.get("event"):
            return cors_response(
                request,
                JsonResponse(
                    {
                        "code": "validation",
                        "message": "You need to set event name field `event`.",
                        "item": event,
                    },
                    status=400,
                ),
            )
        if not event.get("properties"):
            event["properties"] = {}
        _ensure_web_feature_flags_in_properties(event, team, distinct_id)
        event_uuid = UUIDT()
        if is_ee_enabled():
            log_topics = [KAFKA_EVENTS_WAL]
            if settings.PLUGIN_SERVER_INGESTION:
                log_topics.append(KAFKA_EVENTS_PLUGIN_INGESTION)
                statsd.Counter("%s_posthog_cloud_plugin_server_ingestion" % (settings.STATSD_PREFIX, )).increment()
            log_event(
                distinct_id=distinct_id,
                ip=get_ip_address(request),
                site_url=request.build_absolute_uri("/")[:-1],
                data=event,
                team_id=team.id,
                now=now,
                sent_at=sent_at,
                event_uuid=event_uuid,
                topics=log_topics,
            )
            # must done after logging because process_event_ee modifies the event, e.g. by removing $elements
            if not settings.PLUGIN_SERVER_INGESTION:
                process_event_ee(
                    distinct_id=distinct_id,
                    ip=get_ip_address(request),
                    site_url=request.build_absolute_uri("/")[:-1],
                    data=event,
                    team_id=team.id,
                    now=now,
                    sent_at=sent_at,
                    event_uuid=event_uuid,
                )
        else:
            task_name = "posthog.tasks.process_event.process_event"
            if settings.PLUGIN_SERVER_INGESTION or team.plugins_opt_in:
                task_name += "_with_plugins"
                celery_queue = settings.PLUGINS_CELERY_QUEUE
            else:
                celery_queue = settings.CELERY_DEFAULT_QUEUE
            celery_app.send_task(
                name=task_name,
                queue=celery_queue,
                args=[
                    distinct_id,
                    get_ip_address(request),
                    request.build_absolute_uri("/")[:-1],
                    event,
                    team.id,
                    now.isoformat(),
                    sent_at,
                ],
            )
    timer.stop("event_endpoint")
    return cors_response(request, JsonResponse({"status": 1}))
#!/usr/bin/env python # -*- coding: utf-8 -*- import freebox_v5_status.freeboxstatus import statsd metrics_prefix = "freebox" fbx = freebox_v5_status.freeboxstatus.FreeboxStatus() while True: timer = statsd.Timer(metrics_prefix) timer.start() fbx.update() timer.stop("dataAcquisitionTime") gauge = statsd.Gauge(metrics_prefix) gauge.send("connection.debit.down", fbx.status["adsl"]["synchro_speed"]["down"]) gauge.send("connection.debit.up", fbx.status["adsl"]["synchro_speed"]["up"]) gauge.send("network.WAN.down", fbx.status["network"]["interfaces"]["WAN"]["down"]) gauge.send("network.WAN.up", fbx.status["network"]["interfaces"]["WAN"]["up"]) gauge.send("network.ethernet.down", fbx.status["network"]["interfaces"]["ethernet"]["down"]) gauge.send("network.ethernet.up", fbx.status["network"]["interfaces"]["ethernet"]["up"]) gauge.send("network.switch.down", fbx.status["network"]["interfaces"]["switch"]["down"]) gauge.send("network.switch.up", fbx.status["network"]["interfaces"]["switch"]["up"])
def get_event(request):
    # Later capture-endpoint variant: structured error responses via
    # generate_exception_response, optional IP anonymisation, and session
    # recording preprocessing before routing events.
    timer = statsd.Timer("%s_posthog_cloud" % (settings.STATSD_PREFIX,))
    timer.start()
    now = timezone.now()
    try:
        data = load_data_from_request(request)
    except RequestParsingError as error:
        capture_exception(error)  # We still capture this on Sentry to identify actual potential bugs
        return cors_response(
            request,
            generate_exception_response(f"Malformed request data: {error}", code="invalid_payload"),
        )
    if not data:
        return cors_response(
            request,
            generate_exception_response(
                "No data found. Make sure to use a POST request when sending the payload in the body of the request.",
                code="no_data",
            ),
        )
    sent_at = _get_sent_at(data, request)
    token = _get_token(data, request)
    if not token:
        return cors_response(
            request,
            generate_exception_response(
                "API key not provided. You can find your project API key in PostHog project settings.",
                type="authentication_error",
                code="missing_api_key",
                status_code=status.HTTP_401_UNAUTHORIZED,
            ),
        )
    team = Team.objects.get_team_from_token(token)
    if team is None:
        # Token was not a project API key; fall back to a personal API key
        # plus an explicit project id.
        try:
            project_id = _get_project_id(data, request)
        except ValueError:
            return cors_response(
                request,
                generate_exception_response("Invalid Project ID.", code="invalid_project", attr="project_id"),
            )
        if not project_id:
            return cors_response(
                request,
                generate_exception_response(
                    "Project API key invalid. You can find your project API key in PostHog project settings.",
                    type="authentication_error",
                    code="invalid_api_key",
                    status_code=status.HTTP_401_UNAUTHORIZED,
                ),
            )
        user = User.objects.get_from_personal_api_key(token)
        if user is None:
            return cors_response(
                request,
                generate_exception_response(
                    "Invalid Personal API key.",
                    type="authentication_error",
                    code="invalid_personal_api_key",
                    status_code=status.HTTP_401_UNAUTHORIZED,
                ),
            )
        team = user.teams.get(id=project_id)
    if isinstance(data, dict):
        if data.get("batch"):  # posthog-python and posthog-ruby
            data = data["batch"]
            assert data is not None
        elif "engage" in request.path_info:  # JS identify call
            data["event"] = "$identify"  # make sure it has an event name
    if isinstance(data, list):
        events = data
    else:
        events = [data]
    try:
        events = preprocess_session_recording_events(events)
    except ValueError as e:
        return cors_response(request, generate_exception_response(f"Invalid payload: {e}", code="invalid_payload"))
    for event in events:
        try:
            distinct_id = _get_distinct_id(event)
        except KeyError:
            return cors_response(
                request,
                generate_exception_response(
                    "You need to set user distinct ID field `distinct_id`.", code="required", attr="distinct_id"
                ),
            )
        if not event.get("event"):
            return cors_response(
                request,
                generate_exception_response(
                    "You need to set user event name, field `event`.", code="required", attr="event"
                ),
            )
        if not event.get("properties"):
            event["properties"] = {}
        _ensure_web_feature_flags_in_properties(event, team, distinct_id)
        event_uuid = UUIDT()
        # Respect the team's privacy setting: drop the client IP if asked.
        ip = None if team.anonymize_ips else get_ip_address(request)
        if is_ee_enabled():
            log_topics = [KAFKA_EVENTS_WAL]
            if settings.PLUGIN_SERVER_INGESTION:
                log_topics.append(KAFKA_EVENTS_PLUGIN_INGESTION)
                statsd.Counter("%s_posthog_cloud_plugin_server_ingestion" % (settings.STATSD_PREFIX,)).increment()
            log_event(
                distinct_id=distinct_id,
                ip=ip,
                site_url=request.build_absolute_uri("/")[:-1],
                data=event,
                team_id=team.id,
                now=now,
                sent_at=sent_at,
                event_uuid=event_uuid,
                topics=log_topics,
            )
            # must done after logging because process_event_ee modifies the event, e.g. by removing $elements
            if not settings.PLUGIN_SERVER_INGESTION:
                process_event_ee(
                    distinct_id=distinct_id,
                    ip=ip,
                    site_url=request.build_absolute_uri("/")[:-1],
                    data=event,
                    team_id=team.id,
                    now=now,
                    sent_at=sent_at,
                    event_uuid=event_uuid,
                )
        else:
            task_name = "posthog.tasks.process_event.process_event_with_plugins"
            celery_queue = settings.PLUGINS_CELERY_QUEUE
            celery_app.send_task(
                name=task_name,
                queue=celery_queue,
                args=[distinct_id, ip, request.build_absolute_uri("/")[:-1], event, team.id, now.isoformat(), sent_at,],
            )
    timer.stop("event_endpoint")
    return cors_response(request, JsonResponse({"status": 1}))
from __future__ import with_statement
import mock
import statsd

# Exercise Timer.decorate with a mocked statsd.Client so nothing is sent on
# the wire; the decorated functions are instant, so timings report as '0|ms'.
with mock.patch('statsd.Client') as mock_client:
    instance = mock_client.return_value
    instance._send.return_value = 1

    # Some simple decorator tests
    # Bare decorate: the stat name defaults to the function's own name.
    timer0 = statsd.Timer('timer0')

    @timer0.decorate
    def a():
        pass

    a()
    mock_client._send.assert_called_with(mock.ANY, {'timer0.a': '0|ms'})

    # decorate('spam'): an explicit subname overrides the function name.
    timer1 = statsd.Timer('timer1')

    @timer1.decorate('spam')
    def b():
        pass

    b()
    mock_client._send.assert_called_with(mock.ANY, {'timer1.spam': '0|ms'})

    # get_client('eggs') nests a further namespace under timer1.
    timer2 = timer1.get_client('eggs')

    @timer2.decorate
    def c():
        pass

    c()
    mock_client._send.assert_called_with(mock.ANY, {'timer1.eggs.c': '0|ms'})
def Timer(name, suffix=None):
    """Build a statsd Timer named '<prefix>.<name>', appending `suffix`
    to the name when one is given."""
    full_name = append_suffix(name, suffix) if suffix else name
    return statsd.Timer("%s.%s" % (get_prefix(), full_name))
sock.connect((CARBON_SERVER,CARBON_PORT)) sock.send(message) sock.close() if DEBUG: print "\nMetrics sent: " + message except: print "\nCouldn't send metrics" if STATS: stats= statsd.Connection.set_defaults(host=STATSD_SERVER) while (1): if STATS: proptimer = statsd.Timer('edgemon') proptimer.start() n = mydnsrecord.split(".") for i in xrange(len(n), 0, -1): sub = '.'.join(n[i-1:]) #time dns query t_start = time.time() query = dns.message.make_query(sub, dns.rdatatype.A, dns.rdataclass.IN) response = dns.query.udp(query, myauthserverip) t_end = time.time() t_total = t_end - t_start if DEBUG: print response
CARBON_PORT = 2003 mydnsrecord = "test.geo.bigg33k.net" myrecordtype = "txt" mytarget = "ns2.p29.dynect.net" mystatsd = "NS2" oldrec = 0 newrec = 0 intrec = 0 noop = 0 stats = statsd.Connection.set_defaults(host='graphite.bigg33k.net') while (1): proptimer = statsd.Timer('dnsprop') proptimer.start() while (intrec == oldrec): time.sleep(0.05) newrec = subprocess.Popen( ['dig', myrecordtype, mydnsrecord, '+short', mytarget], stdout=subprocess.PIPE).communicate()[0] try: intrec = long(newrec.strip('"' '\n')) #print record except ValueError: noop #print record
def __init__(self):
    """Point the global statsd connection at the configured host/port and
    initialise the metric caches."""
    metrics_conf = cfg.CONF.metrics
    statsd.Connection.set_defaults(host=metrics_conf.host,
                                   port=metrics_conf.port)
    self._counters = {}
    self._timer = statsd.Timer('')
def __enter__(self):
    """Start timing when the `with` block is entered.

    Returns:
        self, so `with Metric(...) as m:` binds the manager itself
        (previously this returned None, making `as` bindings useless).
    """
    self.timer = statsd.Timer(self.metric_name)
    self.timer.start()
    return self
def prepare_statsd(parameters):
    r"""Sends data to statsd

    Sends a value to statsd.

    host
        defaults to ``127.0.0.1``
    port
        defaults to ``8125``
    sample_rate
        defaults to ``1.0``
    type
        Accepted values are ``counter``, ``gauge`` and ``timer``,
        defaults to ``counter``
    value
        The value to send. Defaults to ``1.0``
    multiplier
        The amount to multiply the value by. Defaults to ``1.0``
    delta
        boolean, only used for gauge, whether to send differential values
        or absolute values. Defaults to ``False``
    prefix
        the prefix for the stat name backreferences not allowed
    name
        the name for the stat, backreferences allowed (required)

    Example:

    .. code:: yaml

        match: Duration: (\d+.\d+)s
        statsd:
            type: timer
            value: {1}
            prefix: appserver.request
            name: duration
        statsd:
            prefix: appserver.request
            name: count
    """
    import statsd  # noqa

    # One connection shared by the handler closure created below.
    statsd_connection = statsd.Connection(
        host=parameters.get('host', '127.0.0.1'),
        port=int(parameters.get('port', 8125)),
        sample_rate=float(parameters.get('sample_rate', 1.0)),
    )

    meter_type = parameters.get('type', 'counter')
    name_template = logshipper.context.prepare_template(parameters['name'])
    val_template = logshipper.context.prepare_template(
        parameters.get('value', 1))
    multiplier = float(parameters.get('multiplier', 1.0))

    # Pick the statsd client class and whether sends are differential.
    if meter_type == 'counter':
        statsd_client = statsd.Counter(parameters.get('prefix'),
                                       statsd_connection)
        delta = True
    elif meter_type == 'gauge':
        statsd_client = statsd.Gauge(parameters.get('prefix'),
                                     statsd_connection)
        delta_str = str(parameters.get("delta", False)).lower()
        delta = delta_str in filters.TRUTH_VALUES
    elif meter_type == 'timer':
        statsd_client = statsd.Timer(parameters.get('prefix'),
                                     statsd_connection)
        delta = False
    else:
        raise ValueError("Unknown meter type, should be one of counter, "
                         "gauge or timer")  # pragma: nocover

    def handle_statsd(message, context):
        # Interpolate the stat name and value from the pipeline context
        # each time a message is handled.
        name = name_template.interpolate(context)
        value = val_template.interpolate(context)
        if delta:
            statsd_client.increment(name, float(value) * multiplier)
        else:
            statsd_client.send(name, float(value) * multiplier)

    return handle_statsd
def __init__(self, application, label):
    """Remember the application/label pair and create the application's
    statsd timer."""
    self.application = application
    self.label = label
    self.t = statsd.Timer(application)
number_of_comments = models.Comment.objects.filter( patch__patchset__issue=issue, author=request.user.id, draft=False).count() try: case_id = CASE_ID_RE.match(issue.subject).groups()[0] except AttributeError: return gauge = statsd.Gauge('{0}.codereview.number_of_comments.{case_id}'.format( HOSTNAME, case_id=case_id)) gauge.send( '{author_name}'.format( author_name=request.user.username.replace('.', '_')), number_of_comments) timer = statsd.Timer('{0}.codereview'.format(HOSTNAME)) def set_timers(obj): """Set timers to every callable of the given obj.""" if isinstance(obj, type('')): return set_timers(importlib.import_module(obj)) for attr_name in dir(obj): if not attr_name.startswith('__'): attr = getattr(obj, attr_name) if isinstance(attr, types.FunctionType): setattr(obj, attr_name, timer.decorate(attr)) elif isinstance(attr, type): set_timers(attr)
def get_event(request):
    # Early capture-endpoint variant: validates payload and token, then
    # processes each event inline (EE) or via a Celery task, optionally
    # logging to the Kafka write-ahead log.
    timer = statsd.Timer("%s_posthog_cloud" % (settings.STATSD_PREFIX,))
    timer.start()
    now = timezone.now()
    try:
        data_from_request = load_data_from_request(request)
        data = data_from_request["data"]
    except TypeError:
        return cors_response(
            request,
            JsonResponse(
                {"code": "validation", "message": "Malformed request data. Make sure you're sending valid JSON.",},
                status=400,
            ),
        )
    if not data:
        return cors_response(
            request,
            JsonResponse(
                {
                    "code": "validation",
                    "message": "No data found. Make sure to use a POST request when sending the payload in the body of the request.",
                },
                status=400,
            ),
        )
    sent_at = _get_sent_at(data, request)
    token = _get_token(data, request)
    if not token:
        return cors_response(
            request,
            JsonResponse(
                {
                    "code": "validation",
                    "message": "API key not provided. You can find your project API key in PostHog project settings.",
                },
                status=400,
            ),
        )
    team = Team.objects.get_team_from_token(token)
    if team is None:
        # Token was not a project API key; fall back to a personal API key
        # plus an explicit project id.
        try:
            project_id = _get_project_id(data, request)
        except:
            return cors_response(
                request,
                JsonResponse({"code": "validation", "message": "Invalid project ID.",}, status=400,),
            )
        if not project_id:
            return cors_response(
                request,
                JsonResponse(
                    {
                        "code": "validation",
                        "message": "Project API key invalid. You can find your project API key in PostHog project settings.",
                    },
                    status=400,
                ),
            )
        user = User.objects.get_from_personal_api_key(token)
        if user is None:
            return cors_response(
                request,
                JsonResponse({"code": "validation", "message": "Personal API key invalid.",}, status=400,),
            )
        team = user.teams.get(id=project_id)
    if isinstance(data, dict):
        if data.get("batch"):  # posthog-python and posthog-ruby
            data = data["batch"]
            assert data is not None
        elif "engage" in request.path_info:  # JS identify call
            data["event"] = "$identify"  # make sure it has an event name
    if isinstance(data, list):
        events = data
    else:
        events = [data]
    for event in events:
        try:
            distinct_id = _get_distinct_id(event)
        except KeyError:
            return cors_response(
                request,
                JsonResponse(
                    {
                        "code": "validation",
                        "message": "You need to set user distinct ID field `distinct_id`.",
                        "item": event,
                    },
                    status=400,
                ),
            )
        if "event" not in event:
            return cors_response(
                request,
                JsonResponse(
                    {"code": "validation", "message": "You need to set event name field `event`.", "item": event,},
                    status=400,
                ),
            )
        if is_ee_enabled():
            process_event_ee(
                distinct_id=distinct_id,
                ip=get_ip_address(request),
                site_url=request.build_absolute_uri("/")[:-1],
                data=event,
                team_id=team.id,
                now=now,
                sent_at=sent_at,
            )
        else:
            task_name = "posthog.tasks.process_event.process_event"
            celery_queue = settings.CELERY_DEFAULT_QUEUE
            if team.plugins_opt_in:
                task_name += "_with_plugins"
                celery_queue = settings.PLUGINS_CELERY_QUEUE
            celery_app.send_task(
                name=task_name,
                queue=celery_queue,
                args=[
                    distinct_id,
                    get_ip_address(request),
                    request.build_absolute_uri("/")[:-1],
                    event,
                    team.id,
                    now.isoformat(),
                    sent_at,
                ],
            )
        if is_ee_enabled() and settings.LOG_TO_WAL:
            # log the event to kafka write ahead log for processing
            log_event(
                distinct_id=distinct_id,
                ip=get_ip_address(request),
                site_url=request.build_absolute_uri("/")[:-1],
                data=event,
                team_id=team.id,
                now=now,
                sent_at=sent_at,
            )
    timer.stop("event_endpoint")
    return cors_response(request, JsonResponse({"status": 1}))