Code example #1
0
def log_event(
    distinct_id: str,
    ip: str,
    site_url: str,
    data: dict,
    team_id: int,
    now: datetime.datetime,
    sent_at: Optional[datetime.datetime],
    event_uuid: UUIDT,
    *,
    topics: Sequence[str],
) -> None:
    """Serialize one event and publish it to every Kafka topic in `topics`."""
    if settings.DEBUG:
        print(f'Logging event {data["event"]} to Kafka topics {" and ".join(topics)}')
    # Build the wire payload once; the same message goes to each topic.
    payload = {
        "uuid": str(event_uuid),
        "distinct_id": distinct_id,
        "ip": ip,
        "site_url": site_url,
        "data": json.dumps(data),
        "team_id": team_id,
        "now": now.isoformat(),
        "sent_at": sent_at.isoformat() if sent_at else "",
    }
    kafka = KafkaProducer()
    for destination in topics:
        kafka.produce(topic=destination, data=payload)
Code example #2
0
 def log_event(
     distinct_id: str,
     ip: Optional[str],
     site_url: str,
     data: dict,
     team_id: int,
     now: datetime,
     sent_at: Optional[datetime],
     event_uuid: UUIDT,
     *,
     topic: str = KAFKA_EVENTS_PLUGIN_INGESTION,
 ) -> None:
     """Serialize one event and publish it to the given Kafka topic."""
     if settings.DEBUG:
         print(f'Logging event {data["event"]} to Kafka topic {topic}')
     producer = KafkaProducer()
     producer.produce(
         topic=topic,
         data={
             "uuid": str(event_uuid),
             "distinct_id": distinct_id,
             "ip": ip,
             "site_url": site_url,
             "data": json.dumps(data),
             "team_id": team_id,
             "now": now.isoformat(),
             # An unset sent_at is encoded as the empty string, not null.
             "sent_at": sent_at.isoformat() if sent_at else "",
         },
     )
Code example #3
0
def log_event_to_dead_letter_queue(
    raw_payload: Dict,
    event_name: str,
    event: Dict,
    error_message: str,
    error_location: str,
    topic: str = KAFKA_DEAD_LETTER_QUEUE,
):
    """Annotate a failed event with error metadata and publish it to the dead letter queue topic."""
    # Start from a copy of the original event and layer the error metadata on top.
    data = dict(event)
    data.update(
        {
            "error_timestamp": datetime.now().isoformat(),
            "error_location": error_location,
            "error": error_message,
            "elements_chain": "",
            "id": str(UUIDT()),
            "event": event_name,
            "raw_payload": json.dumps(raw_payload),
            "tags": ["django_server"],
            "event_uuid": event["uuid"],
        }
    )
    # Strip any timezone info from "now" before re-serializing; keep None if it was falsy.
    data["now"] = datetime.fromisoformat(data["now"]).replace(tzinfo=None).isoformat() if data["now"] else None
    # "uuid" is replaced by "event_uuid" above (KeyError if absent, as before).
    data.pop("uuid")

    try:
        KafkaProducer().produce(topic=topic, data=data)
        statsd.incr(settings.EVENTS_DEAD_LETTER_QUEUE_STATSD_METRIC)
    except Exception as e:
        # Best-effort: record the failure but never raise out of the DLQ path.
        capture_exception(e)
        statsd.incr("events_dead_letter_queue_produce_error")

        if settings.DEBUG:
            print("Failed to produce to events dead letter queue with error:", e)
Code example #4
0
def log_event(
    distinct_id: str,
    ip: str,
    site_url: str,
    data: dict,
    team_id: int,
    now: datetime.datetime,
    sent_at: Optional[datetime.datetime],
    event_uuid: UUIDT,
) -> None:
    """Serialize one event and write it to the Kafka events WAL topic."""
    if settings.DEBUG:
        print(f'Logging event {data["event"]} to WAL')
    message = {
        "uuid": str(event_uuid),
        "distinct_id": distinct_id,
        "ip": ip,
        "site_url": site_url,
        "data": json.dumps(data),
        "team_id": team_id,
        "now": now.isoformat(),
        # An unset sent_at is encoded as the empty string, not null.
        "sent_at": sent_at.isoformat() if sent_at else "",
    }
    producer = KafkaProducer()
    producer.produce(topic=KAFKA_EVENTS_WAL, data=message)
Code example #5
0
File: process_event.py Project: adamb70/posthog
def log_event(
    distinct_id: str,
    ip: str,
    site_url: str,
    data: dict,
    team_id: int,
    now: datetime.datetime,
    sent_at: Optional[datetime.datetime],
) -> None:
    """Serialize one event and write it to the Kafka events WAL topic."""
    producer = KafkaProducer()
    producer.produce(
        topic=KAFKA_EVENTS_WAL,
        data={
            "distinct_id": distinct_id,
            "ip": ip,
            "site_url": site_url,
            "data": json.dumps(data),
            "team_id": team_id,
            # Timestamps use a space-separated microsecond format, not ISO-8601.
            "now": now.strftime("%Y-%m-%d %H:%M:%S.%f"),
            "sent_at": sent_at.strftime("%Y-%m-%d %H:%M:%S.%f") if sent_at else "",
        },
    )
Code example #6
0
def log_event(data: Dict, event_name: str) -> None:
    """Publish one event payload to the plugin ingestion Kafka topic.

    Increments a statsd success counter after producing; on failure, increments
    an error counter, prints the error, and re-raises for the caller to handle.
    """
    if settings.DEBUG:
        print(f"Logging event {event_name} to Kafka topic {KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC}")

    # TODO: Handle Kafka being unavailable with exponential backoff retries
    try:
        KafkaProducer().produce(topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, data=data)
        statsd.incr("posthog_cloud_plugin_server_ingestion")
    except Exception as e:
        statsd.incr("capture_endpoint_log_event_error")
        print(f"Failed to produce event to Kafka topic {KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC} with error:", e)
        # Bare raise re-raises the active exception with its original traceback
        # (idiomatic, unlike `raise e`).
        raise
Code example #7
0
File: capture.py Project: GalDayan/posthog
    def log_event(
        data: Dict,
        event_name: str,
        topic: str = KAFKA_EVENTS_PLUGIN_INGESTION,
    ) -> None:
        """Publish one event payload to the given Kafka topic.

        Produce failures are captured (with the payload attached) and counted
        in statsd; the exception is swallowed so capture stays best-effort.
        """
        if settings.DEBUG:
            print(f"Logging event {event_name} to Kafka topic {topic}")

        # TODO: Handle Kafka being unavailable with exponential backoff retries
        try:
            KafkaProducer().produce(topic=topic, data=data)
        except Exception as e:
            capture_exception(e, {"data": data})
            statsd.incr("capture_endpoint_log_event_error")

            if settings.DEBUG:
                # Bug fix: report the topic actually used, not the default
                # constant, so failures on custom topics are attributed correctly.
                print(f"Failed to produce event to Kafka topic {topic} with error:", e)
Code example #8
0
File: capture.py Project: neilkakkar/posthog
from posthog.exceptions import RequestParsingError, generate_exception_response
from posthog.helpers.session_recording import preprocess_session_recording_events
from posthog.models import Team, User
from posthog.models.feature_flag import get_active_feature_flags
from posthog.models.utils import UUIDT
from posthog.utils import cors_response, get_ip_address, load_data_from_request

if settings.STATSD_HOST is not None:
    statsd.Connection.set_defaults(host=settings.STATSD_HOST,
                                   port=settings.STATSD_PORT)

if is_ee_enabled():
    from ee.kafka_client.client import KafkaProducer
    from ee.kafka_client.topics import KAFKA_EVENTS_PLUGIN_INGESTION

    producer = KafkaProducer()

    def log_event(
        distinct_id: str,
        ip: Optional[str],
        site_url: str,
        data: dict,
        team_id: int,
        now: datetime,
        sent_at: Optional[datetime],
        event_uuid: UUIDT,
        *,
        topic: str = KAFKA_EVENTS_PLUGIN_INGESTION,
    ) -> None:
        if settings.DEBUG:
            print(f'Logging event {data["event"]} to Kafka topic {topic}')