Example #1
    def _teardown_request(self, exception):
        # if the request raised an exception, record its details before the event is sent
        if exception:
            beeline.add_field('request.error_detail',
                              beeline.internal.stringify_exception(exception))
            beeline.add_field('request.error', str(type(exception)))

        beeline.internal.send_event()
Example #2
        def _start_response(status, headers, *args):
            # status is the WSGI status string, e.g. "200 OK"
            status_code = int(status[0:4])
            beeline.add_field("response.status_code", status_code)
            # send the event now unless this is a 500 that the signal-based
            # error handler will finalize and send instead
            if status_code != 500:
                beeline._send_event()
            elif status_code == 500 and not signals.signals_available:
                beeline._send_event()

            return start_response(status, headers, *args)
Example #3
    def wrapper(*args, **kwargs):
        time_start = time.time()

        result = wrapped_function(*args, **kwargs)

        time_end = time.time()
        event_field_name = "timer.%s_dur_ms" % wrapped_function.__name__
        beeline.add_field(event_field_name, (time_end - time_start) * 1000)
        return result
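This wrapper is the inner function of a timing decorator. A minimal sketch of the enclosing decorator is shown below; the decorator name timed, the use of functools.wraps, and the example function slow_lookup are assumptions, not part of the original snippet.

import functools
import time

import beeline


def timed(wrapped_function):
    @functools.wraps(wrapped_function)
    def wrapper(*args, **kwargs):
        # same body as above: time the call and attach the duration in ms
        time_start = time.time()
        result = wrapped_function(*args, **kwargs)
        beeline.add_field("timer.%s_dur_ms" % wrapped_function.__name__,
                          (time.time() - time_start) * 1000)
        return result

    return wrapper


@timed
def slow_lookup():
    time.sleep(0.1)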
Example #4
    def create_http_event(self, request):
        # Code to be executed for each request before
        # the view (and later middleware) are called.

        trace_name = "django_http_%s" % request.method.lower()
        beeline._new_event(data={
            "type": "http_server",
            "request.host": request.get_host(),
            "request.method": request.method,
            "request.path": request.path,
            "request.remote_addr": request.META['REMOTE_ADDR'],
            "request.content_length": request.META.get('CONTENT_LENGTH', '0'),
            "request.user_agent": request.META.get('HTTP_USER_AGENT', ''),
            "request.scheme": request.scheme,
            "request.secure": request.is_secure(),
            "request.query": request.GET.dict(),
            "request.xhr": request.is_ajax(),
            "request.post": request.POST.dict()
        }, trace_name=trace_name, top_level=True)

        response = self.get_response(request)

        # Code to be executed for each request/response after
        # the view is called.

        beeline.add_field("response.status_code", response.status_code)
        beeline._send_event()

        return response
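A method like create_http_event is conventionally driven from a Django middleware's __call__. A minimal sketch of the surrounding class is shown below; the class name HoneyMiddleware is an assumption, and the body of create_http_event is the method above.

class HoneyMiddleware:
    def __init__(self, get_response):
        # Django constructs the middleware once at startup
        self.get_response = get_response

    def __call__(self, request):
        # delegate per-request work to create_http_event, defined above
        return self.create_http_event(request)

    # create_http_event(self, request) from Example #4 goes here

The class is then listed in settings.MIDDLEWARE so that Django runs it for every request.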
Example #5
def get_schema(dataset):
    """
    Looks up the dataset schema: implements a fake cache and database call to better simulate
    what an actual call might look like. 
    """
    global last_cache_time

    hit_cache = True
    if last_cache_time is None:
        last_cache_time = time.time()
    if time.time() - last_cache_time > CACHE_TIMEOUT:
        # we fall through the cache every 10 seconds
        hit_cache = False
        # pretend to hit a slow database that takes 30-50ms
        time.sleep(random.uniform(.03, .05))
        last_cache_time = time.time()
    beeline.add_field("hitSchemaCache", hit_cache)
    # fail occasionally to simulate a schema lookup failure
    if random.randint(0, 61) == 0:
        raise SchemaLookupFailure
Example #6
    def __call__(self, execute, sql, params, many, context):
        vendor = context['connection'].vendor
        trace_name = "django_%s_query" % vendor

        with beeline.tracer(trace_name):
            beeline.add({
                "type": "db",
                "db.query": sql,
                "db.query_args": params,
            })

            try:
                db_call_start = datetime.datetime.now()
                result = execute(sql, params, many, context)
                db_call_diff = datetime.datetime.now() - db_call_start
                beeline.add_field("db.duration",
                                  db_call_diff.total_seconds() * 1000)
            except Exception as e:
                beeline.add_field("db.error", str(type(e)))
                beeline.add_field("db.error_detail", str(e))
                raise
            else:
                return result
            finally:
                if vendor == "postgresql" or vendor == "mysql":
                    beeline.add({
                        "db.last_insert_id": context['cursor'].cursor.lastrowid,
                        "db.rows_affected": context['cursor'].cursor.rowcount,
                    })
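The __call__ signature here matches Django's database instrumentation hook, connection.execute_wrapper, which passes (execute, sql, params, many, context) to the wrapper. A minimal sketch of how such a wrapper might be installed for the duration of a request is shown below; the names HoneyDBWrapper and db_instrumentation_middleware are assumptions, not taken from the original snippet.

from django.db import connection


def db_instrumentation_middleware(get_response):
    db_wrapper = HoneyDBWrapper()  # assumed name for the wrapper class defined above

    def middleware(request):
        # every query run while handling this request passes through the wrapper's __call__
        with connection.execute_wrapper(db_wrapper):
            return get_response(request)

    return middleware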
Example #7
    def _before_request(self):
        beeline.add_field("request.route", flask.request.endpoint)
Example #8
File: app.py  Project: osulp/hcio-examples
def handle_event(dataset_name):
    event = {}

    # parse JSON body
    try:
        data = json.loads(request.data)
        event['Data'] = data
        beeline.add_field("event_columns", len(event['Data']))
    except (TypeError, json.decoder.JSONDecodeError):
        return JSON_FAILURE_RESPONSE

    # get writekey, timestamp, and sample rate out of HTTP headers
    try:
        get_headers(request, event)
    except ParseFailure:
        return PARSE_FAILURE_RESPONSE

    # authenticate writekey or return 401
    try:
        team = validate_write_key(event['WriteKey'])
        beeline.add_field("team", vars(team))
    except AuthFailure:
        return AUTH_FAILURE_RESPONSE
    except AuthMishapenFailure:
        return AUTH_MISHAPEN_FAILURE_RESPONSE

    # take the writekey and the dataset name and get back a dataset object
    try:
        dataset = resolve_dataset(dataset_name)
        beeline.add_field("dataset", vars(dataset))
    except DatasetLookupFailure:
        return DATASET_LOOKUP_FAILURE_RESPONSE

    # get partition info
    try:
        partition = get_partition(dataset)
        event['ChosenPartition'] = partition
        beeline.add_field("chosen_partition", partition)
    except DatasetLookupFailure:
        return DATASET_LOOKUP_FAILURE_RESPONSE

    # check time - set to now if not present
    if 'Timestamp' not in event:
        event['Timestamp'] = datetime.datetime.now(
            datetime.timezone.utc).isoformat()
    else:
        # record the difference between the event's timestamp and now to help identify
        # lagging events
        event_timestamp = dateutil.parser.parse(event['Timestamp'])
        event_time_delta = datetime.datetime.now(
            datetime.timezone.utc) - event_timestamp
        beeline.add_field("event_time_delta_sec",
                          event_time_delta.total_seconds())
    beeline.add_field("event_time", event['Timestamp'])

    # verify schema
    try:
        get_schema(dataset)
    except SchemaLookupFailure:
        return SCHEMA_LOOKUP_FAILURE_RESPONSE

    # hand off to external service - write to local disk
    write_event(event)
    return ''
Example #9
def get_comments(slug):
    article = Article.query.filter_by(slug=slug).first()
    if not article:
        raise InvalidUsage.article_not_found()
    beeline.add_field("comment_count", article.comments.count())
    return article.comments
Example #10
    def _teardown_request(self, exception):
        if exception:
            beeline.add_field('request.error_detail', str(exception))
            beeline.add_field('request.error', str(type(exception)))
            beeline._send_event()
Example #11
    def handle_error(self, context):
        beeline.add_field("db.error", str(context.original_exception))
        beeline._send_event()
Example #12
        def _start_response(status, headers, *args):
            # note: status here is the full WSGI status string (e.g. "200 OK"),
            # unlike Example #2, which parses out the numeric code
            beeline.add_field("response.status_code", status)
            beeline._send_event()

            return start_response(status, headers, *args)
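In Examples #2 and #12, _start_response is a closure over the WSGI start_response callable. A minimal sketch of the surrounding WSGI middleware is shown below; the class name HoneyWSGIMiddleware and the trace name "wsgi_http" are assumptions, and the event creation reuses the internal call seen in Example #4.

import beeline


class HoneyWSGIMiddleware:
    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        # open an event for this request (same internal API used in Example #4)
        beeline._new_event(data={"type": "http_server"},
                           trace_name="wsgi_http", top_level=True)

        def _start_response(status, headers, *args):
            beeline.add_field("response.status_code", status)
            beeline._send_event()

            return start_response(status, headers, *args)

        return self.app(environ, _start_response)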
Example #13
    def process_exception(self, request, exception):
        # Django calls this when a view raises; record the error on the current event
        beeline.add_field("request.error_detail", str(exception))
Example #14
def get_headers(request, event):
    """
    Pulls three headers out of the HTTP request and ensures they are the correct type.
    Does no additional validation.
    """
    # pull raw values from headers
    write_key = request.headers.get(HEADER_WRITE_KEY)
    beeline.add_field(HEADER_WRITE_KEY, write_key)

    timestamp = request.headers.get(HEADER_TIMESTAMP)
    beeline.add_field(HEADER_TIMESTAMP, timestamp)

    sample_rate = request.headers.get(HEADER_SAMPLE_RATE)
    beeline.add_field(HEADER_SAMPLE_RATE, sample_rate)

    # ensure correct types
    # writekeys are strings, so no conversion needed (we will validate them later)
    event['WriteKey'] = write_key

    # timestamps should be in RFC3339 format
    # if not in the right format or if missing, we should note that and continue
    if timestamp:
        match = RFC3999_REGEX.match(timestamp)
        if not match:
            beeline.add_field("error_time_parsing",
                              "timestamp not in RFC3999 format")
        event['Timestamp'] = timestamp
    else:
        beeline.add_field("error_time_parsing", "no timestamp for event")

    # sample rate should be a positive int; default to 1 if missing or empty
    if not sample_rate:
        sample_rate = "1"
    try:
        parsed_sample_rate = int(sample_rate)
        event['SampleRate'] = parsed_sample_rate
        beeline.add_field("sample_rate", parsed_sample_rate)
    except ValueError:
        raise ParseFailure