Example #1
def test_execute_dsn_kwargs():
    q = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        conn = psycopg2.connect(dbname=dsn['database'],
                                user=dsn['user'],
                                password='',
                                host=dsn['host'],
                                port=dsn['port'])
        cur = conn.cursor()
        cur.execute(q)

    subsegment = xray_recorder.current_segment().subsegments[0]
    assert subsegment.name == 'execute'
    sql = subsegment.sql
    assert sql['database_type'] == 'PostgreSQL'
    assert sql['user'] == dsn['user']
    assert sql['url'] == url
    assert sql['database_version']
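These psycopg2 examples assume the library has already been patched and that a segment is open on the recorder before the query runs. A minimal sketch of the kind of test fixture this relies on (the fixture name and segment name are illustrative, not taken from the example):

import pytest
from aws_xray_sdk.core import patch, xray_recorder


@pytest.fixture(autouse=True)
def construct_ctx():
    # Patch psycopg2 so cursor.execute() emits SQL subsegments, then open
    # a segment that the assertions above can inspect.
    patch(('psycopg2',))
    xray_recorder.begin_segment('test')
    yield
    xray_recorder.clear_trace_entities()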
Example #2
def test_only_dynamodb_calls_are_traced():
    """Test only a single subsegment is created for other AWS services.

    As the pynamodb patch applies the botocore patch as well, we need
    to ensure that only one subsegment is created for all calls not
    made by PynamoDB. As PynamoDB calls botocore differently than the
    botocore patch expects we also just get a single subsegment per
    PynamoDB call.
    """
    session = botocore.session.get_session()
    s3 = session.create_client('s3', region_name='us-west-2')
    try:
        s3.get_bucket_location(Bucket='mybucket')
    except ClientError:
        pass

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    assert subsegments[0].name == 's3'
    assert len(subsegments[0].subsegments) == 0
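For the behaviour described in the docstring to hold, the pynamodb patch (which also applies the botocore patch) must be enabled before the clients are created. A hedged sketch of that setup:

from aws_xray_sdk.core import patch, xray_recorder

# Patching 'pynamodb' wires up the botocore patch as well, so calls made
# directly through botocore (like the S3 call above) are traced too, but
# with only a single subsegment each.
patch(('pynamodb',))
xray_recorder.begin_segment('test')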
Example #3
def test_sns_publish_parameters():
    sns = session.create_client('sns', region_name='us-west-2')
    response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 200,
        }
    }

    with Stubber(sns) as stubber:
        stubber.add_response('publish', response, {'TopicArn': 'myAmazingTopic', 'Message': 'myBodaciousMessage'})
        sns.publish(TopicArn='myAmazingTopic', Message='myBodaciousMessage')

    subsegment = xray_recorder.current_segment().subsegments[0]
    assert subsegment.http['response']['status'] == 200

    aws_meta = subsegment.aws
    assert aws_meta['topic_arn'] == 'myAmazingTopic'
    assert aws_meta['request_id'] == REQUEST_ID
    assert aws_meta['region'] == 'us-west-2'
    assert aws_meta['operation'] == 'Publish'
Example #4
def home():
    """Homepage route"""

    # Ex 10 - Add annotation with current user's nickname
    document = xray_recorder.current_segment()

    # Does not work: current_user has no .nickname field. Use session[] instead.
    #document.put_annotation("user_nickname", flask_login.current_user.nickname)
    document.put_annotation("user_nickname", session['nickname'])

    # Does not work: the xray_recorder instance is an AsyncAWSXRayRecorder, which has no put_annotation() method.
    #xray_recorder.put_annotation("WTFXRAY", "WTFXRAY!!!!!")

    return render_template_string("""
        {% extends "main.html" %}
        {% block content %}
        {% if current_user.is_authenticated %}
        Click <em>my photos</em> to access your photos.
        {% else %}
        Click <em>log in / sign up</em> to access this site.
        {% endif %}
        {% endblock %}""")
Example #5
def test_ddb_table_name():
    ddb = session.create_client('dynamodb', region_name='us-west-2')
    response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 403,
        }
    }

    with Stubber(ddb) as stubber:
        stubber.add_response('describe_table', response, {'TableName': 'mytable'})
        ddb.describe_table(TableName='mytable')

    subsegment = xray_recorder.current_segment().subsegments[0]
    assert subsegment.error
    assert subsegment.http['response']['status'] == 403

    aws_meta = subsegment.aws
    assert aws_meta['table_name'] == 'mytable'
    assert aws_meta['request_id'] == REQUEST_ID
    assert aws_meta['region'] == 'us-west-2'
    assert aws_meta['operation'] == 'DescribeTable'
Example #6
def test_s3_bucket_name_capture():
    s3 = session.create_client('s3', region_name='us-west-2')
    response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 200,
        }
    }

    bucket_name = 'mybucket'

    with Stubber(s3) as stubber:
        stubber.add_response('list_objects_v2', response, {'Bucket': bucket_name})
        s3.list_objects_v2(Bucket=bucket_name)

    subsegment = xray_recorder.current_segment().subsegments[0]
    aws_meta = subsegment.aws

    assert aws_meta['bucket_name'] == bucket_name
    assert aws_meta['request_id'] == REQUEST_ID
    assert aws_meta['region'] == 'us-west-2'
    assert aws_meta['operation'] == 'ListObjectsV2'
Example #7
def test_execute_in_pool():
    q = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        pool = psycopg2.pool.SimpleConnectionPool(1,
                                                  1,
                                                  dbname=dsn['database'],
                                                  user=dsn['user'],
                                                  password='',
                                                  host=dsn['host'],
                                                  port=dsn['port'])
        cur = pool.getconn(key=dsn['user']).cursor()
        cur.execute(q)

    subsegment = xray_recorder.current_segment().subsegments[0]
    assert subsegment.name == 'execute'
    sql = subsegment.sql
    assert sql['database_type'] == 'PostgreSQL'
    assert sql['user'] == dsn['user']
    assert sql['url'] == url
    assert sql['database_version']
Example #8
def test_map_parameter_grouping():
    """
    Test special parameters that have shape of map are recorded
    as a list of keys based on `para_whitelist.json`
    """
    ddb = session.create_client('dynamodb', region_name='us-west-2')
    response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 500,
        }
    }

    with Stubber(ddb) as stubber:
        stubber.add_response('batch_write_item', response, {'RequestItems': ANY})
        ddb.batch_write_item(RequestItems={'table1': [{}], 'table2': [{}]})

    subsegment = xray_recorder.current_segment().subsegments[0]
    assert subsegment.fault
    assert subsegment.http['response']['status'] == 500

    aws_meta = subsegment.aws
    assert sorted(aws_meta['table_names']) == ['table1', 'table2']
Example #9
def record_data(key, value, searchable):
    """
    This method is used to collect instrumentation data. Data is stored in
    current subsegment if there is one activae, otherwise it is stored in
    the segment.

    :type key: string
    :param key: identifier for instrumentation data
    :type value: str, bool or number when searchable is True. When searcable
    is False, an instance of dict
    :param value: instrumentation data
    :type searchable: bool
    :param searchable: whether key for instrumentation data can be
    used to search in aws x-ray
    """
    __assert_type__(key, value, searchable)
    segment = xray_recorder.current_segment()
    subsegment = xray_recorder.current_subsegment()
    span = subsegment if subsegment is not None else segment
    if searchable:
        span.put_annotation(key, value)
    else:
        span.put_metadata(key, value)
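A short usage sketch for record_data (key names and values are illustrative): with searchable=True the pair becomes an X-Ray annotation that can be used in filter expressions, while searchable=False stores a dict as metadata.

# Annotation: a scalar value that X-Ray indexes for searching/filtering.
record_data('order_status', 'SHIPPED', searchable=True)

# Metadata: an arbitrary dict that is stored on the entity but not indexed.
record_data('order_details', {'items': 3, 'total': 42.5}, searchable=False)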
Example #10
def dynamodb():

    xray_recorder.begin_segment(name='API1', sampling=1)
    current_segment = xray_recorder.current_segment()
    headers = {
        X_RAY_HEADER_TRACE: current_segment.trace_id,
        X_RAY_HEADER_PARENT: current_segment.id
    }
    url = "http://" + os.environ['API2_HOST'] + ":5000"

    r = requests.get(url, headers=headers)

    data = r.json()

    response = jsonify({
        'api1': 'ok',
        'api2': data['api2'],
        'api3': data['api3']
    })
    response.status_code = 200

    xray_recorder.end_segment()

    return response
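On the receiving side, API2 would read the two headers set above and continue the same trace. A hedged sketch of such a downstream handler, assuming a Flask app object named app and the same X_RAY_HEADER_TRACE / X_RAY_HEADER_PARENT constants as the example; the response body is illustrative.

from flask import jsonify, request
from aws_xray_sdk.core import xray_recorder

@app.route("/")
def api2():
    # Reuse the caller's trace id and point this segment at the caller's
    # segment id so both services appear in one trace.
    xray_recorder.begin_segment(
        name='API2',
        traceid=request.headers.get(X_RAY_HEADER_TRACE),
        parent_id=request.headers.get(X_RAY_HEADER_PARENT),
        sampling=1,
    )
    try:
        return jsonify({'api2': 'ok', 'api3': 'ok'})
    finally:
        xray_recorder.end_segment()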
Example #11
def test_execute_bad_query():
    q = 'SELECT blarg'
    with testing.postgresql.Postgresql() as postgresql:
        dsn = postgresql.dsn()
        conn = pg8000.connect(database=dsn['database'],
                              user=dsn['user'],
                              password='',
                              host=dsn['host'],
                              port=dsn['port'])
        cur = conn.cursor()
        try:
            cur.execute(q)
        except Exception:
            pass

    subsegment = xray_recorder.current_segment().subsegments[-1]
    assert subsegment.name == 'execute'
    sql = subsegment.sql
    assert sql['database_type'] == 'PostgreSQL'
    assert sql['user'] == dsn['user']
    assert sql['database_version']

    exception = subsegment.cause['exceptions'][0]
    assert exception.type == 'ProgrammingError'
Example #12
def test_exception():
    class SampleModel(Model):
        class Meta:
            region = 'us-west-2'
            table_name = 'mytable'

        sample_attribute = UnicodeAttribute(hash_key=True)

    try:
        SampleModel.describe_table()
    except Exception:
        pass

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    subsegment = subsegments[0]
    assert subsegment.name == 'dynamodb'
    assert len(subsegment.subsegments) == 0
    assert subsegment.error

    aws_meta = subsegment.aws
    assert aws_meta['region'] == 'us-west-2'
    assert aws_meta['operation'] == 'DescribeTable'
    assert aws_meta['table_name'] == 'mytable'
Example #13
def current_segment_id():
    return xray_recorder.current_segment().id
Example #14
def test_external_submodules_ignores_file():
    patcher.patch(['tests.mock_module'],
                  ignore_module_patterns=['tests.mock_module.mock_file'])
    assert len(xray_recorder.current_segment().subsegments) == 0
    # We want to make sure patching does not load any of the patched modules
    imported_modules = [
        module for module in TEST_MODULES if module in sys.modules
    ]
    assert not imported_modules

    _call_all_mock_functions()

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 9
    assert subsegments[0].name == 'mock_init'
    assert subsegments[1].name == 'mock_subinit'
    assert subsegments[2].name == 'mock_subfunc'
    assert subsegments[3].name == 'mock_no_doublepatch'
    assert subsegments[4].name == 'mock_staticmethod'
    assert subsegments[5].name == 'MockClass.__init__'
    assert subsegments[6].name == 'mock_method'
    assert subsegments[7].name == 'MockSubclass.__init__'
    assert subsegments[8].name == 'mock_submethod'
Example #15
def process_sample_topic(event, context):
    logger.structure_logs(append=True,
                          AWSTraceHeader=None,
                          traceId=xray_recorder.current_segment().trace_id)
    logger.debug(event)
Example #16
async def middleware(request, handler):
    """
    Main middleware function, deals with all the X-Ray segment logic
    """
    # Create X-Ray headers
    xray_header = construct_xray_header(request.headers)
    # Get name of service or generate a dynamic one from host
    name = calculate_segment_name(request.headers['host'].split(':', 1)[0],
                                  xray_recorder)

    sampling_decision = calculate_sampling_decision(
        trace_header=xray_header,
        recorder=xray_recorder,
        service_name=request.headers['host'],
        method=request.method,
        path=request.path,
    )

    # Start a segment
    segment = xray_recorder.begin_segment(
        name=name,
        traceid=xray_header.root,
        parent_id=xray_header.parent,
        sampling=sampling_decision,
    )

    segment.save_origin_trace_header(xray_header)
    # Store request metadata in the current segment
    segment.put_http_meta(http.URL, str(request.url))
    segment.put_http_meta(http.METHOD, request.method)

    if 'User-Agent' in request.headers:
        segment.put_http_meta(http.USER_AGENT, request.headers['User-Agent'])

    if 'X-Forwarded-For' in request.headers:
        segment.put_http_meta(http.CLIENT_IP,
                              request.headers['X-Forwarded-For'])
        segment.put_http_meta(http.X_FORWARDED_FOR, True)
    elif 'remote_addr' in request.headers:
        segment.put_http_meta(http.CLIENT_IP, request.headers['remote_addr'])
    else:
        segment.put_http_meta(http.CLIENT_IP, request.remote)

    try:
        # Call next middleware or request handler
        response = await handler(request)
    except Exception as err:
        # Store exception information including the stacktrace to the segment
        segment = xray_recorder.current_segment()
        segment.put_http_meta(http.STATUS, 500)
        stack = traceback.extract_stack(limit=xray_recorder.max_trace_back)
        segment.add_exception(err, stack)
        xray_recorder.end_segment()
        raise

    # Store response metadata into the current segment
    segment.put_http_meta(http.STATUS, response.status)

    if 'Content-Length' in response.headers:
        length = int(response.headers['Content-Length'])
        segment.put_http_meta(http.CONTENT_LENGTH, length)

    header_str = prepare_response_header(xray_header, segment)
    response.headers[http.XRAY_HEADER] = header_str

    # Close segment so it can be dispatched off to the daemon
    xray_recorder.end_segment()
    return response
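To take effect, this middleware has to be registered on the aiohttp application after the recorder is configured. A minimal sketch, assuming the function above carries aiohttp's @web.middleware decorator (as the SDK version does); the service name and handler are illustrative.

from aiohttp import web
from aws_xray_sdk.core import xray_recorder

xray_recorder.configure(service='my-service')


async def handle_index(request):
    # Illustrative handler; any aiohttp handler works here.
    return web.Response(text='ok')

# Every incoming request now passes through the X-Ray middleware above.
app = web.Application(middlewares=[middleware])
app.router.add_get('/', handle_index)
web.run_app(app)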
Example #17
def current_trace_id():
    return xray_recorder.current_segment().trace_id
Example #18
def raise_exception_to_xray(self, err):
    # Store exception information including the stacktrace to the segment
    segment = xray_recorder.current_segment()
    segment.put_http_meta(http.STATUS, 500)
    stack = traceback.extract_stack(limit=xray_recorder._max_trace_back)
    segment.add_exception(err, stack)
Example #19
def _read_sqs_message():
    sqs = boto3.resource("sqs", **config.BOTO_RESOURCE_KWARGS)

    """
    It is possible that the queue was not created by the time
    the worker launches, because the work queue creation (if needed)
    and the Job spawn are on separate promises and work asyncrhonously.
    This is a performance improvement but it causes the race condition above.

    If this is the case, we just return an empty response
    as if we didn't receive a message in this time frame.
    """
    try:
        queue = sqs.get_queue_by_name(QueueName=config.QUEUE_NAME)
    except ClientError as e:
        if e.response["Error"]["Code"] == "AWS.SimpleQueueService.NonExistentQueue":
            return None
        else:
            raise e

    message = queue.receive_messages(
        WaitTimeSeconds=20, AttributeNames=["AWSTraceHeader"]
    )

    if not message:
        return None

    # Try to parse it as JSON
    try:
        message = message[0]

        trace_header = message.attributes and message.attributes.get(
            "AWSTraceHeader", None
        )

        if trace_header:
            xray.global_sdk_config.set_sdk_enabled(True)

            header = TraceHeader.from_header_str(trace_header)
            trace_id = header.root
            sampled = header.sampled

            xray_recorder.begin_segment(
                f"worker-{config.CLUSTER_ENV}-{config.SANDBOX_ID}",
                traceid=trace_id,
                sampling=sampled,
                parent_id=header.parent,
            )

        body = json.loads(message.body)
        info("Consumed a message from SQS.")
    except Exception as e:
        xray_recorder.current_segment().add_exception(e, traceback.format_exc())

        info("Exception when loading message", message.body)
        info("Exception:", e)
        return None
    finally:
        message.delete()

    return body
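The segment opened inside _read_sqs_message when a trace header is present is left for the caller to close. A hedged sketch of such a worker loop (worker_loop and handle_job are illustrative names, not part of the example):

def worker_loop():
    while True:
        body = _read_sqs_message()
        if body is None:
            # No message in this poll window, or it failed to parse.
            continue

        try:
            handle_job(body)  # illustrative: process the decoded job
        finally:
            # Close the segment that _read_sqs_message opened when the
            # message carried an AWSTraceHeader.
            if xray.global_sdk_config.sdk_enabled():
                xray_recorder.end_segment()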