def test_logs_query_batch_additional_workspaces():
    """Batch: every entry fans out to a secondary workspace and sees both tenants."""
    client = LogsQueryClient(_credential())
    query = "union * | where TimeGenerated > ago(100d) | project TenantId | summarize count() by TenantId"

    def _make_request():
        # Primary workspace plus one additional workspace per request.
        return LogsBatchQuery(
            os.environ['LOG_WORKSPACE_ID'],
            query,
            timespan=timedelta(hours=1),
            additional_workspaces=[os.environ['SECONDARY_WORKSPACE_ID']],
        )

    requests = [_make_request() for _ in range(3)]
    response = client.query_batch(requests)

    for result in response:
        # One count row per tenant: primary + secondary.
        assert len(result.tables[0].rows) == 2
def test_logs_query_batch_with_statistics_in_some():
    """Only batch entries that set include_statistics=True carry statistics."""
    client = LogsQueryClient(_credential())
    workspace = os.environ['LOG_WORKSPACE_ID']
    requests = [
        # No statistics requested for the first entry.
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id=workspace,
        ),
        LogsBatchQuery(
            query="""AppRequests| summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""",
            timespan=timedelta(hours=1),
            workspace_id=workspace,
            include_statistics=True,
        ),
        LogsBatchQuery(
            query="AppRequests",
            workspace_id=workspace,
            timespan=None,
            include_statistics=True,
        ),
    ]
    response = client.query_batch(requests)

    assert len(response) == 3
    assert response[0].statistics is None
    assert response[2].statistics is not None
def test_logs_single_query_fatal_exception():
    """A bogus workspace id must surface as an HttpResponseError."""
    client = LogsQueryClient(_credential())
    with pytest.raises(HttpResponseError):
        client.query_workspace('bad_workspace_id', 'AppRequests', timespan=None)
def test_logs_query_batch_default():
    """A batch returns per-request results; failures surface as LogsQueryError."""
    client = LogsQueryClient(_credential())
    workspace = os.environ['LOG_WORKSPACE_ID']
    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id=workspace,
        ),
        LogsBatchQuery(
            query="""AppRequests | take 10 | summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""",
            timespan=timedelta(hours=1),
            workspace_id=workspace,
        ),
        # Intentionally malformed query: should yield a LogsQueryError entry.
        LogsBatchQuery(
            query="Wrong query | take 2",
            workspace_id=workspace,
            timespan=None,
        ),
    ]
    response = client.query_batch(requests)

    assert len(response) == 3
    assert response[0].tables[0].columns == ['count_']
    expected_columns = ['TimeGenerated', '_ResourceId', 'avgRequestDuration']
    for index, column in enumerate(expected_columns):
        assert response[1].tables[0].columns[index] == column
    assert response[2].__class__ == LogsQueryError
def test_logs_single_query_with_render():
    """include_visualization=True should populate the visualization payload."""
    client = LogsQueryClient(_credential())
    response = client.query_workspace(
        os.environ['LOG_WORKSPACE_ID'],
        """AppRequests | take 10""",
        timespan=None,
        include_visualization=True,
    )
    assert response.visualization is not None
def test_logs_server_timeout_legacy_query():
    """server_timeout=1 on a heavy query must raise a gateway-timeout error.

    Fixes two defects:
    - renamed from ``test_logs_server_timeout``: a later test with the same
      name shadowed this one, so it was never collected by pytest;
    - ``e.message.contains('Gateway timeout')`` was invalid — pytest's
      ``ExceptionInfo`` exposes the exception via ``e.value``, and ``str``
      has no ``contains`` method; use an ``in`` check on ``e.value.message``.
    The unused ``response`` binding (never assigned — the call raises) is
    also dropped.
    """
    client = LogsQueryClient(_credential())
    with pytest.raises(HttpResponseError) as e:
        client.query(
            os.environ['LOG_WORKSPACE_ID'],
            "range x from 1 to 10000000000 step 1 | count",
            server_timeout=1,
        )
    assert 'Gateway timeout' in e.value.message
def test_logs_single_query_raises_no_timespan():
    """Omitting the required ``timespan`` keyword raises TypeError."""
    client = LogsQueryClient(_credential())
    query = """AppRequests | where TimeGenerated > ago(12h) | summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"""
    with pytest.raises(TypeError):
        client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query)
def test_logs_single_query_with_non_200():
    """Querying a non-existent table reports a SemanticError in the message."""
    client = LogsQueryClient(_credential())
    query = """AppInsights | where TimeGenerated > ago(12h)"""
    with pytest.raises(HttpResponseError) as err:
        client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)
    assert "SemanticError" in err.value.message
def test_logs_single_query_partial_success_truncation():
    """A truncated result (truncationmaxrecords) still returns a response.

    Renamed from ``test_logs_single_query_with_partial_success``: a later
    test with the identical name shadowed this definition, so pytest never
    collected or ran it.
    """
    credential = _credential()
    client = LogsQueryClient(credential)
    query = "set truncationmaxrecords=1; union * | project TimeGenerated | take 10"
    response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)
    assert response is not None
def test_logs_single_query_with_partial_success():
    """An overflowing percentilesw weight yields a LogsQueryPartialResult."""
    client = LogsQueryClient(_credential())
    query = """let Weight = 92233720368547758; range x from 1 to 3 step 1 | summarize percentilesw(x, Weight * 100, 50)"""
    response = client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)
    # Both the error and the partial payload must be populated.
    for attribute in ('partial_error', 'partial_data'):
        assert getattr(response, attribute) is not None
    assert response.__class__ == LogsQueryPartialResult
def test_logs_server_timeout():
    """A one-second server timeout on a huge range query raises gateway timeout."""
    client = LogsQueryClient(_credential())
    with pytest.raises(HttpResponseError) as err:
        client.query_workspace(
            os.environ['LOG_WORKSPACE_ID'],
            "range x from 1 to 1000000000000000 step 1 | count",
            timespan=None,
            server_timeout=1,
        )
    assert 'Gateway timeout' in err.value.message
def test_logs_auth():
    """Smoke test: an authenticated query returns a result with tables."""
    client = LogsQueryClient(_credential())
    query = """AppRequests | where TimeGenerated > ago(12h) | summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"""
    # returns LogsQueryResults
    response = client.query(os.environ['LOG_WORKSPACE_ID'], query)
    assert response is not None
    assert response.tables is not None
def test_logs_single_query_with_statistics():
    """include_statistics=True should populate the statistics payload."""
    client = LogsQueryClient(_credential())
    response = client.query_workspace(
        os.environ['LOG_WORKSPACE_ID'],
        """AppRequests""",
        timespan=None,
        include_statistics=True,
    )
    assert response.statistics is not None
def test_query_no_duration():
    """timespan=None must serialize as a null/absent timespan in the request body.

    Fix: ``callback`` was defined but never passed to the client, so its
    assertion never executed. It is now wired in via ``raw_request_hook``,
    matching the other timespan-serialization tests in this file.
    """
    credential = _credential()
    client = LogsQueryClient(credential)
    query = """AppRequests | where TimeGenerated > ago(12h) | summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"""

    def callback(request):
        dic = json.loads(request.http_request.body)
        assert dic.get('timespan') is None

    # returns LogsQueryResult
    client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None, raw_request_hook=callback)
class LogsPerfTest(PerfStressTest):
    """Perf-stress test issuing one logs query per iteration, sync and async."""

    def __init__(self, arguments):
        super().__init__(arguments)
        # auth configuration
        self.workspace_id = self.get_from_env('LOG_WORKSPACE_ID')
        self.query = "AppRequests | summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"
        # Create clients
        self.logs_client = SyncLogsQueryClient(credential=SyncDefaultAzureCredential())
        self.async_logs_client = AsyncLogsQueryClient(credential=AsyncDefaultAzureCredential())

    @staticmethod
    def _window():
        # Fixed one-day window so every measured call issues an identical query.
        start = datetime(2021, 7, 25, 0, 0, 0, tzinfo=timezone.utc)
        end = datetime(2021, 7, 26, 0, 0, 0, tzinfo=timezone.utc)
        return start, end

    async def close(self):
        """Run after cleanup: close the async client, then the base class."""
        await self.async_logs_client.close()
        await super().close()

    def run_sync(self):
        """Synchronous perf test: a single query_workspace call, nothing else,
        so only the client API is measured."""
        self.logs_client.query_workspace(self.workspace_id, self.query, timespan=self._window())

    async def run_async(self):
        """Asynchronous perf test: a single query_workspace call, nothing else,
        so only the client API is measured."""
        await self.async_logs_client.query_workspace(self.workspace_id, self.query, timespan=self._window())
def test_query_duration_only_iso8601():
    """A raw ISO-8601 duration string passes through into the request body.

    Renamed from ``test_query_duration_only``: a later test with the same
    name shadowed this definition, so pytest never collected or ran it.
    """
    credential = _credential()
    client = LogsQueryClient(credential)
    query = "AppRequests | take 5"
    duration = 'P3D'

    def callback(request):
        dic = json.loads(request.http_request.body)
        assert 'P3D' in dic.get('timespan')

    client.query(os.environ['LOG_WORKSPACE_ID'], query, duration=duration, raw_request_hook=callback)
def test_query_duration_only():
    """A timedelta timespan serializes as an ISO-8601 duration in the body."""
    client = LogsQueryClient(_credential())
    query = "AppRequests | take 5"

    def check_body(request):
        payload = json.loads(request.http_request.body)
        # timedelta(days=3) == 259200 seconds.
        assert 'PT259200.0S' in payload.get('timespan')

    client.query_workspace(
        os.environ['LOG_WORKSPACE_ID'],
        query,
        timespan=timedelta(days=3),
        raw_request_hook=check_body,
    )
def test_logs_single_query_additional_workspaces():
    """A query with an additional workspace returns rows for both tenants."""
    client = LogsQueryClient(_credential())
    query = "union * | where TimeGenerated > ago(100d) | project TenantId | summarize count() by TenantId"
    response = client.query_workspace(
        os.environ['LOG_WORKSPACE_ID'],
        query,
        timespan=None,
        additional_workspaces=[os.environ["SECONDARY_WORKSPACE_ID"]],
    )
    assert response is not None
    # One count row per tenant: primary + secondary.
    assert len(response.tables[0].rows) == 2
def test_query_start_and_end_time():
    """A (start, end) datetime tuple serializes to a non-null timespan."""
    client = LogsQueryClient(_credential())
    query = "AppRequests | take 5"
    # NOTE(review): UTC() appears to be a tz helper defined elsewhere in this
    # module — kept as-is.
    end_time = datetime.now(UTC())
    start_time = end_time - timedelta(days=3)

    def verify_timespan(request):
        body = json.loads(request.http_request.body)
        assert body.get('timespan') is not None

    client.query_workspace(
        os.environ['LOG_WORKSPACE_ID'],
        query,
        timespan=(start_time, end_time),
        raw_request_hook=verify_timespan,
    )
def test_query_response_types():
    """Column values deserialize to native Python types.

    Modernized: ``six.string_types`` is exactly ``(str,)`` on Python 3, and
    this file already uses Python-3-only syntax, so the third-party ``six``
    shim is replaced with ``str``.
    """
    credential = _credential()
    client = LogsQueryClient(credential)
    query = """AppRequests | summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId, Success, ItemCount, DurationMs"""
    # returns LogsQueryResult
    result = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=None)
    row = result.tables[0].rows[0]
    assert isinstance(row[0], datetime)  # TimeGenerated is a datetime
    assert isinstance(row[1], str)       # _ResourceId is a string
    assert isinstance(row[2], bool)      # Success is a bool
    assert isinstance(row[3], int)       # ItemCount is an int
def test_query_duration_and_start_time():
    """A (start, duration) tuple serializes as 'start/duration' in the body."""
    client = LogsQueryClient(_credential())
    query = "AppRequests | take 5"
    end_time = datetime.now(UTC())
    start_time = end_time - timedelta(days=3)
    duration = timedelta(days=3)

    def verify_timespan(request):
        body = json.loads(request.http_request.body)
        # The serialized form is "<iso start>/<iso duration>".
        assert '/PT259200.0S' in body.get('timespan')

    client.query(
        os.environ['LOG_WORKSPACE_ID'],
        query,
        timespan=(start_time, duration),
        raw_request_hook=verify_timespan,
    )
def test_logs_query_result_row_type():
    """Iterating a result yields LogsTable objects whose rows are LogsTableRow."""
    client = LogsQueryClient(_credential())
    response = client.query_workspace(
        os.environ['LOG_WORKSPACE_ID'],
        "AppRequests | take 5",
        timespan=None,
    )
    # The response itself iterates over its tables.
    for table in response:
        assert table.__class__ == LogsTable
        assert all(row.__class__ == LogsTableRow for row in table.rows)
def test_logs_batch_query():
    """Legacy batch API: three requests produce three responses."""
    client = LogsQueryClient(_credential())
    workspace = os.environ['LOG_WORKSPACE_ID']
    requests = [
        LogsQueryRequest(
            query="AzureActivity | summarize count()",
            timespan="PT1H",
            workspace=workspace,
        ),
        LogsQueryRequest(
            query="""AppRequests | take 10 | summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""",
            timespan="PT1H",
            workspace=workspace,
        ),
        # No timespan here: exercises the default.
        LogsQueryRequest(
            query="AppRequests | take 2",
            workspace=workspace,
        ),
    ]
    response = client.batch_query(requests)
    assert len(response.responses) == 3
def test_logs_query_result_iterate_over_tables():
    """A multi-statement query iterates over its tables and keeps stats/render."""
    client = LogsQueryClient(_credential())
    response = client.query_workspace(
        os.environ['LOG_WORKSPACE_ID'],
        "AppRequests; AppRequests | take 5",
        timespan=None,
        include_statistics=True,
        include_visualization=True,
    )
    # Iterating the response walks its tables.
    for table in response:
        assert table.__class__ == LogsTable
    assert response.statistics is not None
    assert response.visualization is not None
    assert len(response.tables) == 2
    assert response.__class__ == LogsQueryResult
def test_logs_batch_query_non_fatal_exception():
    """A bad query inside a batch yields LogsQueryError without failing the batch."""
    client = LogsQueryClient(_credential())
    workspace = os.environ['LOG_WORKSPACE_ID']
    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id=workspace,
        ),
        LogsBatchQuery(
            query="""AppRequests | take 10""",
            timespan=(datetime(2021, 6, 2), timedelta(days=1)),
            workspace_id=workspace,
        ),
        LogsBatchQuery(
            query="""Bad Query""",
            workspace_id=workspace,
            timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
            include_statistics=True,
        ),
    ]
    responses = client.query_batch(requests)
    expected_types = [LogsQueryResult, LogsQueryResult, LogsQueryError]
    for result, expected in zip(responses, expected_types):
        assert result.__class__ == expected
def test_logs_batch_query_fatal_exception():
    """Invalid credentials make the whole batch call raise HttpResponseError."""
    # Deliberately wrong secret so authentication fails for the entire batch.
    credential = ClientSecretCredential(
        client_id=os.environ['AZURE_CLIENT_ID'],
        client_secret='bad_secret',
        tenant_id=os.environ['AZURE_TENANT_ID'],
    )
    client = LogsQueryClient(credential)
    workspace = os.environ['LOG_WORKSPACE_ID']
    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id=workspace,
        ),
        LogsBatchQuery(
            query="""AppRequestsss | take 10""",
            timespan=(datetime(2021, 6, 2), timedelta(days=1)),
            workspace_id=workspace,
        ),
        LogsBatchQuery(
            query="""let Weight = 92233720368547758; range x from 1 to 3 step 1 | summarize percentilesw(x, Weight * 100, 50)""",
            workspace_id=workspace,
            timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
            include_statistics=True,
        ),
    ]
    with pytest.raises(HttpResponseError):
        client.query_batch(requests)
def test_logs_batch_query_partial_exception():
    """A partially-failing query in a batch yields LogsQueryPartialResult."""
    client = LogsQueryClient(_credential())
    workspace = os.environ['LOG_WORKSPACE_ID']
    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id=workspace,
        ),
        LogsBatchQuery(
            query="""AppRequests | take 10""",
            timespan=(datetime(2021, 6, 2), timedelta(days=1)),
            workspace_id=workspace,
        ),
        # Overflowing percentilesw weight: succeeds partially.
        LogsBatchQuery(
            query="""let Weight = 92233720368547758; range x from 1 to 3 step 1 | summarize percentilesw(x, Weight * 100, 50)""",
            workspace_id=workspace,
            timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
            include_statistics=True,
        ),
    ]
    responses = client.query_batch(requests)
    expected_types = [LogsQueryResult, LogsQueryResult, LogsQueryPartialResult]
    for result, expected in zip(responses, expected_types):
        assert result.__class__ == expected
def __init__(self, arguments):
    """Configure auth, the measured query, and the sync/async clients."""
    super().__init__(arguments)
    # auth configuration
    self.workspace_id = self.get_from_env('LOG_WORKSPACE_ID')
    self.query = (
        "AppRequests | summarize avgRequestDuration=avg(DurationMs) "
        "by bin(TimeGenerated, 10m), _ResourceId"
    )
    # Create clients
    sync_credential = SyncDefaultAzureCredential()
    async_credential = AsyncDefaultAzureCredential()
    self.logs_client = SyncLogsQueryClient(credential=sync_credential)
    self.async_logs_client = AsyncLogsQueryClient(credential=async_credential)
def __init__(self, arguments):
    """Configure auth, the clients, and the fixed batch of query requests."""
    super().__init__(arguments)
    # auth configuration
    self.workspace_id = self.get_from_env('LOG_WORKSPACE_ID')
    # Create clients
    self.logs_client = SyncLogsQueryClient(credential=SyncDefaultAzureCredential())
    self.async_logs_client = AsyncLogsQueryClient(credential=AsyncDefaultAzureCredential())
    # Fixed one-day window so each perf run issues identical requests.
    window_start = datetime(2021, 7, 25, 0, 0, 0, tzinfo=timezone.utc)
    window_end = datetime(2021, 7, 26, 0, 0, 0, tzinfo=timezone.utc)
    self.requests = [
        LogsQueryRequest(
            query="AzureActivity | summarize count()",
            start_time=window_start,
            end_time=window_end,
            workspace_id=self.workspace_id,
        ),
        LogsQueryRequest(
            query="""AppRequests | take 10 | summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""",
            start_time=window_start,
            end_time=window_end,
            workspace_id=self.workspace_id,
        ),
        # No window here: exercises the default timespan with statistics.
        LogsQueryRequest(
            query="AppRequests | take 20",
            workspace_id=self.workspace_id,
            include_statistics=True,
        ),
    ]
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. import os import pandas as pd from azure.monitor.query import LogsQueryClient from azure.identity import DefaultAzureCredential credential = DefaultAzureCredential() client = LogsQueryClient(credential) requests = [ { "id": "1", "headers": { "Content-Type": "application/json" }, "body": { "query": "AzureActivity | summarize count()", "timespan": "PT1H" }, "method": "POST", "path": "/query", "workspace": os.environ['LOG_WORKSPACE_ID'] }, { "id": "2", "headers": { "Content-Type": "application/json"