Code example #1
import os
from datetime import timedelta

from azure.monitor.query import LogsBatchQuery, LogsQueryClient


def test_logs_query_batch_additional_workspaces():
    # _credential() is a helper in the test module (not shown) that returns an
    # azure.identity TokenCredential.
    client = LogsQueryClient(_credential())
    query = "union * | where TimeGenerated > ago(100d) | project TenantId | summarize count() by TenantId"

    requests = [
        LogsBatchQuery(
            os.environ['LOG_WORKSPACE_ID'],
            query,
            timespan=timedelta(hours=1),
            additional_workspaces=[os.environ['SECONDARY_WORKSPACE_ID']]),
        LogsBatchQuery(
            os.environ['LOG_WORKSPACE_ID'],
            query,
            timespan=timedelta(hours=1),
            additional_workspaces=[os.environ['SECONDARY_WORKSPACE_ID']]),
        LogsBatchQuery(
            os.environ['LOG_WORKSPACE_ID'],
            query,
            timespan=timedelta(hours=1),
            additional_workspaces=[os.environ['SECONDARY_WORKSPACE_ID']]),
    ]
    response = client.query_batch(requests)

    for resp in response:
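        # Two rows are expected: one TenantId row for the primary workspace and one
        # for the additional (secondary) workspace.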
        assert len(resp.tables[0].rows) == 2
Code example #2
def test_logs_query_batch_default():
    client = LogsQueryClient(_credential())

    requests = [
        LogsBatchQuery(query="AzureActivity | summarize count()",
                       timespan=timedelta(hours=1),
                       workspace_id=os.environ['LOG_WORKSPACE_ID']),
        LogsBatchQuery(query="""AppRequests | take 10  |
                summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""",
                       timespan=timedelta(hours=1),
                       workspace_id=os.environ['LOG_WORKSPACE_ID']),
        LogsBatchQuery(query="Wrong query | take 2",
                       workspace_id=os.environ['LOG_WORKSPACE_ID'],
                       timespan=None),
    ]
    response = client.query_batch(requests)

    assert len(response) == 3
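    # The first two queries succeed; the malformed third query ("Wrong query | take 2")
    # is returned in place as a LogsQueryError.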

    r0 = response[0]
    assert r0.tables[0].columns == ['count_']
    r1 = response[1]
    assert r1.tables[0].columns[0] == 'TimeGenerated'
    assert r1.tables[0].columns[1] == '_ResourceId'
    assert r1.tables[0].columns[2] == 'avgRequestDuration'
    r2 = response[2]
    assert r2.__class__ == LogsQueryError
Code example #3
def test_logs_query_batch_with_statistics_in_some():
    client = LogsQueryClient(_credential())

    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id=os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query="""AppRequests|
                summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""",
            timespan=timedelta(hours=1),
            workspace_id=os.environ['LOG_WORKSPACE_ID'],
            include_statistics=True
        ),
        LogsBatchQuery(
            query="AppRequests",
            workspace_id=os.environ['LOG_WORKSPACE_ID'],
            timespan=None,
            include_statistics=True
        ),
    ]
    response = client.query_batch(requests)

    assert len(response) == 3
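    # statistics is only populated for the requests that set include_statistics=True.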
    assert response[0].statistics is None
    assert response[2].statistics is not None
Code example #4
def test_logs_batch_query_non_fatal_exception():
    credential = _credential()
    client = LogsQueryClient(credential)
    requests = [
        LogsBatchQuery(query="AzureActivity | summarize count()",
                       timespan=timedelta(hours=1),
                       workspace_id=os.environ['LOG_WORKSPACE_ID']),
        LogsBatchQuery(query="""AppRequests | take 10""",
                       timespan=(datetime(2021, 6, 2), timedelta(days=1)),
                       workspace_id=os.environ['LOG_WORKSPACE_ID']),
        LogsBatchQuery(query="""Bad Query""",
                       workspace_id=os.environ['LOG_WORKSPACE_ID'],
                       timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
                       include_statistics=True),
    ]
    responses = client.query_batch(requests)
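    # "Bad Query" is not valid KQL, so its entry is returned as a LogsQueryError while
    # the rest of the batch still succeeds (the error is non-fatal).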
    r1, r2, r3 = responses[0], responses[1], responses[2]
    assert r1.__class__ == LogsQueryResult
    assert r2.__class__ == LogsQueryResult
    assert r3.__class__ == LogsQueryError
Code example #5
def test_logs_batch_query_partial_exception():
    credential = _credential()
    client = LogsQueryClient(credential)
    requests = [
        LogsBatchQuery(query="AzureActivity | summarize count()",
                       timespan=timedelta(hours=1),
                       workspace_id=os.environ['LOG_WORKSPACE_ID']),
        LogsBatchQuery(query="""AppRequests | take 10""",
                       timespan=(datetime(2021, 6, 2), timedelta(days=1)),
                       workspace_id=os.environ['LOG_WORKSPACE_ID']),
        LogsBatchQuery(query="""let Weight = 92233720368547758;
            range x from 1 to 3 step 1
            | summarize percentilesw(x, Weight * 100, 50)""",
                       workspace_id=os.environ['LOG_WORKSPACE_ID'],
                       timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
                       include_statistics=True),
    ]
    responses = client.query_batch(requests)
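    # The oversized Weight makes percentilesw() fail part-way through, so the third
    # entry comes back as a LogsQueryPartialResult instead of a full result.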
    r1, r2, r3 = responses[0], responses[1], responses[2]
    assert r1.__class__ == LogsQueryResult
    assert r2.__class__ == LogsQueryResult
    assert r3.__class__ == LogsQueryPartialResult
Code example #6
def test_logs_batch_query_fatal_exception():
    credential = ClientSecretCredential(
        client_id=os.environ['AZURE_CLIENT_ID'],
        client_secret='bad_secret',
        tenant_id=os.environ['AZURE_TENANT_ID'])
    client = LogsQueryClient(credential)
    requests = [
        LogsBatchQuery(query="AzureActivity | summarize count()",
                       timespan=timedelta(hours=1),
                       workspace_id=os.environ['LOG_WORKSPACE_ID']),
        LogsBatchQuery(query="""AppRequestsss | take 10""",
                       timespan=(datetime(2021, 6, 2), timedelta(days=1)),
                       workspace_id=os.environ['LOG_WORKSPACE_ID']),
        LogsBatchQuery(query="""let Weight = 92233720368547758;
            range x from 1 to 3 step 1
            | summarize percentilesw(x, Weight * 100, 50)""",
                       workspace_id=os.environ['LOG_WORKSPACE_ID'],
                       timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
                       include_statistics=True),
    ]
    # The bad client secret makes authentication fail, so the whole batch raises
    # HttpResponseError instead of returning per-query errors.
    with pytest.raises(HttpResponseError):
        client.query_batch(requests)
Code example #7

# The surrounding sample creates a LogsQueryClient named `client`; only the serialized
# batch request payload and the response handling are shown here.
requests = [
    {
        "id": "1",
        "headers": {
            "Content-Type": "application/json"
        },
        "body": {
            "query": "AzureActivity | summarize count()",
            "timespan": "PT1H"
        },
        "method": "POST",
        "path": "/query",
        "workspace": os.environ['LOG_WORKSPACE_ID']
    },
    {
        "id": "2",
        "headers": {
            "Content-Type": "application/json"
        },
        "body": {
            "query": "AzureActivity | summarize count()",
            "timespan": "PT1H"
        },
        "method": "POST",
        "path": "/fakePath",
        "workspace": os.environ['LOG_WORKSPACE_ID']
    }
]
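# The second request deliberately targets the invalid "/fakePath", so its entry in the
# batch response carries an error (printed below) instead of result tables.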
responses = client.query_batch(requests)

for response in responses:
    try:
        table = response.tables[0]
        df = pd.DataFrame(table.rows, columns=[col.name for col in table.columns])
        print(df)
    except TypeError:
        print(response.error)
Code example #8
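# Assumed module-level imports for this perf test, with the aliases used below:
#   from azure_devtools.perfstress_tests import PerfStressTest
#   from azure.monitor.query import LogsQueryClient as SyncLogsQueryClient, LogsQueryRequest
#   from azure.monitor.query.aio import LogsQueryClient as AsyncLogsQueryClient
#   from azure.identity import DefaultAzureCredential as SyncDefaultAzureCredential
#   from azure.identity.aio import DefaultAzureCredential as AsyncDefaultAzureCredential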
class LogsBatchPerfTest(PerfStressTest):
    def __init__(self, arguments):
        super().__init__(arguments)

        # auth configuration
        self.workspace_id = self.get_from_env('LOG_WORKSPACE_ID')

        # Create clients
        self.logs_client = SyncLogsQueryClient(
            credential=SyncDefaultAzureCredential())
        self.async_logs_client = AsyncLogsQueryClient(
            credential=AsyncDefaultAzureCredential())
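        # Clients and the request payload are built once in __init__ so that run_sync
        # and run_async measure only the query_batch call itself.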

        self.requests = [
            LogsQueryRequest(
                query="AzureActivity | summarize count()",
                start_time=datetime(2021, 7, 25, tzinfo=timezone.utc),
                end_time=datetime(2021, 7, 26, tzinfo=timezone.utc),
                workspace_id=self.workspace_id),
            LogsQueryRequest(
                query="""AppRequests | take 10  |
                    summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId""",
                start_time=datetime(2021, 7, 25, tzinfo=timezone.utc),
                end_time=datetime(2021, 7, 26, tzinfo=timezone.utc),
                workspace_id=self.workspace_id),
            LogsQueryRequest(
                query="AppRequests | take 20",
                workspace_id=self.workspace_id,
                include_statistics=True),
        ]

    async def close(self):
        """This is run after cleanup.
        
        Use this to close any open handles or clients.
        """
        await self.async_logs_client.close()
        await super().close()

    def run_sync(self):
        """The synchronous perf test.

        Keep this minimal and focused, using only a single client API. Avoid any
        ancillary logic (e.g. generating UUIDs) here; put it in the setup/init
        instead so that only the client API call is measured.
        """
        self.logs_client.query_batch(self.requests)

    async def run_async(self):
        """The asynchronous perf test.

        Keep this minimal and focused, using only a single client API. Avoid any
        ancillary logic (e.g. generating UUIDs) here; put it in the setup/init
        instead so that only the client API call is measured.
        """
        await self.async_logs_client.query_batch(self.requests)
Code example #9

import os
from datetime import datetime, timedelta, timezone

import pandas as pd
from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsBatchQuery, LogsQueryClient, LogsQueryStatus

# Any azure.identity TokenCredential works here; DefaultAzureCredential is assumed.
client = LogsQueryClient(DefaultAzureCredential())

requests = [
    LogsBatchQuery(query="AzureActivity | summarize count()",
                   timespan=timedelta(hours=1),
                   workspace_id=os.environ['LOGS_WORKSPACE_ID']),
    LogsBatchQuery(query="""bad query""",
                   timespan=timedelta(days=1),
                   workspace_id=os.environ['LOGS_WORKSPACE_ID']),
    LogsBatchQuery(
        query="""let Weight = 92233720368547758;
        range x from 1 to 3 step 1
        | summarize percentilesw(x, Weight * 100, 50)""",
        workspace_id=os.environ['LOGS_WORKSPACE_ID'],
        timespan=(datetime(2021, 6, 2, tzinfo=timezone.utc),
                  datetime(2021, 6, 5, tzinfo=timezone.utc)),  # (start, end)
        include_statistics=True),
]
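# Expected outcomes: the first query succeeds, the second ("bad query") fails and is
# returned as a LogsQueryError, and the third overflows percentilesw and comes back
# as a LogsQueryPartialResult.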
results = client.query_batch(requests)

for res in results:
    if res.status == LogsQueryStatus.FAILURE:
        # this will be a LogsQueryError
        print(res.message)
    elif res.status == LogsQueryStatus.PARTIAL:
        # this will be a LogsQueryPartialResult
        print(res.partial_error.message)
        for table in res.partial_data:
            df = pd.DataFrame(table.rows, columns=table.columns)
            print(df)
    elif res.status == LogsQueryStatus.SUCCESS:
        # this will be a LogsQueryResult
        table = res.tables[0]
        df = pd.DataFrame(table.rows, columns=table.columns)
        print(df)