def setUp(self):
        """Build Log Analytics clients from service-principal env vars and reset test state."""
        env = os.environ.get
        tenant_id = env("AZURE_TENANT")
        client_id = env("AZURE_CLIENT_ID")
        client_secret = env("AZURE_SECRET")
        subscription_id = env("AZURE_SUBSCRIPTION_ID")

        # All four settings are required; abort early if any is absent.
        if not all((tenant_id, client_id, client_secret, subscription_id)):
            raise ValueError("one or more environment variables are missing")

        credentials = ServicePrincipalCredentials(
            client_id,
            client_secret,
            tenant=tenant_id,
            verify=True,
            resource="https://management.azure.com/",
        )
        self.laa_client = LogAnalyticsAlertClient(credentials, subscription_id)
        self.lam_client = LogAnalyticsManagementClient(credentials, subscription_id)

        self.clean_up()
Exemplo n.º 2
0
def main():
    """Exercise the Azure Monitor diagnostic-settings API end to end.

    Provisions a resource group, a storage account, an Event Hubs
    namespace/hub (with an authorization rule), a Log Analytics workspace
    and a Logic App workflow, then creates, reads and deletes a diagnostic
    setting on the workflow that routes its logs to all three sinks.
    The resource group is deleted at the end to clean everything up.

    Raises:
        ValueError: if the SUBSCRIPTION_ID environment variable is not set.
    """
    SUBSCRIPTION_ID = os.environ.get("SUBSCRIPTION_ID", None)
    # Fail fast with a clear error instead of a cryptic TypeError later,
    # when the subscription id is interpolated into resource-ID strings
    # (after cloud resources have already been created).
    if not SUBSCRIPTION_ID:
        raise ValueError("SUBSCRIPTION_ID environment variable is missing")

    GROUP_NAME = "testgroupx"
    STORAGE_ACCOUNT_NAME = "storageaccountxyzxyzxx"
    NAMESPACE = "namespacex"
    EVENTHUB = "eventhub"
    AUTHORIZATION_RULE = "authorizationx"
    INSIGHT = "insightx"
    WORKSPACE_NAME = "workspacex"
    WORKFLOW_NAME = "workflow"

    # Create clients
    # For other authentication approaches, please see: https://pypi.org/project/azure-identity/
    # NOTE(review): loganalytics/logic clients take `credentials=` while the
    # others take `credential=` — presumably older (track-1 style) packages;
    # confirm against the pinned SDK versions before changing.
    resource_client = ResourceManagementClient(
        credential=DefaultAzureCredential(), subscription_id=SUBSCRIPTION_ID)
    storage_client = StorageManagementClient(
        credential=DefaultAzureCredential(), subscription_id=SUBSCRIPTION_ID)
    eventhub_client = EventHubManagementClient(
        credential=DefaultAzureCredential(), subscription_id=SUBSCRIPTION_ID)
    monitor_client = MonitorClient(credential=DefaultAzureCredential(),
                                   subscription_id=SUBSCRIPTION_ID)
    loganalytics_client = LogAnalyticsManagementClient(
        credentials=DefaultAzureCredential(), subscription_id=SUBSCRIPTION_ID)
    logic_client = LogicManagementClient(credentials=DefaultAzureCredential(),
                                         subscription_id=SUBSCRIPTION_ID)

    # Create resource group (idempotent; everything below lives in it).
    resource_client.resource_groups.create_or_update(GROUP_NAME,
                                                     {"location": "eastus"})

    # Create storage account (one of the three diagnostic-log sinks).
    storage_account = storage_client.storage_accounts.begin_create(
        GROUP_NAME, STORAGE_ACCOUNT_NAME, {
            "sku": {
                "name": "Standard_LRS"
            },
            "kind": "Storage",
            "location": "eastus",
            "enable_https_traffic_only": True
        }).result()

    # Create Event Hubs namespace, then an authorization rule and a hub
    # inside it (the second diagnostic-log sink).
    eventhub_client.namespaces.begin_create_or_update(
        GROUP_NAME, NAMESPACE, {
            "sku": {
                "name": "Standard",
                "tier": "Standard"
            },
            "location": "eastus",
            "tags": {
                "tag1": "value1",
                "tag2": "value2"
            }
        }).result()

    eventhub_client.namespaces.create_or_update_authorization_rule(
        GROUP_NAME, NAMESPACE, AUTHORIZATION_RULE,
        {"rights": ["Listen", "Send", "Manage"]})

    eventhub_client.event_hubs.create_or_update(
        GROUP_NAME, NAMESPACE, EVENTHUB, {
            "message_retention_in_days": "4",
            "partition_count": "4",
            "status": "Active",
            # Capture archives hub traffic into the storage account above.
            "capture_description": {
                "enabled": True,
                "encoding": "Avro",
                "interval_in_seconds": "120",
                "size_limit_in_bytes": "10485763",
                "destination": {
                    "name":
                    "EventHubArchive.AzureBlockBlob",
                    "storage_account_resource_id":
                    storage_account.id,
                    "blob_container":
                    "container",
                    "archive_name_format":
                    "{Namespace}/{EventHub}/{PartitionId}/{Year}/{Month}/{Day}/{Hour}/{Minute}/{Second}"
                }
            }
        })

    eventhub_client.event_hubs.create_or_update_authorization_rule(
        GROUP_NAME, NAMESPACE, EVENTHUB, AUTHORIZATION_RULE,
        {"rights": ["Listen", "Send", "Manage"]})

    # Create Log Analytics workspace (the third diagnostic-log sink).
    workspace = loganalytics_client.workspaces.create_or_update(
        GROUP_NAME, WORKSPACE_NAME, {
            "sku": {
                "name": "PerNode"
            },
            "retention_in_days": 30,
            "location": "eastus",
            "tags": {
                "tag1": "val1"
            }
        }).result()

    # Create an (empty) Logic App workflow; its resource id is the scope
    # the diagnostic setting is attached to.
    workflow = logic_client.workflows.create_or_update(
        GROUP_NAME, WORKFLOW_NAME, {
            "location": "eastus",
            "definition": {
                "$schema":
                "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
                "contentVersion": "1.0.0.0",
                "parameters": {},
                "triggers": {},
                "actions": {},
                "outputs": {}
            }
        })
    RESOURCE_URI = workflow.id

    # Create diagnostic setting that fans the workflow's runtime logs out to
    # the storage account, the workspace and the event hub created above.
    diagnostic_setting = monitor_client.diagnostic_settings.create_or_update(
        RESOURCE_URI, INSIGHT, {
            "storage_account_id": (
                f"/subscriptions/{SUBSCRIPTION_ID}/resourceGroups/{GROUP_NAME}"
                f"/providers/Microsoft.Storage/storageAccounts/{STORAGE_ACCOUNT_NAME}"
            ),
            "workspace_id": workspace.id,
            "event_hub_authorization_rule_id": (
                f"/subscriptions/{SUBSCRIPTION_ID}/resourceGroups/{GROUP_NAME}"
                f"/providers/microsoft.eventhub/namespaces/{NAMESPACE}"
                f"/authorizationrules/{AUTHORIZATION_RULE}"
            ),
            "event_hub_name": EVENTHUB,
            "metrics": [],
            "logs": [{
                "category": "WorkflowRuntime",
                "enabled": True,
                "retention_policy": {
                    "enabled": False,
                    "days": "0"
                }
            }],
        })
    print("Create diagnostic setting:\n{}".format(diagnostic_setting))

    # Get diagnostic setting
    diagnostic_setting = monitor_client.diagnostic_settings.get(
        RESOURCE_URI, INSIGHT)
    print("Get diagnostic setting:\n{}".format(diagnostic_setting))

    # Delete diagnostic setting
    monitor_client.diagnostic_settings.delete(RESOURCE_URI, INSIGHT)
    print("Delete diagnostic setting.")

    # Delete the group (tears down every resource created above).
    resource_client.resource_groups.begin_delete(GROUP_NAME).result()
Exemplo n.º 3
0
def main():
    """Exercise the Azure Monitor scheduled-query-rules API end to end.

    Provisions a resource group and a Log Analytics workspace, then creates,
    gets, patches and deletes a scheduled query (log alert) rule that runs a
    Heartbeat query against the workspace.  The resource group is deleted at
    the end to clean everything up.

    Raises:
        ValueError: if the SUBSCRIPTION_ID environment variable is not set.
    """
    SUBSCRIPTION_ID = os.environ.get("SUBSCRIPTION_ID", None)
    # Fail fast instead of passing subscription_id=None to the clients.
    if not SUBSCRIPTION_ID:
        raise ValueError("SUBSCRIPTION_ID environment variable is missing")

    GROUP_NAME = "testgroupx"
    WORKSPACE_NAME = "workspacex"
    SCHEDULED_QUERY_RULE = "scheduledqueryrule"

    # Create clients
    # For other authentication approaches, please see: https://pypi.org/project/azure-identity/
    # NOTE(review): the loganalytics client takes `credentials=` while the
    # others take `credential=` — presumably an older (track-1 style)
    # package; confirm against the pinned SDK versions before changing.
    resource_client = ResourceManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id=SUBSCRIPTION_ID
    )
    monitor_client = MonitorClient(
        credential=DefaultAzureCredential(),
        subscription_id=SUBSCRIPTION_ID
    )
    loganalytics_client = LogAnalyticsManagementClient(
        credentials=DefaultAzureCredential(),
        subscription_id=SUBSCRIPTION_ID
    )

    # Create resource group (idempotent; everything below lives in it).
    resource_client.resource_groups.create_or_update(
        GROUP_NAME,
        {"location": "eastus"}
    )

    # Create the workspace the alert rule queries against.
    workspace = loganalytics_client.workspaces.create_or_update(
        GROUP_NAME,
        WORKSPACE_NAME,
        {
          "sku": {
            "name": "PerNode"
          },
          "retention_in_days": 30,
          "location": "eastus",
          "tags": {
            "tag1": "val1"
          }
        }
    ).result()

    # Create scheduled query rule: every 15 minutes, run the Heartbeat
    # aggregation over a 15-minute window and alert on the trigger below.
    scheduled_query_rule = monitor_client.scheduled_query_rules.create_or_update(
        GROUP_NAME,
        SCHEDULED_QUERY_RULE,
        {
          "location": "eastus",
          "description": "log alert description",
          "enabled": "true",
          "provisioning_state": "Succeeded",
          "source": {
            "query": "Heartbeat | summarize AggregatedValue = count() by bin(TimeGenerated, 5m)",
            "data_source_id": workspace.id,
            "query_type": "ResultCount"
          },
          "schedule": {
            "frequency_in_minutes": "15",
            "time_window_in_minutes": "15"
          },
          "action": {
            "odata.type": "Microsoft.WindowsAzure.Management.Monitoring.Alerts.Models.Microsoft.AppInsights.Nexus.DataContracts.Resources.ScheduledQueryRules.AlertingAction",
            "severity": "1",
            "azns_action": {
              "action_group": [],
              "email_subject": "Email Header",
              "custom_webhook_payload": "{}"
            },
            "trigger": {
              "threshold_operator": "GreaterThan",
              "threshold": "3",
              "metric_trigger": {
                "threshold_operator": "GreaterThan",
                "threshold": "5",
                "metric_trigger_type": "Consecutive",
                "metric_column": "Computer"
              }
            }
          }
        }
    )
    print("Create scheduled query rule:\n{}".format(scheduled_query_rule))

    # Get scheduled query rule
    scheduled_query_rule = monitor_client.scheduled_query_rules.get(
        GROUP_NAME,
        SCHEDULED_QUERY_RULE
    )
    print("Get scheduled query rule:\n{}".format(scheduled_query_rule))

    # Patch scheduled query rule
    scheduled_query_rule = monitor_client.scheduled_query_rules.update(
        GROUP_NAME,
        SCHEDULED_QUERY_RULE,
        {
          "enabled": "true"
        }
    )
    print("Update scheduled query rule:\n{}".format(scheduled_query_rule))

    # Delete scheduled query rule
    monitor_client.scheduled_query_rules.delete(
        GROUP_NAME,
        SCHEDULED_QUERY_RULE
    )
    print("Delete scheduled query rule.\n")

    # Delete the group (tears down every resource created above).
    resource_client.resource_groups.begin_delete(
        GROUP_NAME
    ).result()
Exemplo n.º 4
0
def get_monitor_client() -> LogAnalyticsManagementClient:
    """Build a LogAnalyticsManagementClient from the ambient identity and subscription helpers."""
    credential = get_identity()
    subscription = get_subscription()
    return LogAnalyticsManagementClient(credential, subscription)
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.loganalytics import LogAnalyticsDataClient
from azure.loganalytics.models import QueryBody
import datetime, os
import numpy as np
from pandas import Series, DataFrame
import pandas as pd

# Service-principal settings pulled from the environment (Jenkins CI creds).
tenant_id = os.environ.get('TENANT')
application_id = os.environ.get('CLIENT_ID_JENKINS')
application_secret = os.environ.get('CLIENT_SECRET_JENKINS')
subscription_id = os.environ.get('SUBSCRIPTION_ID')

# NOTE(review): hard-coded workspace GUID — presumably a shared test
# workspace; confirm before reusing this script elsewhere.
workspace_id = '81af919a-92f9-4d81-8edf-041b4d81e278'

# Credential scoped to the Log Analytics data-plane API
# (resource 'https://api.loganalytics.io'), shared by both clients below.
credentials = ServicePrincipalCredentials(
    client_id=application_id,
    secret=application_secret,
    tenant=tenant_id,
    resource='https://api.loganalytics.io')

# Management-plane client (workspace CRUD); created but not used below.
log_mgmt_client = LogAnalyticsManagementClient(credentials, subscription_id)

# Data-plane client used to run the Kusto query.
log_client = LogAnalyticsDataClient(credentials)

# Query a single AzureActivity record over the last month (ISO-8601 "P1M").
body = {"query": " AzureActivity | limit 1 ", "timespan": "P1M"}
query_result = log_client.query(workspace_id, body)

# Print the number of columns in the first result table.
print(len(query_result.tables[0].columns))