Example #1
def sample_get_detection_config(detection_config_id):
    # [START get_detection_config]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    config = client.get_detection_configuration(detection_config_id)

    print("Detection config name: {}".format(config.name))
    print("Description: {}".format(config.description))
    print("Metric ID: {}".format(config.metric_id))

    print("\nDetection conditions specified for configuration...")
    print("\nWhole Series Detection Conditions:\n")
    conditions = config.whole_series_detection_condition

    print("Use {} operator for multiple detection conditions".format(
        conditions.condition_operator))

    print("Smart Detection Condition:")
    print("- Sensitivity: {}".format(
        conditions.smart_detection_condition.sensitivity))
    print("- Detection direction: {}".format(
        conditions.smart_detection_condition.anomaly_detector_direction))
    print(
        "- Suppress conditions: minimum number: {}; minimum ratio: {}".format(
            conditions.smart_detection_condition.suppress_condition.min_number,
            conditions.smart_detection_condition.suppress_condition.min_ratio))

    print("Hard Threshold Condition:")
    print("- Lower bound: {}".format(
        conditions.hard_threshold_condition.lower_bound))
    print("- Upper bound: {}".format(
        conditions.hard_threshold_condition.upper_bound))
    print("- Detection direction: {}".format(
        conditions.smart_detection_condition.anomaly_detector_direction))
    print(
        "- Suppress conditions: minimum number: {}; minimum ratio: {}".format(
            conditions.smart_detection_condition.suppress_condition.min_number,
            conditions.smart_detection_condition.suppress_condition.min_ratio))

    print("Change Threshold Condition:")
    print("- Change percentage: {}".format(
        conditions.change_threshold_condition.change_percentage))
    print("- Shift point: {}".format(
        conditions.change_threshold_condition.shift_point))
    print("- Detect anomaly if within range: {}".format(
        conditions.change_threshold_condition.within_range))
    print("- Detection direction: {}".format(
        conditions.smart_detection_condition.anomaly_detector_direction))
    print(
        "- Suppress conditions: minimum number: {}; minimum ratio: {}".format(
            conditions.smart_detection_condition.suppress_condition.min_number,
            conditions.smart_detection_condition.suppress_condition.min_ratio))
def sample_get_data_feed(data_feed_id):
    # [START get_data_feed]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    data_feed = client.get_data_feed(data_feed_id)

    print("ID: {}".format(data_feed.id))
    print("Data feed name: {}".format(data_feed.name))
    print("Created time: {}".format(data_feed.created_time))
    print("Status: {}".format(data_feed.status))
    print("Source type: {}".format(data_feed.source.data_source_type))
    print("Granularity type: {}".format(data_feed.granularity.granularity_type))
    print("Data feed metrics: {}".format([metric.name for metric in data_feed.schema.metrics]))
    print("Data feed dimensions: {}".format([dimension.name for dimension in data_feed.schema.dimensions]))
    print("Data feed timestamp column: {}".format(data_feed.schema.timestamp_column))
    print("Ingestion data starting on: {}".format(data_feed.ingestion_settings.ingestion_begin_time))
    print("Data feed description: {}".format(data_feed.options.data_feed_description))
    print("Data feed rollup type: {}".format(data_feed.options.rollup_settings.rollup_type))
    print("Data feed rollup method: {}".format(data_feed.options.rollup_settings.rollup_method))
    print("Data feed fill setting: {}".format(data_feed.options.missing_data_point_fill_settings.fill_type))
    print("Data feed access mode: {}".format(data_feed.options.access_mode))
def sample_list_data_feeds():
    # [START list_data_feeds]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    data_feeds = client.list_data_feeds()

    for feed in data_feeds:
        print("Data feed name: {}".format(feed.name))
        print("ID: {}".format(feed.id))
        print("Created time: {}".format(feed.created_time))
        print("Status: {}".format(feed.status))
        print("Source type: {}".format(feed.source.data_source_type))
        print("Granularity type: {}".format(feed.granularity.granularity_type))

        print("\nFeed metrics:")
        for metric in feed.schema.metrics:
            print(metric.name)

        print("\nFeed dimensions:")
        for dimension in feed.schema.dimensions:
            print(dimension.name)
def sample_update_data_feed(data_feed):
    # [START update_data_feed]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    # update data feed on the data feed itself or by using available keyword arguments
    data_feed.name = "updated name"
    data_feed.data_feed_description = "updated description for data feed"

    updated = client.update_data_feed(data_feed,
                                      access_mode="Public",
                                      fill_type="CustomValue",
                                      custom_fill_value=1)
    print("Updated name: {}".format(updated.name))
    print("Updated description: {}".format(updated.data_feed_description))
    print("Updated access mode: {}".format(updated.access_mode))
    print("Updated fill setting, value: {}, {}".format(
        updated.missing_data_point_fill_settings.fill_type,
        updated.missing_data_point_fill_settings.custom_fill_value,
    ))
def sample_create_detection_config():
    # [START create_detection_config]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        ChangeThresholdCondition,
        HardThresholdCondition,
        SmartDetectionCondition,
        SuppressCondition,
        MetricDetectionCondition,
    )

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    metric_id = os.getenv("METRICS_ADVISOR_METRIC_ID")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    change_threshold_condition = ChangeThresholdCondition(
        anomaly_detector_direction="Both",
        change_percentage=20,
        shift_point=10,
        within_range=True,
        suppress_condition=SuppressCondition(
            min_number=5,
            min_ratio=2
        )
    )
    hard_threshold_condition = HardThresholdCondition(
        anomaly_detector_direction="Up",
        upper_bound=100,
        suppress_condition=SuppressCondition(
            min_number=2,
            min_ratio=2
        )
    )
    smart_detection_condition = SmartDetectionCondition(
        anomaly_detector_direction="Up",
        sensitivity=10,
        suppress_condition=SuppressCondition(
            min_number=2,
            min_ratio=2
        )
    )

    detection_config = client.create_detection_configuration(
        name="my_detection_config",
        metric_id=metric_id,
        description="anomaly detection config for metric",
        whole_series_detection_condition=MetricDetectionCondition(
            cross_conditions_operator="OR",
            change_threshold_condition=change_threshold_condition,
            hard_threshold_condition=hard_threshold_condition,
            smart_detection_condition=smart_detection_condition
        )
    )

    return detection_config
def sample_get_alert_config(alert_config_id):
    # [START get_anomaly_alert_config]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    config = client.get_anomaly_alert_configuration(alert_config_id)

    print("Alert config ID: {}".format(config.id))
    print("Alert config name: {}".format(config.name))
    print("Description: {}".format(config.description))
    print("Ids of hooks associated with alert: {}".format(config.hook_ids))
    print("Use {} operator for multiple alert conditions\n".format(
        config.cross_metrics_operator))

    print("Alert uses detection configuration ID: {}".format(
        config.metric_alert_configurations[0].detection_configuration_id))
    print("Alert scope type: {}".format(
        config.metric_alert_configurations[0].alert_scope.scope_type))
    print("Alert severity condition: min- {}, max- {}".format(
        config.metric_alert_configurations[0].alert_conditions.
        severity_condition.min_alert_severity,
        config.metric_alert_configurations[0].alert_conditions.
        severity_condition.max_alert_severity,
    ))
    print("\nAlert uses detection configuration ID: {}".format(
        config.metric_alert_configurations[1].detection_configuration_id))
    print("Alert scope type: {}".format(
        config.metric_alert_configurations[1].alert_scope.scope_type))
    print("Top N: {}".format(config.metric_alert_configurations[1].alert_scope.
                             top_n_group_in_scope.top))
    print("Point count used to look back: {}".format(
        config.metric_alert_configurations[1].alert_scope.top_n_group_in_scope.
        period))
    print("Min top count: {}".format(
        config.metric_alert_configurations[1].alert_scope.top_n_group_in_scope.
        min_top_count))
    print("Alert metric boundary condition direction: {}, upper bound: {}".
          format(
              config.metric_alert_configurations[1].alert_conditions.
              metric_boundary_condition.direction,
              config.metric_alert_configurations[1].alert_conditions.
              metric_boundary_condition.upper,
          ))
    print("Alert snooze condition, snooze point count: {}".format(
        config.metric_alert_configurations[1].alert_snooze_condition.
        auto_snooze, ))
    print("Alert snooze scope: {}".format(
        config.metric_alert_configurations[1].alert_snooze_condition.
        snooze_scope, ))
    print("Snooze only for successive anomalies?: {}".format(
        config.metric_alert_configurations[1].alert_snooze_condition.
        only_for_successive, ))
def sample_create_data_feed():
    # [START create_data_feed]
    import datetime
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        SQLServerDataFeed,
        DataFeedSchema,
        DataFeedMetric,
        DataFeedDimension,
        DataFeedOptions,
        DataFeedRollupSettings,
        DataFeedMissingDataPointFillSettings,
    )

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    sql_server_connection_string = os.getenv("METRICS_ADVISOR_SQL_SERVER_CONNECTION_STRING")
    query = os.getenv("METRICS_ADVISOR_SQL_SERVER_QUERY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    data_feed = client.create_data_feed(
        name="My data feed",
        source=SQLServerDataFeed(
            connection_string=sql_server_connection_string,
            query=query,
        ),
        granularity="Daily",
        schema=DataFeedSchema(
            metrics=[
                DataFeedMetric(name="cost", display_name="Cost"),
                DataFeedMetric(name="revenue", display_name="Revenue")
            ],
            dimensions=[
                DataFeedDimension(name="category", display_name="Category"),
                DataFeedDimension(name="city", display_name="City")
            ],
            timestamp_column="Timestamp"
        ),
        ingestion_settings=datetime.datetime(2019, 10, 1),
        options=DataFeedOptions(
            data_feed_description="cost/revenue data feed",
            rollup_settings=DataFeedRollupSettings(
                rollup_type="AutoRollup",
                rollup_method="Sum",
                rollup_identification_value="__CUSTOM_SUM__"
            ),
            missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
                fill_type="SmartFilling"
            ),
            access_mode="Private"
        )
    )

    return data_feed
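A hedged usage sketch for the sample above (the wrapper function name is illustrative and not part of the original file): it runs sample_create_data_feed and then removes the created feed with the administration client's delete_data_feed method, so the demo leaves no resources behind.

def run_create_data_feed_sample():
    import os
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    # Create the feed using the sample above and show its service-assigned ID.
    data_feed = sample_create_data_feed()
    print("Created data feed ID: {}".format(data_feed.id))

    # Clean up: remove the feed that the sample created.
    client = MetricsAdvisorAdministrationClient(
        os.getenv("METRICS_ADVISOR_ENDPOINT"),
        MetricsAdvisorKeyCredential(os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY"),
                                    os.getenv("METRICS_ADVISOR_API_KEY")))
    client.delete_data_feed(data_feed.id)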
def sample_create_alert_config():
    # [START create_alert_config]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        MetricAlertConfiguration,
        MetricAnomalyAlertScope,
        TopNGroupScope,
        MetricAnomalyAlertConditions,
        SeverityCondition,
        MetricBoundaryCondition,
        MetricAnomalyAlertSnoozeCondition,
    )
    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    detection_configuration_id = os.getenv(
        "METRICS_ADVISOR_DETECTION_CONFIGURATION_ID")
    hook_id = os.getenv("METRICS_ADVISOR_HOOK_ID")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    alert_config = client.create_alert_configuration(
        name="my alert config",
        description="alert config description",
        cross_metrics_operator="AND",
        metric_alert_configurations=[
            MetricAlertConfiguration(
                detection_configuration_id=detection_configuration_id,
                alert_scope=MetricAnomalyAlertScope(scope_type="WholeSeries"),
                alert_conditions=MetricAnomalyAlertConditions(
                    severity_condition=SeverityCondition(
                        min_alert_severity="Low", max_alert_severity="High"))),
            MetricAlertConfiguration(
                detection_configuration_id=detection_configuration_id,
                alert_scope=MetricAnomalyAlertScope(
                    scope_type="TopN",
                    top_n_group_in_scope=TopNGroupScope(top=10,
                                                        period=5,
                                                        min_top_count=5)),
                alert_conditions=MetricAnomalyAlertConditions(
                    metric_boundary_condition=MetricBoundaryCondition(
                        direction="Up", upper=50)),
                alert_snooze_condition=MetricAnomalyAlertSnoozeCondition(
                    auto_snooze=2,
                    snooze_scope="Metric",
                    only_for_successive=True)),
        ],
        hook_ids=[hook_id])

    return alert_config
def sample_update_detection_config(detection_config):
    # [START update_detection_config]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        MetricSeriesGroupDetectionCondition,
        MetricSingleSeriesDetectionCondition,
        SmartDetectionCondition,
        SuppressCondition
    )

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    detection_config.name = "updated config name"
    detection_config.description = "updated with more detection conditions"
    smart_detection_condition = SmartDetectionCondition(
        anomaly_detector_direction="Up",
        sensitivity=10,
        suppress_condition=SuppressCondition(
            min_number=2,
            min_ratio=2
        )
    )

    client.update_detection_configuration(
        detection_config,
        series_group_detection_conditions=[
            MetricSeriesGroupDetectionCondition(
                series_group_key={"city": "Seoul"},
                smart_detection_condition=smart_detection_condition
            )
        ],
        series_detection_conditions=[
            MetricSingleSeriesDetectionCondition(
                series_key={"city": "Osaka", "category": "Cell Phones"},
                smart_detection_condition=smart_detection_condition
            )
        ]
    )
    updated = client.get_detection_configuration(detection_config.id)
    print("Updated detection name: {}".format(updated.name))
    print("Updated detection description: {}".format(updated.description))
    print("Updated detection condition for series group: {}".format(
        updated.series_group_detection_conditions[0].series_group_key
    ))
    print("Updated detection condition for series: {}".format(
        updated.series_detection_conditions[0].series_key
    ))
def sample_delete_datasource_credential(credential_id):
    # [START delete_datasource_credential]
    import os

    from azure.core.exceptions import ResourceNotFoundError
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    client.delete_datasource_credential(credential_id)
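The snippets above are independent; what follows is a hedged driver sketch (the ordering and the __main__ wiring are illustrative, not taken from the original file) showing how they could be chained when run as a script.

if __name__ == "__main__":
    # Create resources first, then inspect them with the corresponding "get" samples.
    # Note that sample_create_detection_config and sample_get_alert_config still read
    # their metric / detection-configuration IDs from the environment variables shown above.
    data_feed = sample_create_data_feed()
    detection_config = sample_create_detection_config()
    alert_config = sample_create_alert_config()

    sample_get_data_feed(data_feed.id)
    sample_get_detection_config(detection_config.id)
    sample_get_alert_config(alert_config.id)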
Example #11
def sample_list_hooks():
    # [START list_hooks]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    hooks = client.list_hooks()
    for hook in hooks:
        print("Hook type: {}".format(hook.hook_type))
        print("Hook name: {}".format(hook.name))
        print("Description: {}\n".format(hook.description))
Example #12
async def sample_list_datasource_credentials_async():
    # [START list_datasource_credentials_async]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential
    from azure.ai.metricsadvisor.aio import MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    credentials = client.list_datasource_credentials()
    async for credential in credentials:
        print("Credential type: {}".format(credential.credential_type))
        print("Credential name: {}".format(credential.name))
        print("Description: {}\n".format(credential.description))
def authentication_administration_client_with_aad():
    # [START administration_client_with_aad]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorAdministrationClient
    from azure.identity import DefaultAzureCredential

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    credential = DefaultAzureCredential()
    client = MetricsAdvisorAdministrationClient(service_endpoint, credential)
Example #14
def sample_refresh_data_feed_ingestion():
    # [START refresh_data_feed_ingestion]
    import datetime
    import os
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    data_feed_id = os.getenv("METRICS_ADVISOR_DATA_FEED_ID")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    client.refresh_data_feed_ingestion(data_feed_id,
                                       datetime.datetime(2020, 9, 20),
                                       datetime.datetime(2020, 9, 25))
Example #15
def sample_list_alert_configs():
    # [START list_alert_configs]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    detection_configuration_id = os.getenv("METRICS_ADVISOR_DETECTION_CONFIGURATION_ID")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    configs = client.list_alert_configurations(detection_configuration_id)
    for config in configs:
        print("Alert config name: {}".format(config.name))
        print("Alert description: {}".format(config.description))
        print("Ids of hooks associated with alert: {}\n".format(config.hook_ids))
def sample_list_detection_configs():
    # [START list_detection_configs]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    metric_id = os.getenv("METRICS_ADVISOR_METRIC_ID")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    configs = client.list_detection_configurations(metric_id=metric_id)
    for config in configs:
        print("Detection config name: {}".format(config.name))
        print("Description: {}".format(config.description))
        print("Metric ID: {}\n".format(config.metric_id))
Example #17
def sample_update_alert_config(alert_config):
    # [START update_alert_config]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        MetricAlertConfiguration,
        MetricAnomalyAlertScope,
        MetricAnomalyAlertConditions,
        MetricBoundaryCondition
    )
    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    detection_configuration_id = os.getenv("METRICS_ADVISOR_DETECTION_CONFIGURATION_ID")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    alert_config.name = "updated config name"
    additional_alert = MetricAlertConfiguration(
        detection_configuration_id=detection_configuration_id,
        alert_scope=MetricAnomalyAlertScope(
            scope_type="SeriesGroup",
            series_group_in_scope={'city': 'Shenzhen'}
        ),
        alert_conditions=MetricAnomalyAlertConditions(
            metric_boundary_condition=MetricBoundaryCondition(
                direction="Down",
                lower=5
            )
        )
    )
    alert_config.metric_alert_configurations.append(additional_alert)

    updated = client.update_alert_configuration(
        alert_config,
        cross_metrics_operator="OR",
        description="updated alert config"
    )

    print("Updated alert name: {}".format(updated.name))
    print("Updated alert description: {}".format(updated.description))
    print("Updated cross metrics operator: {}".format(updated.cross_metrics_operator))
    print("Updated alert condition configuration scope type: {}".format(
        updated.metric_alert_configurations[2].alert_scope.scope_type
    ))
def sample_update_datasource_credential(datasource_credential):
    # [START update_datasource_credential]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    datasource_credential.description = "updated description"

    updated = client.update_datasource_credential(datasource_credential)
    print("Credential type: {}".format(updated.credential_type))
    print("Credential name: {}".format(updated.name))
    print("Description: {}\n".format(updated.description))
Example #19
def sample_get_hook(hook_id):
    # [START get_hook]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    hook = client.get_hook(hook_id)

    print("Hook name: {}".format(hook.name))
    print("Description: {}".format(hook.description))
    print("Emails to alert: {}".format(hook.emails_to_alert))
    print("External link: {}".format(hook.external_link))
    print("Admins: {}".format(hook.admin_emails))
Example #20
def sample_get_data_feed_ingestion_progress():
    # [START get_data_feed_ingestion_progress]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    data_feed_id = os.getenv("METRICS_ADVISOR_DATA_FEED_ID")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    progress = client.get_data_feed_ingestion_progress(data_feed_id)

    print("Lastest active timestamp: {}".format(
        progress.latest_active_timestamp))
    print("Latest successful timestamp: {}".format(
        progress.latest_success_timestamp))
def authentication_administration_client_with_metrics_advisor_credential():
    # [START administration_client_with_metrics_advisor_credential]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))
def sample_update_hook(hook):
    # [START update_hook]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    hook.name = "updated hook name"
    hook.description = "updated hook description"

    updated = client.update_hook(hook,
                                 emails_to_alert=["*****@*****.**"])
    print("Updated name: {}".format(updated.name))
    print("Updated description: {}".format(updated.description))
    print("Updated emails: {}".format(updated.emails_to_alert))
Example #23
def sample_list_data_feed_ingestion_status():
    # [START list_data_feed_ingestion_status]
    import datetime
    import os
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    data_feed_id = os.getenv("METRICS_ADVISOR_DATA_FEED_ID")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    ingestion_status = client.list_data_feed_ingestion_status(
        data_feed_id, datetime.datetime(2020, 9, 20),
        datetime.datetime(2020, 9, 25))
    for status in ingestion_status:
        print("Timestamp: {}".format(status.timestamp))
        print("Status: {}".format(status.status))
        print("Message: {}\n".format(status.message))
Example #24
def sample_create_hook():
    # [START create_hook]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import EmailNotificationHook

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    hook = client.create_hook(
        hook=EmailNotificationHook(
            name="email hook",
            description="my email hook",
            emails_to_alert=["*****@*****.**"],
            external_link="https://docs.microsoft.com/en-us/azure/cognitive-services/metrics-advisor/how-tos/alerts"
        )
    )

    return hook
Example #25
def sample_create_hook():
    # [START create_hook]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import EmailNotificationHook

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    hook = client.create_hook(
        hook=EmailNotificationHook(
            name="email hook",
            description="my email hook",
            emails_to_alert=["*****@*****.**"],
            external_link="https://adwiki.azurewebsites.net/articles/howto/alerts/create-hooks.html"
        )
    )

    return hook
def sample_create_datasource_credential():
    # [START create_datasource_credential]
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import DatasourceSqlConnectionString

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    connection_string = os.getenv("SQL_SERVER_CONNECTION_STRING")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint, MetricsAdvisorKeyCredential(subscription_key,
                                                      api_key))

    credential = client.create_datasource_credential(
        datasource_credential=DatasourceSqlConnectionString(
            name="sql datasource credential",
            connection_string=connection_string,
            description="my datasource credential",
        ))

    return credential
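A hedged follow-up (not part of the original sample, and assuming the returned credential exposes a service-assigned id like the other resources in these samples): the created credential can be removed again with the delete sample defined earlier in this file.

    credential = sample_create_datasource_credential()
    print("Created credential ID: {}".format(credential.id))

    # Clean up using the delete sample shown near the top of this file.
    sample_delete_datasource_credential(credential.id)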
Example #27
def sample_delete_alert_config(alert_config_id):
    # [START delete_alert_config]
    import os

    from azure.core.exceptions import ResourceNotFoundError
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")

    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                  MetricsAdvisorKeyCredential(subscription_key, api_key))

    client.delete_alert_configuration(alert_config_id)

    try:
        client.get_alert_configuration(alert_config_id)
    except ResourceNotFoundError:
        print("Alert configuration successfully deleted.")
Example #29
class TestMetricsAdvisorAdministrationClientBase(AzureTestCase):
    FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['Ocp-Apim-Subscription-Key', 'x-api-key']

    def __init__(self, method_name):
        super(TestMetricsAdvisorAdministrationClientBase, self).__init__(method_name)
        self.vcr.match_on = ["path", "method", "query"]
        if self.is_live:
            service_endpoint = self.get_settings_value("METRICS_ADVISOR_ENDPOINT")
            subscription_key = self.get_settings_value("METRICS_ADVISOR_SUBSCRIPTION_KEY")
            api_key = self.get_settings_value("METRICS_ADVISOR_API_KEY")
            self.sql_server_connection_string = self.get_settings_value("METRICS_ADVISOR_SQL_SERVER_CONNECTION_STRING")
            self.azure_table_connection_string = self.get_settings_value("METRICS_ADVISOR_AZURE_TABLE_CONNECTION_STRING")
            self.azure_blob_connection_string = self.get_settings_value("METRICS_ADVISOR_AZURE_BLOB_CONNECTION_STRING")
            self.azure_cosmosdb_connection_string = self.get_settings_value("METRICS_ADVISOR_COSMOS_DB_CONNECTION_STRING")
            self.http_request_get_url = self.get_settings_value("METRICS_ADVISOR_HTTP_GET_URL")
            self.http_request_post_url = self.get_settings_value("METRICS_ADVISOR_HTTP_POST_URL")
            self.application_insights_api_key = self.get_settings_value("METRICS_ADVISOR_APPLICATION_INSIGHTS_API_KEY")
            self.azure_data_explorer_connection_string = self.get_settings_value("METRICS_ADVISOR_AZURE_DATA_EXPLORER_CONNECTION_STRING")
            self.influxdb_connection_string = self.get_settings_value("METRICS_ADVISOR_INFLUX_DB_CONNECTION_STRING")
            self.influxdb_password = self.get_settings_value("METRICS_ADVISOR_INFLUX_DB_PASSWORD")
            self.azure_datalake_account_key = self.get_settings_value("METRICS_ADVISOR_AZURE_DATALAKE_ACCOUNT_KEY")
            self.mongodb_connection_string = self.get_settings_value("METRICS_ADVISOR_AZURE_MONGO_DB_CONNECTION_STRING")
            self.mysql_connection_string = self.get_settings_value("METRICS_ADVISOR_MYSQL_CONNECTION_STRING")
            self.postgresql_connection_string = self.get_settings_value("METRICS_ADVISOR_POSTGRESQL_CONNECTION_STRING")
            self.elasticsearch_auth_header = self.get_settings_value("METRICS_ADVISOR_ELASTICSEARCH_AUTH_HEADER")
            self.anomaly_detection_configuration_id = self.get_settings_value("METRICS_ADVISOR_ANOMALY_DETECTION_CONFIGURATION_ID")
            self.data_feed_id = self.get_settings_value("METRICS_ADVISOR_DATA_FEED_ID")
            self.metric_id = self.get_settings_value("METRICS_ADVISOR_METRIC_ID")
            self.scrubber.register_name_pair(
                self.sql_server_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.azure_table_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.azure_blob_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.azure_cosmosdb_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.http_request_get_url,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.http_request_post_url,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.application_insights_api_key,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.azure_data_explorer_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.influxdb_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.influxdb_password,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.azure_datalake_account_key,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.mongodb_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.mysql_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.postgresql_connection_string,
                "connectionstring"
            )
            self.scrubber.register_name_pair(
                self.elasticsearch_auth_header,
                "connectionstring"
            )

            self.scrubber.register_name_pair(
                self.metric_id,
                "metric_id"
            )
            self.scrubber.register_name_pair(
                self.data_feed_id,
                "data_feed_id"
            )
            self.scrubber.register_name_pair(
                self.anomaly_detection_configuration_id,
                "anomaly_detection_configuration_id"
            )
        else:
            service_endpoint = "https://endpointname.cognitiveservices.azure.com"
            subscription_key = "METRICS_ADVISOR_SUBSCRIPTION_KEY"
            api_key = "METRICS_ADVISOR_API_KEY"
            self.sql_server_connection_string = "SQL_SERVER_CONNECTION_STRING"
            self.azure_table_connection_string = "AZURE_TABLE_CONNECTION_STRING"
            self.azure_blob_connection_string = "AZURE_BLOB_CONNECTION_STRING"
            self.azure_cosmosdb_connection_string = "COSMOS_DB_CONNECTION_STRING"
            self.http_request_get_url = "METRICS_ADVISOR_HTTP_GET_URL"
            self.http_request_post_url = "METRICS_ADVISOR_HTTP_POST_URL"
            self.application_insights_api_key = "METRICS_ADVISOR_APPLICATION_INSIGHTS_API_KEY"
            self.azure_data_explorer_connection_string = "METRICS_ADVISOR_AZURE_DATA_EXPLORER_CONNECTION_STRING"
            self.influxdb_connection_string = "METRICS_ADVISOR_INFLUXDB_CONNECTION_STRING"
            self.influxdb_password = "******"
            self.azure_datalake_account_key = "METRICS_ADVISOR_AZURE_DATALAKE_ACCOUNT_KEY"
            self.mongodb_connection_string = "METRICS_ADVISOR_AZURE_MONGODB_CONNECTION_STRING"
            self.mysql_connection_string = "METRICS_ADVISOR_MYSQL_CONNECTION_STRING"
            self.postgresql_connection_string = "METRICS_ADVISOR_POSTGRESQL_CONNECTION_STRING"
            self.elasticsearch_auth_header = "METRICS_ADVISOR_ELASTICSEARCH_AUTH"
            self.anomaly_detection_configuration_id = "anomaly_detection_configuration_id"
            self.metric_id = "metric_id"
            self.data_feed_id = "data_feed_id"
        self.admin_client = MetricsAdvisorAdministrationClient(service_endpoint,
                                                               MetricsAdvisorKeyCredential(subscription_key, api_key))

    def _create_data_feed(self, name):
        name = create_random_name(name)
        return self.admin_client.create_data_feed(
            DataFeed(
                name=name,
                source=SQLServerDataFeed(
                    connection_string=self.sql_server_connection_string,
                    query='select * from adsample2 where Timestamp = @StartTime'
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost"),
                        DataFeedMetric(name="revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category"),
                        DataFeedDimension(name="city")
                    ],
                    timestamp_column="Timestamp"
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                )
            )
        )

    def _create_data_feed_and_anomaly_detection_config(self, name):
        data_feed = self._create_data_feed(name)
        detection_config_name = create_random_name(name)
        detection_config = self.admin_client.create_metric_anomaly_detection_configuration(
            AnomalyDetectionConfiguration(
                name=detection_config_name,
                metric_id=data_feed.metric_ids[0],
                description="testing",
                whole_series_detection_condition=MetricDetectionCondition(
                    smart_detection_condition=SmartDetectionCondition(
                        sensitivity=50,
                        anomaly_detector_direction="Both",
                        suppress_condition=SuppressCondition(
                            min_number=50,
                            min_ratio=50
                        )
                    )
                )
            )
        )
        return detection_config, data_feed

    def _create_data_feed_for_update(self, name):
        data_feed_name = create_random_name(name)
        return self.admin_client.create_data_feed(
            DataFeed(
                name=data_feed_name,
                source=SQLServerDataFeed(
                    connection_string=self.sql_server_connection_string,
                    query='select * from adsample2 where Timestamp = @StartTime'
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost", display_name="display cost", description="the cost"),
                        DataFeedMetric(name="revenue", display_name="display revenue", description="the revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category", display_name="display category"),
                        DataFeedDimension(name="city", display_name="display city")
                    ],
                    timestamp_column="Timestamp"
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                    data_source_request_concurrency=0,
                    ingestion_retry_delay=-1,
                    ingestion_start_offset=-1,
                    stop_retry_after=-1,
                ),
                options=DataFeedOptions(
                    admin_emails=["*****@*****.**"],
                    data_feed_description="my first data feed",
                    missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
                        fill_type="SmartFilling"
                    ),
                    rollup_settings=DataFeedRollupSettings(
                        rollup_type="NoRollup",
                        rollup_method="None",
                    ),
                    viewer_emails=["viewers"],
                    access_mode="Private",
                    action_link_template="action link template"
                )
            )
        )

    def _create_anomaly_alert_config_for_update(self, name):
        detection_config, data_feed = self._create_data_feed_and_anomaly_detection_config(name)
        alert_config_name = create_random_name(name)
        alert_config = self.admin_client.create_anomaly_alert_configuration(
            AnomalyAlertConfiguration(
                name=alert_config_name,
                cross_metrics_operator="AND",
                metric_alert_configurations=[
                    MetricAlertConfiguration(
                        detection_configuration_id=detection_config.id,
                        alert_scope=MetricAnomalyAlertScope(
                            scope_type="TopN",
                            top_n_group_in_scope=TopNGroupScope(
                                top=5,
                                period=10,
                                min_top_count=9
                            )
                        ),
                        alert_conditions=MetricAnomalyAlertConditions(
                            metric_boundary_condition=MetricBoundaryCondition(
                                direction="Both",
                                companion_metric_id=data_feed.metric_ids[0],
                                lower=1.0,
                                upper=5.0
                            )
                        )
                    ),
                    MetricAlertConfiguration(
                        detection_configuration_id=detection_config.id,
                        alert_scope=MetricAnomalyAlertScope(
                            scope_type="SeriesGroup",
                            series_group_in_scope={'city': 'Shenzhen'}
                        ),
                        alert_conditions=MetricAnomalyAlertConditions(
                            severity_condition=SeverityCondition(
                                min_alert_severity="Low",
                                max_alert_severity="High"
                            )
                        )
                    ),
                    MetricAlertConfiguration(
                        detection_configuration_id=detection_config.id,
                        alert_scope=MetricAnomalyAlertScope(
                            scope_type="WholeSeries"
                        ),
                        alert_conditions=MetricAnomalyAlertConditions(
                            severity_condition=SeverityCondition(
                                min_alert_severity="Low",
                                max_alert_severity="High"
                            )
                        )
                    )
                ],
                hook_ids=[]
            )
        )
        return alert_config, data_feed, detection_config

    def _create_detection_config_for_update(self, name):
        data_feed = self._create_data_feed(name)
        detection_config_name = create_random_name("testupdated")
        detection_config = self.admin_client.create_metric_anomaly_detection_configuration(
            AnomalyDetectionConfiguration(
                name=detection_config_name,
                metric_id=data_feed.metric_ids[0],
                description="My test metric anomaly detection configuration",
                whole_series_detection_condition=MetricDetectionCondition(
                    cross_conditions_operator="AND",
                    smart_detection_condition=SmartDetectionCondition(
                        sensitivity=50,
                        anomaly_detector_direction="Both",
                        suppress_condition=SuppressCondition(
                            min_number=50,
                            min_ratio=50
                        )
                    ),
                    hard_threshold_condition=HardThresholdCondition(
                        anomaly_detector_direction="Both",
                        suppress_condition=SuppressCondition(
                            min_number=5,
                            min_ratio=5
                        ),
                        lower_bound=0,
                        upper_bound=100
                    ),
                    change_threshold_condition=ChangeThresholdCondition(
                        change_percentage=50,
                        shift_point=30,
                        within_range=True,
                        anomaly_detector_direction="Both",
                        suppress_condition=SuppressCondition(
                            min_number=2,
                            min_ratio=2
                        )
                    )
                ),
                series_detection_conditions=[MetricSingleSeriesDetectionCondition(
                    series_key={"city": "Shenzhen", "category": "Jewelry"},
                    smart_detection_condition=SmartDetectionCondition(
                        anomaly_detector_direction="Both",
                        sensitivity=63,
                        suppress_condition=SuppressCondition(
                            min_number=1,
                            min_ratio=100
                        )
                    )
                )],
                series_group_detection_conditions=[MetricSeriesGroupDetectionCondition(
                    series_group_key={"city": "Sao Paulo"},
                    smart_detection_condition=SmartDetectionCondition(
                        anomaly_detector_direction="Both",
                        sensitivity=63,
                        suppress_condition=SuppressCondition(
                            min_number=1,
                            min_ratio=100
                        )
                    )
                )]
            )
        )
        return detection_config, data_feed

    def _create_email_hook_for_update(self, name):
        return self.admin_client.create_hook(
            hook=EmailNotificationHook(
                name=name,
                emails_to_alert=["*****@*****.**"],
                description="my email hook",
                external_link="external link"
            )
        )

    def _create_web_hook_for_update(self, name):
        return self.admin_client.create_hook(
            hook=WebNotificationHook(
                name=name,
                endpoint="https://httpbin.org/post",
                description="my web hook",
                external_link="external link",
                username="******",
                password="******"
            )
        )
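A hedged sketch of how a test method might use the helpers above (the test name, the assertion, and the delete_data_feed cleanup are assumptions based on the administration client API, not taken from the original test file):

    def test_create_and_delete_data_feed(self):
        data_feed = self._create_data_feed("testfeed")
        try:
            # The helper should return a created feed with a service-assigned ID.
            self.assertIsNotNone(data_feed.id)
        finally:
            # Always remove the feed so live test runs do not leak resources.
            self.admin_client.delete_data_feed(data_feed.id)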