Example no. 1
0
async def sample_create_data_feed_async():
    """Create a SQL Server data feed with the async administration client.

    Reads service endpoint/credentials and the SQL connection string/query
    from environment variables, creates the data feed, and returns it.
    """
    # [START create_data_feed_async]
    # Local imports keep the published sample region self-contained;
    # without them the snippet raises NameError on `os` and `datetime`.
    import datetime
    import os

    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential
    from azure.ai.metricsadvisor.aio import MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        SQLServerDataFeed,
        DataFeedSchema,
        Metric,
        Dimension,
        DataFeedOptions,
        DataFeedRollupSettings,
        DataFeedMissingDataPointFillSettings
    )

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    sql_server_connection_string = os.getenv("METRICS_ADVISOR_SQL_SERVER_CONNECTION_STRING")
    query = os.getenv("METRICS_ADVISOR_SQL_SERVER_QUERY")

    client = MetricsAdvisorAdministrationClient(
        service_endpoint,
        MetricsAdvisorKeyCredential(subscription_key, api_key)
    )

    # The async client is used as a context manager so the underlying
    # transport is closed even if create_data_feed raises.
    async with client:
        data_feed = await client.create_data_feed(
            name="My data feed",
            source=SQLServerDataFeed(
                connection_string=sql_server_connection_string,
                query=query,
            ),
            granularity="Daily",
            schema=DataFeedSchema(
                metrics=[
                    Metric(name="cost", display_name="Cost"),
                    Metric(name="revenue", display_name="Revenue")
                ],
                dimensions=[
                    Dimension(name="category", display_name="Category"),
                    Dimension(name="city", display_name="City")
                ],
                timestamp_column="Timestamp"
            ),
            # A bare datetime is accepted as shorthand for the ingestion begin time.
            ingestion_settings=datetime.datetime(2019, 10, 1),
            options=DataFeedOptions(
                data_feed_description="cost/revenue data feed",
                rollup_settings=DataFeedRollupSettings(
                    rollup_type="AutoRollup",
                    rollup_method="Sum",
                    rollup_identification_value="__CUSTOM_SUM__"
                ),
                missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
                    fill_type="SmartFilling"
                ),
                access_mode="Private"
            )
        )
        # [END create_data_feed_async]

        return data_feed
    async def _create_data_feed_for_update(self, name):
        """Create a fully-populated SQL Server data feed used as the fixture for update tests."""
        data_feed_name = self.create_random_name(name)

        # Assemble each argument up front so the create call reads flat.
        source = SQLServerDataFeed(
            connection_string=self.sql_server_connection_string,
            query=u"select * from adsample2 where Timestamp = @StartTime"
        )
        schema = DataFeedSchema(
            metrics=[
                Metric(name="cost", display_name="display cost", description="the cost"),
                Metric(name="revenue", display_name="display revenue", description="the revenue")
            ],
            dimensions=[
                Dimension(name="category", display_name="display category"),
                Dimension(name="city", display_name="display city")
            ],
            timestamp_column="Timestamp"
        )
        ingestion_settings = DataFeedIngestionSettings(
            ingestion_begin_time=datetime.datetime(2019, 10, 1),
            data_source_request_concurrency=0,
            ingestion_retry_delay=-1,
            ingestion_start_offset=-1,
            stop_retry_after=-1,
        )
        options = DataFeedOptions(
            admins=["*****@*****.**"],
            data_feed_description="my first data feed",
            missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
                fill_type="SmartFilling"
            ),
            rollup_settings=DataFeedRollupSettings(
                rollup_type="NoRollup",
                rollup_method="None",
            ),
            viewers=["viewers"],
            access_mode="Private",
            action_link_template="action link template"
        )

        return await self.admin_client.create_data_feed(
            name=data_feed_name,
            source=source,
            granularity=DataFeedGranularity(granularity_type="Daily"),
            schema=schema,
            ingestion_settings=ingestion_settings,
            options=options
        )
 def _create_data_feed(self, name):
     """Create a minimal SQL Server data feed with default metric/dimension settings."""
     feed_name = self.create_random_name(name)
     source = SQLServerDataFeed(
         connection_string=self.sql_server_connection_string,
         query="select * from adsample2 where Timestamp = @StartTime")
     schema = DataFeedSchema(
         metrics=[Metric(name="cost"), Metric(name="revenue")],
         dimensions=[Dimension(name="category"), Dimension(name="city")],
     )
     # Granularity and ingestion settings use the string shorthand forms.
     return self.admin_client.create_data_feed(
         name=feed_name,
         source=source,
         granularity="Daily",
         schema=schema,
         ingestion_settings="2019-10-01T00:00:00Z",
     )
    def test_create_data_feed_with_application_insights(self):
        """Create an Application Insights data feed and verify source round-trip."""
        name = self.create_random_name("applicationinsights")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            query = "let gran=60m; let starttime=datetime(@StartTime); let endtime=starttime + gran; requests | " \
                "where timestamp >= starttime and timestamp < endtime | summarize request_count = count(), " \
                "duration_avg_ms = avg(duration), duration_95th_ms = percentile(duration, 95), " \
                "duration_max_ms = max(duration) by resultCode"
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureApplicationInsightsDataFeed(
                    azure_cloud="Azure",
                    application_id="3706fe8b-98f1-47c7-bf69-b73b6e53274d",
                    api_key=self.application_insights_api_key,
                    query=query
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2020, 7, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "AzureApplicationInsights")
            self.assertIsNotNone(data_feed.source.api_key)
            self.assertEqual(data_feed.source.application_id, "3706fe8b-98f1-47c7-bf69-b73b6e53274d")
            self.assertIsNotNone(data_feed.source.query)

        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_datalake(self):
        """Create a Data Lake Storage Gen2 data feed and verify source round-trip."""
        name = self.create_random_name("datalake")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureDataLakeStorageGen2DataFeed(
                    account_name="adsampledatalakegen2",
                    account_key=self.azure_datalake_account_key,
                    file_system_name="adsample",
                    directory_template="%Y/%m/%d",
                    file_template="adsample.json"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost", display_name="Cost"),
                        Metric(name="revenue", display_name="Revenue")
                    ],
                    dimensions=[
                        Dimension(name="category", display_name="Category"),
                        Dimension(name="city", display_name="City")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "AzureDataLakeStorageGen2")
            self.assertIsNotNone(data_feed.source.account_key)
            self.assertEqual(data_feed.source.account_name, "adsampledatalakegen2")
            self.assertEqual(data_feed.source.file_system_name, "adsample")
            self.assertEqual(data_feed.source.directory_template, "%Y/%m/%d")
            self.assertEqual(data_feed.source.file_template, "adsample.json")

        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_influxdb(self):
        """Create an InfluxDB data feed and verify source round-trip."""
        name = self.create_random_name("influxdb")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=InfluxDBDataFeed(
                    connection_string=self.influxdb_connection_string,
                    database="adsample",
                    user_name="adreadonly",
                    password=self.influxdb_password,
                    query="'select * from adsample2 where Timestamp = @StartTime'"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "InfluxDB")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertIsNotNone(data_feed.source.query)
            self.assertIsNotNone(data_feed.source.password)
            self.assertEqual(data_feed.source.database, "adsample")
            self.assertEqual(data_feed.source.user_name, "adreadonly")

        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_elasticsearch(self):
        """Create an Elasticsearch data feed and verify source round-trip."""
        name = self.create_random_name("elastic")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=ElasticsearchDataFeed(
                    host="ad-sample-es.westus2.cloudapp.azure.com",
                    port="9200",
                    auth_header=self.elasticsearch_auth_header,
                    query="'select * from adsample where timestamp = @StartTime'"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost", display_name="Cost"),
                        Metric(name="revenue", display_name="Revenue")
                    ],
                    dimensions=[
                        Dimension(name="category", display_name="Category"),
                        Dimension(name="city", display_name="City")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "Elasticsearch")
            self.assertIsNotNone(data_feed.source.auth_header)
            self.assertEqual(data_feed.source.port, "9200")
            self.assertEqual(data_feed.source.host, "ad-sample-es.westus2.cloudapp.azure.com")
            self.assertEqual(data_feed.source.query, "'select * from adsample where timestamp = @StartTime'")

        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_azure_cosmos_db(self):
        """Create an Azure Cosmos DB data feed and verify source round-trip."""
        name = self.create_random_name("cosmosfeed")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureCosmosDBDataFeed(
                    connection_string=self.azure_cosmosdb_connection_string,
                    sql_query="'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'",
                    database="adsample",
                    collection_id="adsample"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "AzureCosmosDB")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertEqual(data_feed.source.database, "adsample")
            self.assertEqual(data_feed.source.collection_id, "adsample")
            self.assertEqual(data_feed.source.sql_query, "'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'")
        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_mongodb(self):
        """Create a MongoDB data feed and verify source round-trip."""
        name = self.create_random_name("mongodb")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=MongoDBDataFeed(
                    connection_string=self.mongodb_connection_string,
                    database="adsample",
                    command='{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}'
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "MongoDB")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertEqual(data_feed.source.database, "adsample")
            self.assertEqual(data_feed.source.command, '{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}')

        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_data_explorer(self):
        """Create an Azure Data Explorer data feed and verify source round-trip."""
        name = self.create_random_name("azuredataexplorer")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            query = "let StartDateTime = datetime(@StartTime); let EndDateTime = StartDateTime + 1d; " \
                    "adsample | where Timestamp >= StartDateTime and Timestamp < EndDateTime"
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureDataExplorerDataFeed(
                    connection_string=self.azure_data_explorer_connection_string,
                    query=query
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "AzureDataExplorer")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertEqual(data_feed.source.query, query)

        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_http_request_post(self):
        """Create an HTTP request (POST) data feed and verify source round-trip."""
        name = self.create_random_name("httprequestfeedpost")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=HttpRequestDataFeed(
                    url=self.http_request_post_url,
                    http_method="POST",
                    payload="{'startTime': '@StartTime'}"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "HttpRequest")
            self.assertIsNotNone(data_feed.source.url)
            self.assertEqual(data_feed.source.http_method, "POST")
            self.assertEqual(data_feed.source.payload, "{'startTime': '@StartTime'}")
        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_azure_blob(self):
        """Create an Azure Blob data feed and verify source round-trip."""
        name = self.create_random_name("blobfeed")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureBlobDataFeed(
                    connection_string=self.azure_blob_connection_string,
                    container="adsample",
                    blob_template="%Y/%m/%d/%h/JsonFormatV2.json"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "AzureBlob")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertEqual(data_feed.source.container, "adsample")
            self.assertEqual(data_feed.source.blob_template, "%Y/%m/%d/%h/JsonFormatV2.json")
        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_azure_table(self):
        """Create an Azure Table data feed and verify source round-trip."""
        name = self.create_random_name("tablefeed")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureTableDataFeed(
                    connection_string=self.azure_table_connection_string,
                    query="PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'",
                    table="adsample"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "AzureTable")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertEqual(data_feed.source.table, "adsample")
            self.assertEqual(data_feed.source.query, "PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'")
        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_postgresql(self):
        """Create a PostgreSQL data feed and verify source round-trip."""
        name = self.create_random_name("postgresql")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=PostgreSqlDataFeed(
                    connection_string=self.postgresql_connection_string,
                    query="'select * from adsample2 where Timestamp = @StartTime'"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost"),
                        Metric(name="revenue")
                    ],
                    dimensions=[
                        Dimension(name="category"),
                        Dimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "PostgreSql")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertEqual(data_feed.source.query, "'select * from adsample2 where Timestamp = @StartTime'")

        finally:
            # Delete only if creation succeeded.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_from_sql_server(self):
        """End-to-end create/verify/delete of a fully-configured SQL Server data feed.

        Verifies that every creation option round-trips through the service,
        then deletes the feed and confirms a subsequent get raises
        ResourceNotFoundError.
        """
        data_feed_name = self.create_random_name("testfeed")
        # Pre-initialize so the finally-block cleanup cannot raise
        # UnboundLocalError if create_data_feed fails before assignment.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=data_feed_name,
                source=SQLServerDataFeed(
                    connection_string=self.sql_server_connection_string,
                    query=u"select * from adsample2 where Timestamp = @StartTime"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        Metric(name="cost", display_name="display cost", description="the cost"),
                        Metric(name="revenue", display_name="display revenue", description="the revenue")
                    ],
                    dimensions=[
                        Dimension(name="category", display_name="display category"),
                        Dimension(name="city", display_name="display city")
                    ],
                    timestamp_column="Timestamp"
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                    data_source_request_concurrency=0,
                    ingestion_retry_delay=-1,
                    ingestion_start_offset=-1,
                    stop_retry_after=-1,
                ),
                options=DataFeedOptions(
                    admins=["*****@*****.**"],
                    data_feed_description="my first data feed",
                    missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
                        fill_type="SmartFilling"
                    ),
                    rollup_settings=DataFeedRollupSettings(
                        rollup_type="NoRollup",
                        rollup_method="None",
                    ),
                    viewers=["viewers"],
                    access_mode="Private",
                    action_link_template="action link template"
                )
            )
            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "SqlServer")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertIsNotNone(data_feed.source.query)
            self.assertEqual(data_feed.granularity.granularity_type, "Daily")
            self.assertEqual(data_feed.granularity.custom_granularity_value, None)
            self.assertEqual(data_feed.schema.metrics[0].name, "cost")
            self.assertEqual(data_feed.schema.metrics[1].name, "revenue")
            self.assertEqual(data_feed.schema.metrics[0].display_name, "display cost")
            self.assertEqual(data_feed.schema.metrics[1].display_name, "display revenue")
            self.assertEqual(data_feed.schema.metrics[0].description, "the cost")
            self.assertEqual(data_feed.schema.metrics[1].description, "the revenue")
            self.assertEqual(data_feed.schema.dimensions[0].name, "category")
            self.assertEqual(data_feed.schema.dimensions[1].name, "city")
            self.assertEqual(data_feed.schema.dimensions[0].display_name, "display category")
            self.assertEqual(data_feed.schema.dimensions[1].display_name, "display city")
            # Service returns the begin time as timezone-aware UTC.
            self.assertEqual(data_feed.ingestion_settings.ingestion_begin_time,
                             datetime.datetime(2019, 10, 1, tzinfo=tzutc()))
            self.assertEqual(data_feed.ingestion_settings.data_source_request_concurrency, 0)
            self.assertEqual(data_feed.ingestion_settings.ingestion_retry_delay, -1)
            self.assertEqual(data_feed.ingestion_settings.ingestion_start_offset, -1)
            self.assertEqual(data_feed.ingestion_settings.stop_retry_after, -1)
            self.assertIn("*****@*****.**", data_feed.options.admins)
            self.assertEqual(data_feed.options.data_feed_description, "my first data feed")
            self.assertEqual(data_feed.options.missing_data_point_fill_settings.fill_type, "SmartFilling")
            self.assertEqual(data_feed.options.rollup_settings.rollup_type, "NoRollup")
            self.assertEqual(data_feed.options.rollup_settings.rollup_method, "None")
            self.assertEqual(data_feed.options.viewers, ["viewers"])
            self.assertEqual(data_feed.options.access_mode, "Private")
            self.assertEqual(data_feed.options.action_link_template, "action link template")
            self.assertEqual(data_feed.status, "Active")
            self.assertTrue(data_feed.is_admin)
            self.assertIsNotNone(data_feed.metric_ids)

        finally:
            # Clean up only if creation succeeded, then confirm deletion took effect.
            if data_feed:
                self.admin_client.delete_data_feed(data_feed.id)

                with self.assertRaises(ResourceNotFoundError):
                    self.admin_client.get_data_feed(data_feed.id)