def test_create_data_feed_with_data_explorer(self):
        """Create an Azure Data Explorer data feed and verify its properties round-trip."""
        name = self.create_random_name("azuredataexplorer")
        data_feed = None  # set before try so the finally-cleanup can test it
        try:
            query = "let StartDateTime = datetime(@StartTime); let EndDateTime = StartDateTime + 1d; " \
                    "adsample | where Timestamp >= StartDateTime and Timestamp < EndDateTime"
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureDataExplorerDataFeedSource(
                    connection_string=self.azure_data_explorer_connection_string,
                    query=query),
                granularity=DataFeedGranularity(granularity_type="Daily", ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost"),
                        DataFeedMetric(name="revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category"),
                        DataFeedDimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1), ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type,
                             "AzureDataExplorer")
            self.assertEqual(data_feed.source.query, query)

        finally:
            # Clean up only when creation succeeded; referencing an unbound
            # data_feed here would raise NameError and mask the real failure.
            if data_feed is not None:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_azure_blob(self):
        """Create an Azure Blob data feed and verify its properties round-trip."""
        name = self.create_random_name("blobfeed")
        data_feed = None  # set before try so the finally-cleanup can test it
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureBlobDataFeedSource(
                    connection_string=self.azure_blob_connection_string,
                    container="adsample",
                    blob_template="%Y/%m/%d/%h/JsonFormatV2.json"),
                granularity=DataFeedGranularity(granularity_type="Daily", ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost"),
                        DataFeedMetric(name="revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category"),
                        DataFeedDimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1), ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "AzureBlob")
            self.assertEqual(data_feed.source.container, "adsample")
            self.assertEqual(data_feed.source.blob_template,
                             "%Y/%m/%d/%h/JsonFormatV2.json")
        finally:
            # Guard against NameError when create_data_feed itself failed,
            # which would otherwise hide the original exception.
            if data_feed is not None:
                self.admin_client.delete_data_feed(data_feed.id)
Exemplo n.º 3
0
 def _create_data_feed(self, name):
     """Helper: provision a SQL Server data feed under a randomized name."""
     feed_name = create_random_name(name)
     feed_source = SQLServerDataFeed(
         connection_string=self.sql_server_connection_string,
         query='select * from adsample2 where Timestamp = @StartTime'
     )
     feed_schema = DataFeedSchema(
         metrics=[
             DataFeedMetric(name="cost"),
             DataFeedMetric(name="revenue")
         ],
         dimensions=[
             DataFeedDimension(name="category"),
             DataFeedDimension(name="city")
         ],
         timestamp_column="Timestamp"
     )
     feed_ingestion = DataFeedIngestionSettings(
         ingestion_begin_time=datetime.datetime(2019, 10, 1)
     )
     return self.admin_client.create_data_feed(
         DataFeed(
             name=feed_name,
             source=feed_source,
             granularity=DataFeedGranularity(granularity_type="Daily"),
             schema=feed_schema,
             ingestion_settings=feed_ingestion
         )
     )
    def test_create_data_feed_with_postgresql(self):
        """Create a PostgreSQL data feed and verify its properties round-trip."""
        name = self.create_random_name("postgresql")
        data_feed = None  # set before try so the finally-cleanup can test it
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=PostgreSqlDataFeedSource(
                    connection_string=self.postgresql_connection_string,
                    query=
                    "'select * from adsample2 where Timestamp = @StartTime'"),
                granularity=DataFeedGranularity(granularity_type="Daily", ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost"),
                        DataFeedMetric(name="revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category"),
                        DataFeedDimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1), ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "PostgreSql")
            self.assertEqual(
                data_feed.source.query,
                "'select * from adsample2 where Timestamp = @StartTime'")

        finally:
            # Guard against NameError when create_data_feed itself failed,
            # which would otherwise hide the original exception.
            if data_feed is not None:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_azure_table(self):
        """Create an Azure Table data feed and verify its properties round-trip."""
        name = self.create_random_name("tablefeed")
        data_feed = None  # set before try so the finally-cleanup can test it
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=AzureTableDataFeedSource(
                    connection_string=self.azure_table_connection_string,
                    query=
                    "PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'",
                    table="adsample"),
                granularity=DataFeedGranularity(granularity_type="Daily", ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost"),
                        DataFeedMetric(name="revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category"),
                        DataFeedDimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1), ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "AzureTable")
            self.assertEqual(data_feed.source.table, "adsample")
            self.assertEqual(
                data_feed.source.query,
                "PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'")
        finally:
            # Guard against NameError when create_data_feed itself failed,
            # which would otherwise hide the original exception.
            if data_feed is not None:
                self.admin_client.delete_data_feed(data_feed.id)
def sample_create_data_feed():
    """Sample: create a SQL Server data feed with Metrics Advisor.

    Reads endpoint, keys, connection string and query from the environment,
    then creates and returns the new data feed.
    """
    # [START create_data_feed]
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        SQLServerDataFeed,
        DataFeedSchema,
        DataFeedMetric,
        DataFeedDimension,
        DataFeedOptions,
        DataFeedRollupSettings,
        DataFeedMissingDataPointFillSettings,
    )

    # Service configuration is supplied via environment variables.
    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    sql_server_connection_string = os.getenv("METRICS_ADVISOR_SQL_SERVER_CONNECTION_STRING")
    query = os.getenv("METRICS_ADVISOR_SQL_SERVER_QUERY")

    credential = MetricsAdvisorKeyCredential(subscription_key, api_key)
    client = MetricsAdvisorAdministrationClient(service_endpoint, credential)

    feed_schema = DataFeedSchema(
        metrics=[
            DataFeedMetric(name="cost", display_name="Cost"),
            DataFeedMetric(name="revenue", display_name="Revenue")
        ],
        dimensions=[
            DataFeedDimension(name="category", display_name="Category"),
            DataFeedDimension(name="city", display_name="City")
        ],
        timestamp_column="Timestamp"
    )
    feed_options = DataFeedOptions(
        data_feed_description="cost/revenue data feed",
        rollup_settings=DataFeedRollupSettings(
            rollup_type="AutoRollup",
            rollup_method="Sum",
            rollup_identification_value="__CUSTOM_SUM__"
        ),
        missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
            fill_type="SmartFilling"
        ),
        access_mode="Private"
    )

    data_feed = client.create_data_feed(
        name="My data feed",
        source=SQLServerDataFeed(
            connection_string=sql_server_connection_string,
            query=query,
        ),
        granularity="Daily",
        schema=feed_schema,
        ingestion_settings=datetime.datetime(2019, 10, 1),
        options=feed_options
    )

    return data_feed
    def create_data_feed(self, name):
        """Create a fully-configured SQL Server data feed for use as a test fixture."""
        name = self.create_random_name(name)
        if is_live():
            # Record the generated name so recorded playback can reuse it.
            self.variables["data_feed_name"] = name
        feed_source = SqlServerDataFeedSource(
            connection_string=os.getenv("METRICS_ADVISOR_SQL_SERVER_CONNECTION_STRING", "metrics_advisor_sql_server_connection_string"),
            query="select * from adsample2 where Timestamp = @StartTime"
        )
        feed_schema = DataFeedSchema(
            metrics=[
                DataFeedMetric(name="cost", description="the cost"),
                DataFeedMetric(name="revenue", description="the revenue")
            ],
            dimensions=[
                DataFeedDimension(name="category"),
                DataFeedDimension(name="region")
            ],
            timestamp_column="Timestamp"
        )
        feed_ingestion = DataFeedIngestionSettings(
            ingestion_begin_time=datetime.datetime(2019, 10, 1),
            data_source_request_concurrency=0,
            ingestion_retry_delay=-1,
            ingestion_start_offset=-1,
            stop_retry_after=-1,
        )
        data_feed = self.client.create_data_feed(
            name=self.variables["data_feed_name"],
            source=feed_source,
            granularity=DataFeedGranularity(granularity_type="Daily"),
            schema=feed_schema,
            ingestion_settings=feed_ingestion,
            admins=["*****@*****.**"],
            data_feed_description="my first data feed",
            missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
                fill_type="SmartFilling"
            ),
            rollup_settings=DataFeedRollupSettings(
                rollup_type="NoRollup",
                rollup_method="None",
            ),
            viewers=["viewers"],
            access_mode="Private",
            action_link_template="action link template"
        )

        if is_live():
            # Persist identifiers needed by dependent tests during playback.
            self.variables["data_feed_id"] = data_feed.id
            self.variables["data_feed_metric_id"] = data_feed.metric_ids['cost']
        return data_feed
Exemplo n.º 8
0
 def _create_data_feed_for_update(self, name):
     """Helper: build a richly-configured SQL Server feed used by update tests."""
     data_feed_name = create_random_name(name)
     feed_schema = DataFeedSchema(
         metrics=[
             DataFeedMetric(name="cost", display_name="display cost", description="the cost"),
             DataFeedMetric(name="revenue", display_name="display revenue", description="the revenue")
         ],
         dimensions=[
             DataFeedDimension(name="category", display_name="display category"),
             DataFeedDimension(name="city", display_name="display city")
         ],
         timestamp_column="Timestamp"
     )
     feed_ingestion = DataFeedIngestionSettings(
         ingestion_begin_time=datetime.datetime(2019, 10, 1),
         data_source_request_concurrency=0,
         ingestion_retry_delay=-1,
         ingestion_start_offset=-1,
         stop_retry_after=-1,
     )
     feed_options = DataFeedOptions(
         admin_emails=["*****@*****.**"],
         data_feed_description="my first data feed",
         missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
             fill_type="SmartFilling"
         ),
         rollup_settings=DataFeedRollupSettings(
             rollup_type="NoRollup",
             rollup_method="None",
         ),
         viewer_emails=["viewers"],
         access_mode="Private",
         action_link_template="action link template"
     )
     return self.admin_client.create_data_feed(
         DataFeed(
             name=data_feed_name,
             source=SQLServerDataFeed(
                 connection_string=self.sql_server_connection_string,
                 query='select * from adsample2 where Timestamp = @StartTime'
             ),
             granularity=DataFeedGranularity(granularity_type="Daily"),
             schema=feed_schema,
             ingestion_settings=feed_ingestion,
             options=feed_options
         )
     )
    async def test_create_data_feed_with_application_insights(self):
        """Create an Application Insights data feed (async) and verify round-trip."""
        name = self.create_random_name("applicationinsightsasync")
        async with self.admin_client:
            data_feed = None  # set before try so the finally-cleanup can test it
            try:
                query = "let gran=60m; let starttime=datetime(@StartTime); let endtime=starttime + gran; requests | " \
                    "where timestamp >= starttime and timestamp < endtime | summarize request_count = count(), " \
                    "duration_avg_ms = avg(duration), duration_95th_ms = percentile(duration, 95), " \
                    "duration_max_ms = max(duration) by resultCode"
                data_feed = await self.admin_client.create_data_feed(
                    name=name,
                    source=AzureApplicationInsightsDataFeedSource(
                        azure_cloud="Azure",
                        application_id="3706fe8b-98f1-47c7-bf69-b73b6e53274d",
                        api_key=self.application_insights_api_key,
                        query=query
                    ),
                    granularity=DataFeedGranularity(
                        granularity_type="Daily",
                    ),
                    schema=DataFeedSchema(
                        metrics=[
                            DataFeedMetric(name="cost"),
                            DataFeedMetric(name="revenue")
                        ],
                        dimensions=[
                            DataFeedDimension(name="category"),
                            DataFeedDimension(name="city")
                        ],
                    ),
                    ingestion_settings=DataFeedIngestionSettings(
                        ingestion_begin_time=datetime.datetime(2021, 7, 1),
                    ),

                )

                self.assertIsNotNone(data_feed.id)
                self.assertIsNotNone(data_feed.created_time)
                self.assertIsNotNone(data_feed.name)
                self.assertEqual(data_feed.source.data_source_type, "AzureApplicationInsights")
                self.assertEqual(data_feed.source.application_id, "3706fe8b-98f1-47c7-bf69-b73b6e53274d")
                self.assertIsNotNone(data_feed.source.query)

            finally:
                # Guard against NameError when create_data_feed itself failed,
                # which would otherwise hide the original exception.
                if data_feed is not None:
                    await self.admin_client.delete_data_feed(data_feed.id)
    async def test_create_data_feed_with_datalake(self):
        """Create an Azure Data Lake Storage Gen2 data feed (async) and verify round-trip."""
        name = self.create_random_name("datalakeasync")
        async with self.admin_client:
            data_feed = None  # set before try so the finally-cleanup can test it
            try:
                data_feed = await self.admin_client.create_data_feed(
                    name=name,
                    source=AzureDataLakeStorageGen2DataFeedSource(
                        account_name="adsampledatalakegen2",
                        account_key=self.azure_datalake_account_key,
                        file_system_name="adsample",
                        directory_template="%Y/%m/%d",
                        file_template="adsample.json"
                    ),
                    granularity=DataFeedGranularity(
                        granularity_type="Daily",
                    ),
                    schema=DataFeedSchema(
                        metrics=[
                            DataFeedMetric(name="cost", display_name="Cost"),
                            DataFeedMetric(name="revenue", display_name="Revenue")
                        ],
                        dimensions=[
                            DataFeedDimension(name="category", display_name="Category"),
                            DataFeedDimension(name="city", display_name="city")
                        ],
                    ),
                    ingestion_settings=DataFeedIngestionSettings(
                        ingestion_begin_time=datetime.datetime(2019, 1, 1),
                    ),

                )

                self.assertIsNotNone(data_feed.id)
                self.assertIsNotNone(data_feed.created_time)
                self.assertIsNotNone(data_feed.name)
                self.assertEqual(data_feed.source.data_source_type, "AzureDataLakeStorageGen2")
                self.assertEqual(data_feed.source.account_name, "adsampledatalakegen2")
                self.assertEqual(data_feed.source.file_system_name, "adsample")
                self.assertEqual(data_feed.source.directory_template, "%Y/%m/%d")
                self.assertEqual(data_feed.source.file_template, "adsample.json")

            finally:
                # Guard against NameError when create_data_feed itself failed,
                # which would otherwise hide the original exception.
                if data_feed is not None:
                    await self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_influxdb(self):
        """Create an InfluxDB data feed and verify its properties round-trip."""
        name = self.create_random_name("influxdb")
        data_feed = None  # set before try so the finally-cleanup can test it
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=InfluxDBDataFeed(
                    connection_string=self.influxdb_connection_string,
                    database="adsample",
                    user_name="adreadonly",
                    password=self.influxdb_password,
                    query="'select * from adsample2 where Timestamp = @StartTime'"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost"),
                        DataFeedMetric(name="revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category"),
                        DataFeedDimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),

            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "InfluxDB")
            self.assertIsNotNone(data_feed.source.connection_string)
            self.assertIsNotNone(data_feed.source.query)
            self.assertIsNotNone(data_feed.source.password)
            self.assertEqual(data_feed.source.database, "adsample")
            self.assertEqual(data_feed.source.user_name, "adreadonly")

        finally:
            # Guard against NameError when create_data_feed itself failed,
            # which would otherwise hide the original exception.
            if data_feed is not None:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_application_insights(self, client, variables):
        """Verify an Application Insights data feed can be created and round-trips."""
        name = self.create_random_name("applicationinsights")
        if self.is_live:
            variables["data_feed_name"] = name
        try:
            query = (
                "let gran=60m; let starttime=datetime(@StartTime); let endtime=starttime + gran; requests | "
                "where timestamp >= starttime and timestamp < endtime | summarize request_count = count(), "
                "duration_avg_ms = avg(duration), duration_95th_ms = percentile(duration, 95), "
                "duration_max_ms = max(duration) by resultCode"
            )
            feed_source = AzureApplicationInsightsDataFeedSource(
                azure_cloud="Azure",
                application_id="3706fe8b-98f1-47c7-bf69-b73b6e53274d",
                api_key="application_insights_api_key",
                query=query)
            feed_schema = DataFeedSchema(
                metrics=[
                    DataFeedMetric(name="cost"),
                    DataFeedMetric(name="revenue")
                ],
                dimensions=[
                    DataFeedDimension(name="category"),
                    DataFeedDimension(name="city")
                ],
            )
            data_feed = client.create_data_feed(
                name=variables["data_feed_name"],
                source=feed_source,
                granularity=DataFeedGranularity(granularity_type="Daily"),
                schema=feed_schema,
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2021, 7, 1)),
            )
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "AzureApplicationInsights"
            assert data_feed.source.application_id == "3706fe8b-98f1-47c7-bf69-b73b6e53274d"
            assert data_feed.source.query is not None

        finally:
            self.clean_up(client.delete_data_feed, variables)
        return variables
    def test_create_data_feed_with_datalake(self, client, variables):
        """Verify an Azure Data Lake Storage Gen2 data feed can be created and round-trips."""
        name = self.create_random_name("datalake")
        if self.is_live:
            variables["data_feed_name"] = name
        try:
            feed_source = AzureDataLakeStorageGen2DataFeedSource(
                account_name="adsampledatalakegen2",
                account_key="azure_datalake_account_key",
                file_system_name="adsample",
                directory_template="%Y/%m/%d",
                file_template="adsample.json")
            feed_schema = DataFeedSchema(
                metrics=[
                    DataFeedMetric(name="cost", display_name="Cost"),
                    DataFeedMetric(name="revenue", display_name="Revenue")
                ],
                dimensions=[
                    DataFeedDimension(name="category", display_name="Category"),
                    DataFeedDimension(name="city", display_name="city")
                ],
            )
            data_feed = client.create_data_feed(
                name=variables["data_feed_name"],
                source=feed_source,
                granularity=DataFeedGranularity(granularity_type="Daily"),
                schema=feed_schema,
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1)),
            )
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "AzureDataLakeStorageGen2"
            assert data_feed.source.account_name == "adsampledatalakegen2"
            assert data_feed.source.file_system_name == "adsample"
            assert data_feed.source.directory_template == "%Y/%m/%d"
            assert data_feed.source.file_template == "adsample.json"

        finally:
            self.clean_up(client.delete_data_feed, variables)
        return variables
Exemplo n.º 14
0
 def _create_data_feed(self, name):
     """Helper: create a minimal SQL Server data feed with a randomized name."""
     feed_name = create_random_name(name)
     feed_schema = DataFeedSchema(
         metrics=[
             DataFeedMetric(name="cost"),
             DataFeedMetric(name="revenue")
         ],
         dimensions=[
             DataFeedDimension(name="category"),
             DataFeedDimension(name="city")
         ],
     )
     return self.admin_client.create_data_feed(
         name=feed_name,
         source=SqlServerDataFeedSource(
             connection_string=self.sql_server_connection_string,
             query="select * from adsample2 where Timestamp = @StartTime"),
         granularity="Daily",
         schema=feed_schema,
         ingestion_settings="2019-10-01T00:00:00Z",
     )
    def test_create_data_feed_with_influxdb(self, client, variables):
        """Verify an InfluxDB data feed can be created and round-trips."""
        name = self.create_random_name("influxdb")
        if self.is_live:
            variables["data_feed_name"] = name
        try:
            feed_source = InfluxDbDataFeedSource(
                connection_string="influxdb_connection_string",
                database="adsample",
                user_name="adreadonly",
                password="******",
                query="'select * from adsample2 where Timestamp = @StartTime'")
            feed_schema = DataFeedSchema(
                metrics=[
                    DataFeedMetric(name="cost"),
                    DataFeedMetric(name="revenue")
                ],
                dimensions=[
                    DataFeedDimension(name="category"),
                    DataFeedDimension(name="city")
                ],
            )
            data_feed = client.create_data_feed(
                name=variables["data_feed_name"],
                source=feed_source,
                granularity=DataFeedGranularity(granularity_type="Daily"),
                schema=feed_schema,
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1)),
            )
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "InfluxDB"
            assert data_feed.source.query is not None
            assert data_feed.source.database == "adsample"
            assert data_feed.source.user_name == "adreadonly"

        finally:
            self.clean_up(client.delete_data_feed, variables)
        return variables
    def test_create_data_feed_with_elasticsearch(self):
        """Create an Elasticsearch data feed and verify its properties round-trip."""
        name = self.create_random_name("elastic")
        data_feed = None  # set before try so the finally-cleanup can test it
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=ElasticsearchDataFeed(
                    host="ad-sample-es.westus2.cloudapp.azure.com",
                    port="9200",
                    auth_header=self.elasticsearch_auth_header,
                    query="'select * from adsample where timestamp = @StartTime'"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost", display_name="Cost"),
                        DataFeedMetric(name="revenue", display_name="Revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category", display_name="Category"),
                        DataFeedDimension(name="city", display_name="City")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),

            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "Elasticsearch")
            self.assertIsNotNone(data_feed.source.auth_header)
            self.assertEqual(data_feed.source.port, "9200")
            self.assertEqual(data_feed.source.host, "ad-sample-es.westus2.cloudapp.azure.com")
            self.assertEqual(data_feed.source.query, "'select * from adsample where timestamp = @StartTime'")

        finally:
            # Guard against NameError when create_data_feed itself failed,
            # which would otherwise hide the original exception.
            if data_feed is not None:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_azure_cosmos_db(self, client, variables):
        """Verify an Azure Cosmos DB data feed can be created and round-trips."""
        name = self.create_random_name("cosmosfeed")
        if self.is_live:
            variables["data_feed_name"] = name
        try:
            feed_source = AzureCosmosDbDataFeedSource(
                connection_string="azure_cosmosdb_connection_string",
                sql_query=
                "'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'",
                database="adsample",
                collection_id="adsample")
            feed_schema = DataFeedSchema(
                metrics=[
                    DataFeedMetric(name="cost"),
                    DataFeedMetric(name="revenue")
                ],
                dimensions=[
                    DataFeedDimension(name="category"),
                    DataFeedDimension(name="city")
                ],
            )
            data_feed = client.create_data_feed(
                name=variables["data_feed_name"],
                source=feed_source,
                granularity=DataFeedGranularity(granularity_type="Daily"),
                schema=feed_schema,
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1)),
            )
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "AzureCosmosDB"
            assert data_feed.source.database == "adsample"
            assert data_feed.source.collection_id == "adsample"
            assert data_feed.source.sql_query == "'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'"

        finally:
            self.clean_up(client.delete_data_feed, variables)
        return variables
    async def test_create_data_feed_with_azure_cosmos_db(self):
        """Create an Azure Cosmos DB data feed (async) and verify round-trip."""
        name = self.create_random_name("cosmosfeedasync")
        async with self.admin_client:
            data_feed = None  # set before try so the finally-cleanup can test it
            try:
                data_feed = await self.admin_client.create_data_feed(
                    name=name,
                    source=AzureCosmosDbDataFeedSource(
                        connection_string=self.azure_cosmosdb_connection_string,
                        sql_query="'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'",
                        database="adsample",
                        collection_id="adsample"
                    ),
                    granularity=DataFeedGranularity(
                        granularity_type="Daily",
                    ),
                    schema=DataFeedSchema(
                        metrics=[
                            DataFeedMetric(name="cost"),
                            DataFeedMetric(name="revenue")
                        ],
                        dimensions=[
                            DataFeedDimension(name="category"),
                            DataFeedDimension(name="city")
                        ],
                    ),
                    ingestion_settings=DataFeedIngestionSettings(
                        ingestion_begin_time=datetime.datetime(2019, 10, 1),
                    ),

                )

                self.assertIsNotNone(data_feed.id)
                self.assertIsNotNone(data_feed.created_time)
                self.assertIsNotNone(data_feed.name)
                self.assertEqual(data_feed.source.data_source_type, "AzureCosmosDB")
                self.assertEqual(data_feed.source.database, "adsample")
                self.assertEqual(data_feed.source.collection_id, "adsample")
                self.assertEqual(data_feed.source.sql_query, "'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'")
            finally:
                # Guard against NameError when create_data_feed itself failed,
                # which would otherwise hide the original exception.
                if data_feed is not None:
                    await self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_mongodb(self, client, variables):
        """Create a MongoDB data feed and verify the source fields round-trip
        through the service; clean up via the recorded-variables helper.
        """
        name = self.create_random_name("mongodb")
        if self.is_live:
            variables["data_feed_name"] = name
        # Single source of truth for the command so the round-trip assertion
        # below compares against exactly what was sent.
        command = '{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}'
        try:
            data_feed = client.create_data_feed(
                name=variables["data_feed_name"],
                source=MongoDbDataFeedSource(
                    connection_string="mongodb_connection_string",
                    database="adsample",
                    command=command
                ),
                granularity=DataFeedGranularity(granularity_type="Daily", ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost"),
                        DataFeedMetric(name="revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category"),
                        DataFeedDimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1), ),
            )
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "MongoDB"
            assert data_feed.source.database == "adsample"
            # Was `assert x, "...== }"` — an always-true assert with the `==`
            # mangled into the message string; restore the equality check.
            assert data_feed.source.command == command

        finally:
            self.clean_up(client.delete_data_feed, variables)
        return variables
    async def test_create_data_feed_with_mongodb(self):
        """Create a MongoDB data feed and verify the source fields round-trip
        through the service; delete the feed afterwards.
        """
        name = self.create_random_name("mongodbasync")
        # Initialize up front: if create_data_feed raises, the finally clause
        # must not hit a NameError that would mask the original exception.
        data_feed = None
        async with self.admin_client:
            try:
                data_feed = await self.admin_client.create_data_feed(
                    name=name,
                    source=MongoDbDataFeedSource(
                        connection_string=self.mongodb_connection_string,
                        database="adsample",
                        command='{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}'
                    ),
                    granularity=DataFeedGranularity(
                        granularity_type="Daily",
                    ),
                    schema=DataFeedSchema(
                        metrics=[
                            DataFeedMetric(name="cost"),
                            DataFeedMetric(name="revenue")
                        ],
                        dimensions=[
                            DataFeedDimension(name="category"),
                            DataFeedDimension(name="city")
                        ],
                    ),
                    ingestion_settings=DataFeedIngestionSettings(
                        ingestion_begin_time=datetime.datetime(2019, 1, 1),
                    ),
                )

                self.assertIsNotNone(data_feed.id)
                self.assertIsNotNone(data_feed.created_time)
                self.assertIsNotNone(data_feed.name)
                self.assertEqual(data_feed.source.data_source_type, "MongoDB")
                self.assertEqual(data_feed.source.database, "adsample")
                self.assertEqual(data_feed.source.command, '{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}')

            finally:
                # Only clean up a feed that was actually created.
                if data_feed is not None:
                    await self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_http_request_post(self):
        """Create an HTTP-request (POST) data feed and verify the source fields
        round-trip through the service; delete the feed afterwards.
        """
        name = self.create_random_name("httprequestfeedpost")
        # Initialize up front: if create_data_feed raises, the finally clause
        # must not hit a NameError that would mask the original exception.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=name,
                source=HttpRequestDataFeed(
                    url=self.http_request_post_url,
                    http_method="POST",
                    payload="{'startTime': '@StartTime'}"
                ),
                granularity=DataFeedGranularity(
                    granularity_type="Daily",
                ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost"),
                        DataFeedMetric(name="revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category"),
                        DataFeedDimension(name="city")
                    ],
                ),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                ),
            )

            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "HttpRequest")
            self.assertIsNotNone(data_feed.source.url)
            self.assertEqual(data_feed.source.http_method, "POST")
            self.assertEqual(data_feed.source.payload, "{'startTime': '@StartTime'}")
        finally:
            # Only clean up a feed that was actually created.
            if data_feed is not None:
                self.admin_client.delete_data_feed(data_feed.id)
    def test_create_data_feed_with_azure_table(self, client, variables):
        """Create an Azure Table data feed and verify the source fields
        round-trip through the service; clean up via the recorded-variables helper.
        """
        feed_name = self.create_random_name("tablefeed")
        if self.is_live:
            variables["data_feed_name"] = feed_name
        try:
            table_source = AzureTableDataFeedSource(
                connection_string="azure_table_connection_string",
                query="PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'",
                table="adsample",
            )
            feed_schema = DataFeedSchema(
                metrics=[
                    DataFeedMetric(name="cost"),
                    DataFeedMetric(name="revenue"),
                ],
                dimensions=[
                    DataFeedDimension(name="category"),
                    DataFeedDimension(name="city"),
                ],
            )
            data_feed = client.create_data_feed(
                name=variables["data_feed_name"],
                source=table_source,
                granularity=DataFeedGranularity(granularity_type="Daily"),
                schema=feed_schema,
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                ),
            )
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "AzureTable"
            assert data_feed.source.table == "adsample"
            assert data_feed.source.query == "PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'"

        finally:
            self.clean_up(client.delete_data_feed, variables)
        return variables
    def test_create_data_feed_with_azure_blob(self, client, variables):
        """Create an Azure Blob data feed and verify the source fields
        round-trip through the service; clean up via the recorded-variables helper.
        """
        feed_name = self.create_random_name("blobfeed")
        if self.is_live:
            variables["data_feed_name"] = feed_name
        try:
            blob_source = AzureBlobDataFeedSource(
                connection_string="azure_blob_connection_string",
                container="adsample",
                blob_template="%Y/%m/%d/%h/JsonFormatV2.json",
            )
            feed_schema = DataFeedSchema(
                metrics=[
                    DataFeedMetric(name="cost"),
                    DataFeedMetric(name="revenue"),
                ],
                dimensions=[
                    DataFeedDimension(name="category"),
                    DataFeedDimension(name="city"),
                ],
            )
            data_feed = client.create_data_feed(
                name=variables["data_feed_name"],
                source=blob_source,
                granularity=DataFeedGranularity(granularity_type="Daily"),
                schema=feed_schema,
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                ),
            )
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "AzureBlob"
            assert data_feed.source.container == "adsample"
            assert data_feed.source.blob_template == "%Y/%m/%d/%h/JsonFormatV2.json"

        finally:
            self.clean_up(client.delete_data_feed, variables)
        return variables
    def test_create_data_feed_with_data_explorer(self, client, variables):
        """Create an Azure Data Explorer data feed and verify the source fields
        round-trip through the service; clean up via the recorded-variables helper.
        """
        feed_name = self.create_random_name("azuredataexplorer")
        if self.is_live:
            variables["data_feed_name"] = feed_name
        try:
            kusto_query = (
                "let StartDateTime = datetime(@StartTime); let EndDateTime = StartDateTime + 1d; "
                "adsample | where Timestamp >= StartDateTime and Timestamp < EndDateTime"
            )
            explorer_source = AzureDataExplorerDataFeedSource(
                connection_string="azure_data_explorer_connection_string",
                query=kusto_query,
            )
            feed_schema = DataFeedSchema(
                metrics=[
                    DataFeedMetric(name="cost"),
                    DataFeedMetric(name="revenue"),
                ],
                dimensions=[
                    DataFeedDimension(name="category"),
                    DataFeedDimension(name="city"),
                ],
            )
            data_feed = client.create_data_feed(
                name=variables["data_feed_name"],
                source=explorer_source,
                granularity=DataFeedGranularity(granularity_type="Daily"),
                schema=feed_schema,
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 1, 1),
                ),
            )
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "AzureDataExplorer"
            assert data_feed.source.query == kusto_query

        finally:
            self.clean_up(client.delete_data_feed, variables)
        return variables
    def test_create_data_feed_from_sql_server(self):
        """Create a SQL Server data feed with every optional setting populated,
        verify each property round-trips through the service, then delete the
        feed and confirm it is gone.
        """
        data_feed_name = self.create_random_name("testfeed")
        # Initialize up front: if create_data_feed raises, the finally clause
        # must not hit a NameError that would mask the original exception.
        data_feed = None
        try:
            data_feed = self.admin_client.create_data_feed(
                name=data_feed_name,
                source=SqlServerDataFeedSource(
                    connection_string=self.sql_server_connection_string,
                    query=
                    u"select * from adsample2 where Timestamp = @StartTime"),
                granularity=DataFeedGranularity(granularity_type="Daily", ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost",
                                       display_name="display cost",
                                       description="the cost"),
                        DataFeedMetric(name="revenue",
                                       display_name="display revenue",
                                       description="the revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category",
                                          display_name="display category"),
                        DataFeedDimension(name="city",
                                          display_name="display city")
                    ],
                    timestamp_column="Timestamp"),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                    data_source_request_concurrency=0,
                    ingestion_retry_delay=-1,
                    ingestion_start_offset=-1,
                    stop_retry_after=-1,
                ),
                admin_emails=["*****@*****.**"],
                data_feed_description="my first data feed",
                missing_data_point_fill_settings=
                DataFeedMissingDataPointFillSettings(fill_type="SmartFilling"),
                rollup_settings=DataFeedRollupSettings(
                    rollup_type="NoRollup",
                    rollup_method="None",
                ),
                viewer_emails=["viewers"],
                access_mode="Private",
                action_link_template="action link template")
            self.assertIsNotNone(data_feed.id)
            self.assertIsNotNone(data_feed.created_time)
            self.assertIsNotNone(data_feed.name)
            self.assertEqual(data_feed.source.data_source_type, "SqlServer")
            self.assertIsNotNone(data_feed.source.query)
            self.assertEqual(data_feed.granularity.granularity_type, "Daily")
            self.assertEqual(data_feed.granularity.custom_granularity_value,
                             None)
            self.assertEqual(data_feed.schema.metrics[0].name, "cost")
            self.assertEqual(data_feed.schema.metrics[1].name, "revenue")
            self.assertEqual(data_feed.schema.metrics[0].display_name,
                             "display cost")
            self.assertEqual(data_feed.schema.metrics[1].display_name,
                             "display revenue")
            self.assertEqual(data_feed.schema.metrics[0].description,
                             "the cost")
            self.assertEqual(data_feed.schema.metrics[1].description,
                             "the revenue")
            self.assertEqual(data_feed.schema.dimensions[0].name, "category")
            self.assertEqual(data_feed.schema.dimensions[1].name, "city")
            self.assertEqual(data_feed.schema.dimensions[0].display_name,
                             "display category")
            self.assertEqual(data_feed.schema.dimensions[1].display_name,
                             "display city")
            self.assertEqual(data_feed.ingestion_settings.ingestion_begin_time,
                             datetime.datetime(2019, 10, 1, tzinfo=tzutc()))
            self.assertEqual(
                data_feed.ingestion_settings.data_source_request_concurrency,
                0)
            self.assertEqual(
                data_feed.ingestion_settings.ingestion_retry_delay, -1)
            self.assertEqual(
                data_feed.ingestion_settings.ingestion_start_offset, -1)
            self.assertEqual(data_feed.ingestion_settings.stop_retry_after, -1)
            self.assertIn("*****@*****.**", data_feed.admin_emails)
            self.assertEqual(data_feed.data_feed_description,
                             "my first data feed")
            self.assertEqual(
                data_feed.missing_data_point_fill_settings.fill_type,
                "SmartFilling")
            self.assertEqual(data_feed.rollup_settings.rollup_type, "NoRollup")
            self.assertEqual(data_feed.rollup_settings.rollup_method, "None")
            self.assertEqual(data_feed.viewer_emails, ["viewers"])
            self.assertEqual(data_feed.access_mode, "Private")
            self.assertEqual(data_feed.action_link_template,
                             "action link template")
            self.assertEqual(data_feed.status, "Active")
            self.assertTrue(data_feed.is_admin)
            self.assertIsNotNone(data_feed.metric_ids)

        finally:
            # Only clean up (and verify deletion of) a feed that was created.
            if data_feed is not None:
                self.admin_client.delete_data_feed(data_feed.id)

                with self.assertRaises(ResourceNotFoundError):
                    self.admin_client.get_data_feed(data_feed.id)
    def test_create_data_feed_from_sql_server(self, client, variables):
        """Create a SQL Server data feed with every optional setting populated,
        verify each property round-trips through the service, then clean up
        and confirm the feed is gone.
        """
        data_feed_name = self.create_random_name("testfeed")
        if self.is_live:
            variables["data_feed_name"] = data_feed_name
        try:
            data_feed = client.create_data_feed(
                variables["data_feed_name"],
                source=SqlServerDataFeedSource(
                    connection_string=self.sql_server_connection_string,
                    query=
                    u"select * from adsample2 where Timestamp = @StartTime"),
                granularity=DataFeedGranularity(granularity_type="Daily", ),
                schema=DataFeedSchema(
                    metrics=[
                        DataFeedMetric(name="cost",
                                       display_name="display cost",
                                       description="the cost"),
                        DataFeedMetric(name="revenue",
                                       display_name="display revenue",
                                       description="the revenue")
                    ],
                    dimensions=[
                        DataFeedDimension(name="category",
                                          display_name="display category"),
                        DataFeedDimension(name="city",
                                          display_name="display city")
                    ],
                    timestamp_column="Timestamp"),
                ingestion_settings=DataFeedIngestionSettings(
                    ingestion_begin_time=datetime.datetime(2019, 10, 1),
                    data_source_request_concurrency=0,
                    ingestion_retry_delay=-1,
                    ingestion_start_offset=-1,
                    stop_retry_after=-1,
                ),
                admins=["*****@*****.**"],
                data_feed_description="my first data feed",
                missing_data_point_fill_settings=
                DataFeedMissingDataPointFillSettings(fill_type="SmartFilling"),
                rollup_settings=DataFeedRollupSettings(
                    rollup_type="NoRollup",
                    rollup_method="None",
                ),
                viewers=["viewers"],
                access_mode="Private",
                action_link_template="action link template")
            if self.is_live:
                variables["data_feed_id"] = data_feed.id
            assert data_feed.id is not None
            assert data_feed.created_time is not None
            assert data_feed.name is not None
            assert data_feed.source.data_source_type == "SqlServer"
            assert data_feed.source.query is not None
            assert data_feed.granularity.granularity_type == "Daily"
            assert data_feed.granularity.custom_granularity_value is None
            assert data_feed.schema.metrics[0].name == "cost"
            assert data_feed.schema.metrics[1].name == "revenue"
            assert data_feed.schema.metrics[0].display_name == "display cost"
            assert data_feed.schema.metrics[
                1].display_name == "display revenue"
            assert data_feed.schema.metrics[0].description == "the cost"
            assert data_feed.schema.metrics[1].description == "the revenue"
            assert data_feed.schema.dimensions[0].name == "category"
            assert data_feed.schema.dimensions[1].name == "city"
            assert data_feed.schema.dimensions[
                0].display_name == "display category"
            assert data_feed.schema.dimensions[
                1].display_name == "display city"
            assert data_feed.ingestion_settings.ingestion_begin_time == datetime.datetime(
                2019, 10, 1, tzinfo=tzutc())
            assert data_feed.ingestion_settings.data_source_request_concurrency == 0
            assert data_feed.ingestion_settings.ingestion_retry_delay == -1
            assert data_feed.ingestion_settings.ingestion_start_offset == -1
            assert data_feed.ingestion_settings.stop_retry_after == -1
            assert "*****@*****.**" in data_feed.admins
            assert data_feed.data_feed_description == "my first data feed"
            assert data_feed.missing_data_point_fill_settings.fill_type == "SmartFilling"
            assert data_feed.rollup_settings.rollup_type == "NoRollup"
            assert data_feed.rollup_settings.rollup_method == "None"
            assert data_feed.viewers == ["viewers"]
            assert data_feed.access_mode == "Private"
            assert data_feed.action_link_template == "action link template"
            assert data_feed.status == "Active"
            assert data_feed.is_admin
            assert data_feed.metric_ids is not None

        finally:
            self.clean_up(client.delete_data_feed, variables)

            # Guard: if creation failed before the id was captured (live mode),
            # a bare variables["data_feed_id"] would raise KeyError and mask
            # the original failure.
            if "data_feed_id" in variables:
                with pytest.raises(ResourceNotFoundError):
                    client.get_data_feed(variables["data_feed_id"])
        return variables