def storage_account():
    """Generator fixture: provision a resource group and a storage account,
    publish them on ``StorageTestCase`` class attributes, yield to the test,
    then tear everything down in reverse order.

    The preparers are driven by hand (``live_test`` / ``test_class_instance``
    are assigned directly) because no preparer decorator wraps this fixture.
    """
    harness = AzureMgmtTestCase("__init__")
    group_preparer = ResourceGroupPreparer()
    account_preparer = StorageAccountPreparer(name_prefix='pyacrstorage')

    # Manually wire up what the preparer decorators would normally set.
    for preparer in (group_preparer, account_preparer):
        preparer.live_test = False
        preparer.test_class_instance = harness

    group_name = create_random_name("pystorage", 24)
    account_name = create_random_name("pyacrstorage", 24)
    try:
        group_kwargs = group_preparer.create_resource(group_name)
        StorageTestCase._RESOURCE_GROUP = group_kwargs['resource_group']
        try:
            account_kwargs = account_preparer.create_resource(
                account_name, resource_group=group_kwargs['resource_group'])
            # Expose the provisioned resources to the tests.
            StorageTestCase._STORAGE_ACCOUNT = account_kwargs['storage_account']
            StorageTestCase._STORAGE_KEY = account_kwargs['storage_account_key']
            yield
        finally:
            # Inner cleanup: remove the storage account and clear its refs.
            account_preparer.remove_resource(
                account_name, resource_group=group_kwargs['resource_group'])
            StorageTestCase._STORAGE_ACCOUNT = None
            StorageTestCase._STORAGE_KEY = None
    finally:
        # Outer cleanup: remove the resource group last.
        group_preparer.remove_resource(group_name)
        StorageTestCase._RESOURCE_GROUP = None
def _create_data_feed(self, name):
    """Create and return a daily SQL-server data feed whose name is
    randomized from ``name``.

    The feed carries two metrics (cost, revenue), two dimensions
    (category, city), and begins ingestion at 2019-10-01.
    """
    feed_name = create_random_name(name)
    sql_source = SQLServerDataFeed(
        connection_string=self.sql_server_connection_string,
        query='select * from adsample2 where Timestamp = @StartTime'
    )
    daily = DataFeedGranularity(
        granularity_type="Daily",
    )
    feed_schema = DataFeedSchema(
        metrics=[
            DataFeedMetric(name="cost"),
            DataFeedMetric(name="revenue")
        ],
        dimensions=[
            DataFeedDimension(name="category"),
            DataFeedDimension(name="city")
        ],
        timestamp_column="Timestamp"
    )
    ingestion = DataFeedIngestionSettings(
        ingestion_begin_time=datetime.datetime(2019, 10, 1),
    )
    feed = DataFeed(
        name=feed_name,
        source=sql_source,
        granularity=daily,
        schema=feed_schema,
        ingestion_settings=ingestion
    )
    return self.admin_client.create_data_feed(feed)
async def _create_alert_config_for_update(self, name):
    """Create a data feed, a detection config, and a three-part alert
    configuration used as the starting point for update tests.

    Returns an ``(alert_config, data_feed, detection_config)`` tuple.  If
    anything fails after the data feed exists, the data feed is deleted
    before the exception propagates so live resources are not leaked.
    """
    # BUGFIX: guard so the cleanup handler never touches an unbound name.
    # Previously, if _create_data_feed_and_detection_config itself raised,
    # the except block hit UnboundLocalError on ``data_feed``, masking the
    # real failure.
    data_feed = None
    try:
        detection_config, data_feed = await self._create_data_feed_and_detection_config(name)
        alert_config_name = create_random_name(name)
        alert_config = await self.admin_client.create_alert_configuration(
            name=alert_config_name,
            cross_metrics_operator="AND",
            metric_alert_configurations=[
                # TopN scope with a metric boundary condition on "cost"
                MetricAlertConfiguration(
                    detection_configuration_id=detection_config.id,
                    alert_scope=MetricAnomalyAlertScope(
                        scope_type="TopN",
                        top_n_group_in_scope=TopNGroupScope(
                            top=5,
                            period=10,
                            min_top_count=9
                        )
                    ),
                    alert_conditions=MetricAnomalyAlertConditions(
                        metric_boundary_condition=MetricBoundaryCondition(
                            direction="Both",
                            companion_metric_id=data_feed.metric_ids['cost'],
                            lower=1.0,
                            upper=5.0
                        )
                    )
                ),
                # Series-group scope with a severity condition
                MetricAlertConfiguration(
                    detection_configuration_id=detection_config.id,
                    alert_scope=MetricAnomalyAlertScope(
                        scope_type="SeriesGroup",
                        series_group_in_scope={'city': 'Shenzhen'}
                    ),
                    alert_conditions=MetricAnomalyAlertConditions(
                        severity_condition=SeverityCondition(
                            min_alert_severity="Low",
                            max_alert_severity="High"
                        )
                    )
                ),
                # Whole-series scope with a severity condition
                MetricAlertConfiguration(
                    detection_configuration_id=detection_config.id,
                    alert_scope=MetricAnomalyAlertScope(
                        scope_type="WholeSeries"
                    ),
                    alert_conditions=MetricAnomalyAlertConditions(
                        severity_condition=SeverityCondition(
                            min_alert_severity="Low",
                            max_alert_severity="High"
                        )
                    )
                )
            ],
            hook_ids=[]
        )
        return alert_config, data_feed, detection_config
    except Exception:
        if data_feed is not None:
            # BUGFIX: the delete must be awaited — every other call on this
            # async admin_client in this method is awaited, so the original
            # un-awaited call produced a never-run coroutine and no cleanup.
            await self.admin_client.delete_data_feed(data_feed.id)
        # Bare ``raise`` preserves the original traceback.
        raise
def create_random_name(self, prefix, length):
    """Return a name for a test resource.

    In recording mode a genuinely random name is produced and registered
    with the name replacer so that recordings contain the deterministic
    counter-based moniker; in playback mode the moniker is used directly.
    """
    self.test_resources_count += 1
    moniker = '{}{:06}'.format(prefix, self.test_resources_count)
    if not self.in_recording:
        # Playback: the deterministic alias is the name.
        return moniker
    random_name = create_random_name(prefix, length)
    # Map the live random name onto the stable moniker for playback files.
    self.name_replacer.register_name_pair(random_name, moniker)
    return random_name
def create_random_name(self, prefix, length):
    """Produce a test-resource name.

    Recording runs get a real random name, paired with a deterministic
    counter-based moniker via the name replacer so playback matches;
    non-recording runs just get the moniker.
    """
    self.test_resources_count += 1
    # Deterministic alias written into playback recordings.
    moniker = '{}{:06}'.format(prefix, self.test_resources_count)
    if self.in_recording:
        live_name = create_random_name(prefix, length)
        # Register the pair so the recording shows the stable moniker.
        self.name_replacer.register_name_pair(live_name, moniker)
        return live_name
    return moniker
def _create_detection_config_for_update(self, name):
    """Create a data feed plus a richly-configured anomaly detection
    configuration used as the starting point for update tests.

    Returns a ``(detection_config, data_feed)`` tuple.  If the detection
    config cannot be created, the already-created data feed is deleted
    before the exception propagates.
    """
    # BUGFIX: guard so the cleanup handler never touches an unbound name.
    # Previously, if _create_data_feed itself raised, the except block hit
    # UnboundLocalError on ``data_feed``, masking the real failure.
    data_feed = None
    try:
        data_feed = self._create_data_feed(name)
        detection_config_name = create_random_name("testupdated")
        detection_config = self.admin_client.create_detection_configuration(
            name=detection_config_name,
            metric_id=data_feed.metric_ids['cost'],
            description="My test metric anomaly detection configuration",
            # Conditions applied to the whole series: all three condition
            # types combined with AND.
            whole_series_detection_condition=MetricDetectionCondition(
                condition_operator="AND",
                smart_detection_condition=SmartDetectionCondition(
                    sensitivity=50,
                    anomaly_detector_direction="Both",
                    suppress_condition=SuppressCondition(min_number=5, min_ratio=5)),
                hard_threshold_condition=HardThresholdCondition(
                    anomaly_detector_direction="Both",
                    suppress_condition=SuppressCondition(min_number=5, min_ratio=5),
                    lower_bound=0,
                    upper_bound=100),
                change_threshold_condition=ChangeThresholdCondition(
                    change_percentage=50,
                    shift_point=30,
                    within_range=True,
                    anomaly_detector_direction="Both",
                    suppress_condition=SuppressCondition(min_number=2, min_ratio=2))),
            # Per-series override for one specific (city, category) key.
            series_detection_conditions=[
                MetricSingleSeriesDetectionCondition(
                    series_key={
                        "city": "Shenzhen",
                        "category": "Jewelry"
                    },
                    smart_detection_condition=SmartDetectionCondition(
                        anomaly_detector_direction="Both",
                        sensitivity=63,
                        suppress_condition=SuppressCondition(
                            min_number=1, min_ratio=100)))
            ],
            # Group-level override for every series in one city.
            series_group_detection_conditions=[
                MetricSeriesGroupDetectionCondition(
                    series_group_key={"city": "Sao Paulo"},
                    smart_detection_condition=SmartDetectionCondition(
                        anomaly_detector_direction="Both",
                        sensitivity=63,
                        suppress_condition=SuppressCondition(
                            min_number=1, min_ratio=100)))
            ])
        return detection_config, data_feed
    except Exception:
        if data_feed is not None:
            self.admin_client.delete_data_feed(data_feed.id)
        # Bare ``raise`` preserves the original traceback.
        raise
async def _create_data_feed_and_detection_config(self, name):
    """Create a data feed plus a minimal smart-detection configuration on
    its 'cost' metric; return them as ``(detection_config, data_feed)``.
    """
    feed = await self._create_data_feed(name)
    whole_series = MetricDetectionCondition(
        smart_detection_condition=SmartDetectionCondition(
            sensitivity=50,
            anomaly_detector_direction="Both",
            suppress_condition=SuppressCondition(min_number=5, min_ratio=5)
        )
    )
    config = await self.admin_client.create_detection_configuration(
        name=create_random_name(name),
        metric_id=feed.metric_ids['cost'],
        description="testing",
        whole_series_detection_condition=whole_series
    )
    return config, feed
def _create_data_feed_for_update(self, name):
    """Create a fully-populated daily SQL-server data feed (display names,
    rollup/fill settings, viewer list, etc.) used as the starting point
    for update tests; the feed name is randomized from ``name``.
    """
    feed_name = create_random_name(name)
    sql_source = SQLServerDataFeed(
        connection_string=self.sql_server_connection_string,
        query='select * from adsample2 where Timestamp = @StartTime'
    )
    feed_schema = DataFeedSchema(
        metrics=[
            DataFeedMetric(name="cost", display_name="display cost", description="the cost"),
            DataFeedMetric(name="revenue", display_name="display revenue", description="the revenue")
        ],
        dimensions=[
            DataFeedDimension(name="category", display_name="display category"),
            DataFeedDimension(name="city", display_name="display city")
        ],
        timestamp_column="Timestamp"
    )
    ingestion = DataFeedIngestionSettings(
        ingestion_begin_time=datetime.datetime(2019, 10, 1),
        data_source_request_concurrency=0,
        ingestion_retry_delay=-1,
        ingestion_start_offset=-1,
        stop_retry_after=-1,
    )
    extra_options = DataFeedOptions(
        admin_emails=["*****@*****.**"],
        data_feed_description="my first data feed",
        missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
            fill_type="SmartFilling"
        ),
        rollup_settings=DataFeedRollupSettings(
            rollup_type="NoRollup",
            rollup_method="None",
        ),
        viewer_emails=["viewers"],
        access_mode="Private",
        action_link_template="action link template"
    )
    return self.admin_client.create_data_feed(
        DataFeed(
            name=feed_name,
            source=sql_source,
            granularity=DataFeedGranularity(
                granularity_type="Daily",
            ),
            schema=feed_schema,
            ingestion_settings=ingestion,
            options=extra_options
        )
    )
def _create_data_feed(self, name):
    """Create a minimal daily SQL-server data feed via the flat keyword
    API, with a randomized name derived from ``name``.
    """
    sql_source = SqlServerDataFeedSource(
        connection_string=self.sql_server_connection_string,
        query="select * from adsample2 where Timestamp = @StartTime")
    feed_schema = DataFeedSchema(
        metrics=[
            DataFeedMetric(name="cost"),
            DataFeedMetric(name="revenue")
        ],
        dimensions=[
            DataFeedDimension(name="category"),
            DataFeedDimension(name="city")
        ],
    )
    return self.admin_client.create_data_feed(
        name=create_random_name(name),
        source=sql_source,
        granularity="Daily",
        schema=feed_schema,
        ingestion_settings="2019-10-01T00:00:00Z",
    )