def test_build_allocations_from_rules(rules, expected_tasks_allocations):
    """Verify that allocation rules are expanded into per-task allocations.

    ``rules`` and ``expected_tasks_allocations`` come from a parametrize
    decorator outside this view.
    """
    # NOTE(review): both tasks reference the '/t1' cgroup path; confirm the
    # second entry was not meant to use '/t2'.
    tasks = {
        't1_task_id': task_data('/t1', labels={'foo': 'bar'}),
        't2_task_id': task_data('/t1'),
    }
    assert _build_allocations_from_rules(tasks, rules) == expected_tasks_allocations
def test_static_allocator(allocate_according_rules_mock, load_config_mock):
    """StaticAllocator.allocate returns empty results and calls each mocked
    helper (rule application and config loading) exactly once."""
    allocator = StaticAllocator(
        config='somefile',
        rules=[{'allocations': {'cpu_quota': 0.5}}],
    )
    platform = Mock()
    tasks = {
        't1_task_id': task_data(
            '/t1', labels={'foo': 'bar'}, resources={'cpu_quota': 1.0}),
    }
    # allocate() is expected to produce no allocations, anomalies or metrics.
    assert allocator.allocate(platform, tasks) == ({}, [], [])
    allocate_according_rules_mock.assert_called_once()
    load_config_mock.assert_called_once()
platform_mock, allocation_configuration=AllocationConfiguration( cpu_quota_period=1000)) } assert {} == _get_tasks_allocations(containers) @pytest.mark.parametrize( 'allocations, tasks_data, expected', (({ 't1_task_id': { AllocationType.SHARES: 10 } }, { 't1_task_id': task_data('/t1') }, { 't1_task_id': task_data('/t1', allocations={AllocationType.SHARES: 10}) }), ({ 't1_task_id': { AllocationType.SHARES: 10 }, 't2_task_id': { AllocationType.SHARES: 20 } }, { 't1_task_id': task_data('/t1') }, { 't1_task_id': task_data('/t1', allocations={AllocationType.SHARES: 10}) }), ({}, {
runner._iterate() sleep_mock.assert_called_with(0.7) assert runner._last_iteration == 1.3 with patch('time.time', return_value=2.5): runner._iterate() sleep_mock.assert_called_with(0) _task_cpu_usage_metadata = METRICS_METADATA[MetricName.TASK_CPU_USAGE_SECONDS] @pytest.mark.parametrize( 'tasks_data, expected_metrics', [({}, []), ({ 't1_task_id': task_data('/t1', labels={'app': 'redis'}) }, []), ({ 't1_task_id': task_data( '/t1', labels={'app': 'redis'}, measurements={'task_cpu_usage_seconds': DEFAULT_METRIC_VALUE}) }, [ Metric(MetricName.TASK_CPU_USAGE_SECONDS, labels={'app': 'redis'}, value=DEFAULT_METRIC_VALUE, unit=_task_cpu_usage_metadata.unit, granularity=_task_cpu_usage_metadata.granularity, help=_task_cpu_usage_metadata.help, type=_task_cpu_usage_metadata.type)
# distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pytest from wca.detectors import convert_anomalies_to_metrics from tests.testing import anomaly, anomaly_metrics, task_data @pytest.mark.parametrize('anomalies,tasks_data,expected_metrics', ( ([], {}, []), ([anomaly('t1', ['t2'])], { 't1': task_data('/t1', labels={'workload_instance': 't1_workload_instance'}), 't2': task_data('/t2', labels={'workload_instance': 't2_workload_instance'}) }, anomaly_metrics('t1', ['t2'], { 't1': 't1_workload_instance', 't2': 't2_workload_instance' })), ([anomaly('t2', ['t1', 't3'])], { 't1': task_data('/t1', labels={'workload_instance': 't1_workload_instance'}), 't2': task_data('/t2', labels={'workload_instance': 't2_workload_instance'}), 't3': task_data('/t3', labels={'workload_instance': 't3_workload_instance'}) },