Example #1
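The snippets below are test methods from an Apache Beam pipeline test case; the captured examples omit the surrounding module (imports, the test-case class, and the self.vmsample / self.multiple_vmsamples fixtures defined elsewhere in the original suite). A minimal sketch of that scaffolding is shown here, assuming a standard unittest layout; the project-specific import paths and the class name are guesses, not taken from the original.

import random
import unittest

import apache_beam as beam
from apache_beam.runners.direct import direct_runner
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to

# Project-specific modules; these import paths are hypothetical.
# from simulation import config_pb2
# from simulation.transformations import FilterVMSample, SetAbstractMetrics


class SimulationPipelineTest(unittest.TestCase):  # hypothetical class name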
    def test_random_scheduler(self):
        simulation_config = config_pb2.SimulationConfig()
        simulation_config.scheduler.at_random.num_machines = 100
        simulation_config.scheduler.at_random.seed = 11

        random.seed(simulation_config.scheduler.at_random.seed)
        simulated_machine = random.randint(
            0, simulation_config.scheduler.at_random.num_machines)

        vmsample = [
            {
                "simulated_time": 300_000_000,
                "simulated_machine": 1,
                "sample": {
                    "time": 300_000_000,
                    "info": {
                        "unique_id": "1-2",
                        "machine_id": 3
                    },
                    "metrics": {
                        "avg_cpu_usage": 0.8,
                    },
                    "abstract_metrics": {
                        "usage": 1,
                        "limit": 1
                    },
                },
            },
        ]
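        # The captured example ends here: `simulated_machine` and `vmsample`
        # are built but never fed through a pipeline. A hedged sketch of the
        # missing half, assuming a ScheduleVMSample transform analogous to
        # FilterVMSample (the transform name and expected output are
        # hypothetical), would look like:
        #
        # with TestPipeline(runner=direct_runner.BundleBasedDirectRunner()) as p:
        #     input_vmsample = p | "Create scheduler test input" >> beam.Create(vmsample)
        #     output = ScheduleVMSample(input_vmsample, simulation_config)
        #     assert_that(output, equal_to(expected_output))
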
    def test_priority_filter(self):
        simulation_config = config_pb2.SimulationConfig()
        simulation_config.filter.priority_range.lower_bound = 3
        simulation_config.filter.priority_range.upper_bound = 7
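        # With the priority range set to [3, 7], only the priority-4 and
        # priority-6 samples from self.multiple_vmsamples should survive.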

        correct_output = [
            {
                "time": 20,
                "info": {
                    "priority": 4,
                    "scheduling_class": 1,
                    "machine_id": 200,
                    "alloc_collection_id": 1,
                },
            },
            {
                "time": 30,
                "info": {
                    "priority": 6,
                    "scheduling_class": 2,
                    "machine_id": 300,
                    "alloc_collection_id": 0,
                },
            },
        ]

        with TestPipeline(runner=direct_runner.BundleBasedDirectRunner()) as p:
            input_vmsamples = p | "Create priority test input" >> beam.Create(
                self.multiple_vmsamples)
            output = FilterVMSample(input_vmsamples, simulation_config)

            assert_that(output, equal_to(correct_output))

    def test_all_filters(self):
        simulation_config = config_pb2.SimulationConfig()
        simulation_config.filter.remove_non_top_level_vms = True
        simulation_config.filter.priority_range.lower_bound = 1
        simulation_config.filter.priority_range.upper_bound = 7
        simulation_config.filter.scheduling_class_range.lower_bound = 0
        simulation_config.filter.scheduling_class_range.upper_bound = 1
        simulation_config.filter.start_time = 5
        simulation_config.filter.end_time = 15
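        # Only the time-10 sample satisfies every filter: priority 2 is in
        # [1, 7], scheduling class 0 is in [0, 1], time 10 falls in the
        # [5, 15] window, and alloc_collection_id 0 marks it as top level.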

        correct_output = [
            {
                "time": 10,
                "info": {
                    "priority": 2,
                    "scheduling_class": 0,
                    "machine_id": 100,
                    "alloc_collection_id": 0,
                },
            },
        ]

        with TestPipeline(runner=direct_runner.BundleBasedDirectRunner()) as p:
            input_vmsamples = p | "Create all filter input" >> beam.Create(
                self.multiple_vmsamples)
            output = FilterVMSample(input_vmsamples, simulation_config)

            assert_that(output, equal_to(correct_output))

    def test_top_level_filter(self):
        simulation_config = config_pb2.SimulationConfig()
        simulation_config.filter.remove_non_top_level_vms = True
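        # remove_non_top_level_vms drops the sample whose alloc_collection_id
        # is non-zero, keeping the two top-level VMs from the fixture.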

        correct_output = [
            {
                "time": 10,
                "info": {
                    "priority": 2,
                    "scheduling_class": 0,
                    "machine_id": 100,
                    "alloc_collection_id": 0,
                },
            },
            {
                "time": 30,
                "info": {
                    "priority": 6,
                    "scheduling_class": 2,
                    "machine_id": 300,
                    "alloc_collection_id": 0,
                },
            },
        ]

        with TestPipeline(runner=direct_runner.BundleBasedDirectRunner()) as p:
            input_vmsamples = p | "Create top level filter input" >> beam.Create(
                self.multiple_vmsamples)
            output = FilterVMSample(input_vmsamples, simulation_config)

            assert_that(output, equal_to(correct_output))
Example #5
    def setUp(self):
        self.simulation_config = config_pb2.SimulationConfig()
        self.simulation_config.reset_and_shift.reset_time_to_zero = True
        self.simulation_config.reset_and_shift.random_shift.lower_bound = 0
        self.simulation_config.reset_and_shift.random_shift.upper_bound = 600
        self.simulation_config.reset_and_shift.seed = 11

        random.seed(self.simulation_config.reset_and_shift.seed)
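        # Reproduce the shift the pipeline is expected to draw: a seeded
        # random number of minutes in [lower_bound, upper_bound), rounded
        # down to a multiple of 5, then converted to microseconds.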
        self.random_shift = _MinutesToMicroseconds(5 * (random.randrange(
            self.simulation_config.reset_and_shift.random_shift.lower_bound,
            self.simulation_config.reset_and_shift.random_shift.upper_bound,
        ) // 5))

    def test_time_filter(self):
        simulation_config = config_pb2.SimulationConfig()
        simulation_config.filter.start_time = 15
        simulation_config.filter.end_time = 25
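        # With the window set to [15, 25], only the time-20 sample from
        # self.multiple_vmsamples should remain.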

        correct_output = [
            {
                "time": 20,
                "info": {
                    "priority": 4,
                    "scheduling_class": 1,
                    "machine_id": 200,
                    "alloc_collection_id": 1,
                },
            },
        ]

        with TestPipeline(runner=direct_runner.BundleBasedDirectRunner()) as p:
            input_vmsamples = p | "Create time test input" >> beam.Create(
                self.multiple_vmsamples)
            output = FilterVMSample(input_vmsamples, simulation_config)

            assert_that(output, equal_to(correct_output))
Example #7
    def test_schedule_by_machine_id(self):
        simulation_config = config_pb2.SimulationConfig()
        simulation_config.scheduler.by_machine_id = True

        vmsample = [
            {
                "simulated_time": 300_000_000,
                "simulated_machine": 1,
                "sample": {
                    "time": 300_000_000,
                    "info": {
                        "unique_id": "1-2",
                        "machine_id": 3
                    },
                    "metrics": {
                        "avg_cpu_usage": 0.8,
                    },
                    "abstract_metrics": {
                        "usage": 1,
                        "limit": 1
                    },
                },
            },
        ]
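        # As with test_random_scheduler, the captured example stops after
        # building the fixture; the by_machine_id scheduler transform itself
        # (name unknown here) would be exercised with a TestPipeline in the
        # same way the filter tests above do.
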
    def test_setting_memory_metric(self):
        simulation_config = config_pb2.SimulationConfig()
        simulation_config.metric.max_memory_usage = True
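        # With max_memory_usage selected as the metric, the abstract metrics
        # in the expected output mirror max_memory_usage (usage = 0.1) and
        # memory_limit (limit = 0.8).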
        correct_output = [{
            "simulated_time": 1,
            "simulated_machine": 1,
            "sample": {
                "time": 300000000,
                "info": {
                    "unique_id": "1-2",
                    "collection_id": 1,
                    "instance_index": 2,
                    "priority": 6,
                    "scheduling_class": 3,
                    "machine_id": 3,
                    "alloc_collection_id": 0,
                    "alloc_instance_index": 5,
                    "collection_type": 0,
                },
                "metrics": {
                    "avg_cpu_usage": 0.8,
                    "avg_memory_usage": 8,
                    "max_cpu_usage": 0.1,
                    "max_memory_usage": 0.1,
                    "random_sample_cpu_usage": 0.11,
                    "random_sample_memory_usage": 12,
                    "assigned_memory": 13,
                    "sample_rate": 17,
                    "p0_cpu_usage": 0,
                    "p10_cpu_usage": 0.1,
                    "p20_cpu_usage": 0.2,
                    "p30_cpu_usage": 0.3,
                    "p40_cpu_usage": 0.4,
                    "p50_cpu_usage": 0.5,
                    "p60_cpu_usage": 0.6,
                    "p70_cpu_usage": 0.7,
                    "p80_cpu_usage": 0.8,
                    "p90_cpu_usage": 0.9,
                    "p91_cpu_usage": 0.91,
                    "p92_cpu_usage": 0.92,
                    "p93_cpu_usage": 0.93,
                    "p94_cpu_usage": 0.94,
                    "p95_cpu_usage": 0.95,
                    "p96_cpu_usage": 0.96,
                    "p97_cpu_usage": 0.97,
                    "p98_cpu_usage": 0.98,
                    "p99_cpu_usage": 0.99,
                    "memory_limit": 0.8,
                    "cpu_limit": 0.6,
                },
                "abstract_metrics": {
                    "usage": 0.1,
                    "limit": 0.8
                },
            },
        }]

        with TestPipeline(runner=direct_runner.BundleBasedDirectRunner()) as p:
            input_vmsample = p | "Create test input" >> beam.Create(
                self.vmsample)
            output = SetAbstractMetrics(input_vmsample, simulation_config)
            assert_that(output, equal_to(correct_output))
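
Assuming the snippets live in a single unittest module as sketched under Example #1, the suite can be run with the standard entry point:

if __name__ == "__main__":
    unittest.main()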