class TestDummyEnvironment(DatabaseFixture):

    def setUp(self):
        super().setUp()
        self.env = DummyTaskEnvironment()

    def test_get_performance(self):
        assert self.env.get_performance() == 0.0

        # given
        perf = 1234.5
        p = Performance(environment_id=DummyTaskEnvironment.get_id(),
                        value=perf)
        p.save()

        # then
        self.assertEqual(self.env.get_performance(), perf)

    def test_get_min_accepted_performance_default(self):
        self.assertEqual(MinPerformanceMultiplier.get(), 0.0)
        self.assertEqual(self.env.get_min_accepted_performance(), 0.0)

    def test_get_min_accepted_performance(self):
        # given
        p = Performance(environment_id=DummyTaskEnvironment.get_id(),
                        min_accepted_step=100)
        p.save()
        MinPerformanceMultiplier.set(3.141)

        # then
        self.assertEqual(MinPerformanceMultiplier.get(), 3.141)
        self.assertEqual(self.env.get_min_accepted_performance(), 314.1)

    def test_main_program_file(self):
        assert path.isfile(DummyTaskEnvironment().main_program_file)
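# Note: a minimal sketch, not the project's implementation. The assertions in
# test_get_min_accepted_performance above only hold if the environment derives
# its minimum accepted performance as
#     Performance.min_accepted_step * MinPerformanceMultiplier,
# i.e. 100 * 3.141 == 314.1. The helper below is hypothetical and restates
# that relationship; the actual logic lives in the Environment/Performance
# classes of the project.
def _expected_min_accepted_performance(min_accepted_step, multiplier):
    return min_accepted_step * multiplier


assert abs(_expected_min_accepted_performance(100, 3.141) - 314.1) < 1e-9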
def __init__(self):
    self._normalization_constant = 1000  # TODO tweak that. issue #1356
    self.dummy_task_path = join(get_golem_path(),
                                "apps", "dummy", "test_data")

    td = self._task_definition = DummyTaskDefinition(DummyTaskDefaults())
    td.shared_data_files = [join(self.dummy_task_path, x)
                            for x in td.shared_data_files]
    td.out_file_basename = td.out_file_basename
    td.task_id = str(uuid.uuid4())
    td.main_program_file = DummyTaskEnvironment().main_program_file
    td.resources = {join(self.dummy_task_path, "in.data")}
    td.add_to_resources()

    self.verification_options = {
        "difficulty": td.options.difficulty,
        "shared_data_files": td.shared_data_files,
        "result_size": td.result_size,
        "result_extension": DummyTask.RESULT_EXT,
    }

    verification_data = dict()
    self.verification_options["subtask_id"] = "DummyBenchmark"
    verification_data['subtask_info'] = self.verification_options
    self.verifier = DummyTaskVerifier(verification_data)

    self.subtask_data = \
        DummyTask.TESTING_CHAR * td.options.subtask_data_size
def __init__(self):
    super(DummyTaskOptions, self).__init__()
    self.environment = DummyTaskEnvironment()
    self.subtask_data_size = 128  # length of subtask-specific hex number

    # The difficulty is a 4 byte int; 0xffffffff is the greatest
    # and 0x00000000 is the least difficulty.
    # For example difficulty 0xffff0000 requires
    # 0xffffffff / (0xffffffff - 0xffff0000) = 65537
    # hash computations on average.
    self.difficulty = 0xffff0000
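# Sanity check of the arithmetic in the comment above. The helper below is a
# hypothetical illustration, not part of DummyTaskOptions: it restates the
# stated formula for the expected number of hash computations at a given
# difficulty threshold.
def _expected_hash_attempts(difficulty):
    return 0xffffffff // (0xffffffff - difficulty)


# 0xffffffff - 0xffff0000 == 65535, and 0xffffffff // 65535 == 65537,
# matching the figure quoted in the comment.
assert _expected_hash_attempts(0xffff0000) == 65537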
def test_run_benchmark_fail(self, *_):
    from apps.dummy.dummyenvironment import DummyTaskEnvironment

    def raise_exc(*_args, **_kwargs):
        raise Exception('Test exception')

    with patch("golem.docker.image.DockerImage.is_available",
               return_value=True), \
            patch("golem.docker.job.DockerJob.__init__",
                  side_effect=raise_exc), \
            self.assertRaisesRegex(Exception, 'Test exception'):
        sync_wait(self.client.run_benchmark(DummyTaskEnvironment.get_id()))