def test_run_error_raise_sys_exit(self):
    """run() must terminate with SystemExit for unusable workflow files.

    Covers both a missing file and an empty workflow definition.
    """
    for fixture_name in ("test_nonexistent.json", "test_empty.json"):
        with testfixtures.LogCapture():
            app = BDSSApplication(False, fixtures.get(fixture_name))
            with self.assertRaises(SystemExit):
                app.run()
def test_nonexistent_file(self):
    """_load_workflow raises FileNotFoundError and logs it for a missing path."""
    missing_path = fixtures.get("test_nonexistent.json")
    with testfixtures.LogCapture() as log:
        with warnings.catch_warnings():
            # Promote warnings to errors so construction stays warning-free.
            warnings.simplefilter("error")
            app = BDSSApplication(False, missing_path)
        # Drop any records emitted during construction; we only want the
        # error produced by the failed load below.
        log.clear()
        with self.assertRaises(FileNotFoundError):
            app._load_workflow()
        log.check(
            ('force_bdss.app.bdss_application',
             'ERROR',
             "Unable to open workflow file '{}'.".format(missing_path)))
def test_from_json(self):
    """ExecutionLayer.from_json round-trips the fixture's data-source entry."""
    with open(fixtures.get("test_probe.json")) as fp:
        raw = json.load(fp)

    layer_data = {"data_sources": raw["workflow"]["execution_layers"][0]}
    layer = ExecutionLayer.from_json(self.registry, layer_data)

    # The input dict is expected to be left untouched by from_json.
    expected_source = {
        "id": "force.bdss.enthought.plugin.test.v0."
              "factory.probe_data_source",
        "model_data": {
            "input_slot_info": [
                {"source": "Environment", "name": "foo"}
            ],
            "output_slot_info": [
                {"name": "bar"}
            ],
        },
    }
    self.assertDictEqual(expected_source, layer_data["data_sources"][0])

    # After adding the slot metadata the model injects, the layer's
    # serialized state matches the augmented input.
    layer_data["data_sources"][0]["model_data"].update({
        "input_slots_type": "PRESSURE",
        "output_slots_type": "PRESSURE",
        "input_slots_size": 1,
        "output_slots_size": 1,
    })
    self.assertDictEqual(layer.__getstate__(), layer_data)
def test__internal_evaluate(self):
    """_internal_evaluate on the probe workflow yields a single KPI value."""
    wf_file = ProbeWorkflowFile(path=fixtures.get("test_probe.json"))
    wf_file.read()
    kpis = wf_file.workflow._internal_evaluate([1.0])
    self.assertEqual(1, len(kpis))
def test__extract_execution_layers(self):
    """_extract_execution_layers builds one layer with one wired data source."""
    registry = DummyFactoryRegistry()
    with open(fixtures.get("test_workflow_reader.json")) as fp:
        workflow_data = json.load(fp)["workflow"]

    layers = Workflow._extract_execution_layers(registry, workflow_data)

    # Exactly one layer holding a single dummy data source.
    self.assertEqual(1, len(layers))
    self.assertIsInstance(layers[0], ExecutionLayer)
    self.assertEqual(1, len(layers[0].data_sources))

    source_model = layers[0].data_sources[0]
    self.assertIsInstance(source_model, DummyDataSourceModel)

    # One named input slot...
    inputs = source_model.input_slot_info
    self.assertEqual(1, len(inputs))
    self.assertIsInstance(inputs[0], InputSlotInfo)
    self.assertEqual("input_slot_name", inputs[0].name)

    # ...and one named output slot.
    outputs = source_model.output_slot_info
    self.assertEqual(1, len(outputs))
    self.assertIsInstance(outputs[0], OutputSlotInfo)
    self.assertEqual("output_slot_name", outputs[0].name)
def test_non_valid_file(self):
    """Running an invalid workflow raises and logs every verification error."""
    self.operation.workflow_file = ProbeWorkflowFile(
        path=fixtures.get("test_null.json"))
    self.operation.workflow_file.read()

    expected_records = [
        ("force_bdss.app.base_operation", "ERROR",
         "Unable to execute workflow due to verification errors:"),
        ("force_bdss.app.base_operation", "ERROR",
         "The MCO has no defined parameters"),
        ("force_bdss.app.base_operation", "ERROR",
         "The MCO has no defined KPIs"),
        ("force_bdss.app.base_operation", "ERROR",
         "The number of input slots (1 values) returned by "
         "'test_data_source' does "
         "not match the number of user-defined names specified "
         "(0 values). This is "
         "either a plugin error or a file error."),
        ("force_bdss.app.base_operation", "ERROR",
         "The number of output slots (1 values) returned by "
         "'test_data_source' does "
         "not match the number of user-defined names specified "
         "(0 values). This is "
         "either a plugin error or a file error."),
    ]

    with testfixtures.LogCapture() as log:
        with self.assertRaises(RuntimeError):
            self.operation.run()
        log.check(*expected_records)
def test_workflow(self):
    """_load_workflow yields a Workflow in both evaluate and normal mode."""
    for evaluate_flag in (False, True):
        with testfixtures.LogCapture():
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                app = BDSSApplication(
                    evaluate_flag, fixtures.get("test_empty.json"))
                app._load_workflow()
                self.assertIsInstance(app.workflow_file.workflow, Workflow)
def test_persistent_wfdata(self):
    """Workflow.from_json must not mutate the dict it is given."""
    registry = DummyFactoryRegistry()
    with open(fixtures.get("test_workflow_reader.json")) as fp:
        data = json.load(fp)
    snapshot = deepcopy(data)

    Workflow.from_json(registry, data["workflow"])

    # The source data is unchanged after deserialization.
    self.assertDictEqual(data, snapshot)
def test_assign_workflow_file(self):
    """Reading a workflow file populates the operation's workflow."""
    operation = OptimizeOperation()
    operation.workflow_file = ProbeWorkflowFile()
    # No workflow until the file has been read.
    self.assertIsNone(operation.workflow)

    operation.workflow_file.path = fixtures.get("test_empty.json")
    operation.workflow_file.read()
    self.assertIsNotNone(operation.workflow)
def setUp(self):
    """Build a probe workflow and a dummy engine that evaluates it."""
    self.plugin = {"id": "pid", "name": "Plugin"}
    self.factory = DummyMCOFactory(self.plugin)

    probe_file = ProbeWorkflowFile(path=fixtures.get("test_probe.json"))
    probe_file.read()
    self.workflow = probe_file.workflow

    self.parameters = [1, 1, 1, 1]
    self.optimizer_engine = DummyOptimizerEngine(
        single_point_evaluator=self.workflow)
def test_from_json(self):
    """Workflow.from_json produces the expected serialized state."""
    registry = DummyFactoryRegistry()
    with open(fixtures.get("test_workflow_reader.json")) as fp:
        data = json.load(fp)

    workflow = Workflow.from_json(registry, data["workflow"])

    expected_state = {
        "mco_model": {
            "id": "force.bdss.enthought.plugin.test.v0.factory.dummy_mco",
            "model_data": {
                "parameters": [
                    {
                        "id": "force.bdss.enthought.plugin.test.v0.factory."
                              "dummy_mco.parameter.dummy_mco_parameter",
                        "model_data": {"x": 0, "name": "", "type": ""},
                    }
                ],
                "kpis": [],
            },
        },
        "notification_listeners": [
            {
                "id": "force.bdss.enthought.plugin.test.v0.factory."
                      "dummy_notification_listener",
                "model_data": {},
            }
        ],
        "execution_layers": [
            {
                "data_sources": [
                    {
                        "id": "force.bdss.enthought.plugin.test.v0."
                              "factory.dummy_data_source",
                        "model_data": {
                            "input_slot_info": [
                                {
                                    "name": "input_slot_name",
                                    "source": "Environment",
                                }
                            ],
                            "output_slot_info": [
                                {"name": "output_slot_name"}
                            ],
                        },
                    }
                ]
            }
        ],
    }
    self.assertDictEqual(workflow.__getstate__(), expected_state)
def test_write(self):
    """write() succeeds after read(); fails clearly without a writer."""
    # Normal round trip: read then write back.
    self.workflow_file.path = fixtures.get("test_empty.json")
    self.workflow_file.read()
    self.workflow_file.write()

    # Removing the writer makes write() fail with a descriptive error.
    self.workflow_file.writer = None
    with self.assertRaisesRegex(AttributeError,
                                "No workflow writer specified."):
        self.workflow_file.write()
def test__extract_notification_listeners(self):
    """_extract_notification_listeners yields one listener model."""
    registry = DummyFactoryRegistry()
    with open(fixtures.get("test_workflow_reader.json")) as fp:
        workflow_data = json.load(fp)["workflow"]

    listeners = Workflow._extract_notification_listeners(
        registry, workflow_data)

    self.assertEqual(1, len(listeners))
    self.assertIsInstance(listeners[0], BaseNotificationListenerModel)
def test_notify_driver_event(self):
    """Events from data sources and the MCO model propagate to the workflow."""
    wf_file = ProbeWorkflowFile(path=fixtures.get("test_probe.json"))
    wf_file.read()
    wf = wf_file.workflow

    # A data-source notification fires the workflow's event trait once.
    with self.assertTraitChanges(wf, "event", count=1):
        wf.execution_layers[0].data_sources[0].notify(BaseDriverEvent())

    # So does a notification from the MCO model.
    with self.assertTraitChanges(wf, "event", count=1):
        wf.mco_model.notify(BaseDriverEvent())
def test__extract_mco_model(self):
    """_extract_mco_model builds the factory's model with one parameter."""
    registry = DummyFactoryRegistry()
    with open(fixtures.get("test_workflow_reader.json")) as fp:
        workflow_data = json.load(fp)["workflow"]

    mco_model = Workflow._extract_mco_model(registry, workflow_data)

    # The model class comes from the registry's (only) MCO factory.
    expected_class = registry.mco_factories[0].model_class
    self.assertIsInstance(mco_model, expected_class)

    # No KPIs, a single dummy parameter.
    self.assertEqual(0, len(mco_model.kpis))
    self.assertEqual(1, len(mco_model.parameters))
    self.assertIsInstance(mco_model.parameters[0], DummyMCOParameter)
def test_run_empty_workflow(self):
    """Running an empty workflow raises and logs the verification errors."""
    self.operation.workflow_file = ProbeWorkflowFile(
        path=fixtures.get("test_empty.json"))
    self.operation.workflow_file.read()

    expected_records = [
        ("force_bdss.app.base_operation", "ERROR",
         "Unable to execute workflow due to verification errors:"),
        ("force_bdss.app.base_operation", "ERROR",
         "Workflow has no MCO"),
        ("force_bdss.app.base_operation", "ERROR",
         "Workflow has no execution layers"),
    ]

    with testfixtures.LogCapture() as log:
        with self.assertRaises(RuntimeError):
            self.operation.run()
        log.check(*expected_records)
def test_read(self):
    """read() parses a valid file; missing file / missing reader both fail."""
    # Normal behaviour: a valid file yields a Workflow instance.
    self.workflow_file.path = fixtures.get("test_empty.json")
    self.workflow_file.read()
    self.assertIsInstance(self.workflow_file.workflow, Workflow)

    # A non-existent path surfaces as FileNotFoundError.
    self.workflow_file.path = "foo/bar"
    with self.assertRaisesRegex(FileNotFoundError,
                                "No such file or directory: 'foo/bar'"):
        self.workflow_file.read()

    # With no reader configured, read() fails with a descriptive error.
    self.workflow_file.reader = None
    with self.assertRaisesRegex(AttributeError,
                                "No workflow reader specified."):
        self.workflow_file.read()
def test_notify_events(self):
    """Each MCO notification fires both the model's and the workflow's event."""
    wf_file = ProbeWorkflowFile(path=fixtures.get("test_probe.json"))
    wf_file.read()
    wf = wf_file.workflow
    mco = wf.mco_model

    # Start event propagates once on each level.
    with self.assertTraitChanges(wf, "event", count=1):
        with self.assertTraitChanges(mco, "event", count=1):
            mco.notify_start_event()

    # Finish event behaves the same way.
    with self.assertTraitChanges(wf, "event", count=1):
        with self.assertTraitChanges(mco, "event", count=1):
            mco.notify_finish_event()

    # Progress event carries input and output data values.
    with self.assertTraitChanges(wf, "event", count=1):
        with self.assertTraitChanges(mco, "event", count=1):
            mco.notify_progress_event(
                [DataValue(value=2), DataValue(value=3)],
                [DataValue(value=4), DataValue(value=5)],
            )
def test_run_workflow_error(self):
    """_run_workflow on an empty workflow raises and logs every error."""
    expected_records = [
        ('force_bdss.app.base_operation', 'ERROR',
         'Unable to execute workflow due to verification errors:'),
        ('force_bdss.app.base_operation', 'ERROR',
         'Workflow has no MCO'),
        ('force_bdss.app.base_operation', 'ERROR',
         'Workflow has no execution layers'),
        ('force_bdss.app.bdss_application', 'ERROR',
         'Error running workflow.'),
    ]

    with testfixtures.LogCapture() as log:
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            app = BDSSApplication(False, fixtures.get("test_empty.json"))
            app._load_workflow()
        # Only the records from the failed run are of interest.
        log.clear()
        with self.assertRaises(Exception):
            app._run_workflow()
        log.check(*expected_records)
def test_read_version_1(self):
    """The reader still understands the legacy version-1 file format."""
    legacy_path = fixtures.get("test_workflow_reader_v1.json")
    workflow = self.wfreader.read(legacy_path)

    self.assertEqual(1, len(workflow.execution_layers))
    self.assertEqual(1, len(workflow.execution_layers[0].data_sources))
def setUp(self):
    """Prepare an OptimizeOperation loaded with the probe workflow."""
    self.operation = OptimizeOperation()
    self.operation.workflow_file = ProbeWorkflowFile(
        path=fixtures.get("test_probe.json"))
    self.operation.workflow_file.read()
    # Expose the reader's registry for the individual tests.
    self.registry = self.operation.workflow_file.reader.factory_registry
def setUp(self):
    """Create a reader over the dummy registry plus a known-good fixture."""
    self.registry = DummyFactoryRegistry()
    self.wfreader = WorkflowReader(self.registry)
    self.working_data = fixtures.get("test_workflow_reader.json")
def test_from_path_classmethod(self):
    """from_path constructs a ProbeWorkflowFile for an existing fixture."""
    created = ProbeWorkflowFile.from_path(
        path=fixtures.get("test_empty.json"))
    self.assertIsInstance(created, ProbeWorkflowFile)