def test_can_serialize_work_unit(self):
    """A regular WorkUnit has an ID and an entry in the database."""
    tracked = ExampleSample(name="S1")
    tracked.save()

    unit = WorkUnit()
    unit.tracked_object = tracked
    unit.save()

    serialized = WorkUnitSerializer(unit).data
    assert serialized['id'] == unit.id
    assert serialized['tracked_object']['id'] == tracked.id
def test_can_serialize_external_work_unit(self):
    """
    WorkUnits can be external only. In this case they only exist in the
    external workflow engine, as the system hasn't picked them up yet.
    """
    tracked = ExampleSample(name="S1")
    tracked.save()

    # Note: the work unit itself is deliberately never saved locally.
    unit = WorkUnit(external_work_unit_id="abc",
                    external_workflow_instance_id="123")
    unit.tracked_object = tracked

    serialized = WorkUnitSerializer(unit).data
    assert serialized['id'] == unit.id
    assert serialized['tracked_object']['id'] == tracked.id
def test_can_assign_substances(self):
    """
    Assigns a container of samples to a workflow and verifies that one
    delivered assignment and one work unit is created per sample.
    """
    # Create a handler so we model the highest level of interacting with the API, the same
    # way plugin developers would interact with it:
    sample_count = 5
    self.toggle_log_level()
    cont = ExamplePlate(name="cont1")
    for x in range(1, sample_count + 1):
        sample = ExampleSample(name="sample-{}".format(x))
        cont.append(sample)
    cont.save()

    workflow = SequenceSimple()
    workflow.comment = "Let's sequence some stuff"
    workflow.assign(cont)

    # We should see one assignment per sample
    samples_created = {x.id for x in cont.contents}
    delivered = SubstanceAssignment.objects.filter(
        status=SubstanceAssignment.STATUS_DELIVERED)
    assigned_samples = {x.substance.id for x in delivered}
    assert samples_created == assigned_samples

    # Use sample_count rather than a magic number, so the expectation stays
    # in sync with the number of samples created above.
    work_units = workflow.wait_for_work_units("DataEntry", sample_count)
    tracked_objects_in_workflow_engine = [w.tracked_object.id for w in work_units]
    assert len(tracked_objects_in_workflow_engine) == sample_count
def bootstrap(self):
    """Set up fixtures: a sample in one container, a second empty container
    and a work batch, then resolve the endpoint URL and log in."""
    # One sample placed in the first container:
    self.sample1 = ExampleSample(name="Sample:{}".format(uuid4()))
    self.sample1.save()
    self.cont1 = PandorasBox(name="Container:{}".format(uuid4()))
    self.cont1["a1"] = self.sample1
    self.cont1.save()

    # A second, empty container to move things into:
    self.cont2 = PandorasBox(name="Container:{}".format(uuid4()))
    self.cont2.save()

    self.workbatch = ExampleWorkBatch(name="WorkBatch:{}".format(uuid4()))
    self.workbatch.save()

    self.url = reverse(
        WorkBatchTransitionsEndpoint.name,
        args=(self.cont1.organization.name, self.workbatch.id))
    self.login_as(self.user)
def create_available_work(count):
    """Start a workflow with `count` samples so that there is work
    available, i.e. the workflow ends up waiting for user intervention."""
    plate = ExamplePlate(name="cont1")
    for ix in range(count):
        plate.append(ExampleSample(name="sample-{}".format(ix + 1)))
    plate.save()

    workflow = SequenceSimple()
    workflow.comment = "Let's sequence some stuff"
    workflow.assign(plate)
def test_get(self):
    """Fetching work units by work definition returns one entry per
    assigned sample, and a second fetch also succeeds."""
    # 1. Create some work:
    count = 3
    cont = ExamplePlate(name="cont1")
    for x in range(1, count + 1):
        sample = ExampleSample(name="sample-{}".format(x))
        cont.append(sample)
    cont.save()
    created_samples_ids = {s.id for s in cont.contents}

    workflow = SequenceSimple()
    workflow.comment = "Let's sequence some stuff"
    workflow.assign(cont)

    # 2. Wait for the work to be ready:
    self.login_as(self.user)
    work_def_id = "clims.plugins.demo.dnaseq.workflows.sequence.SequenceSimple:DataEntry"
    url = reverse(WorkUnitsByWorkDefinitionEndpoint.name, args=(work_def_id, ))
    # Use `count` rather than a magic number so the wait stays in sync with
    # the number of samples created above.
    response = self.wait_for_endpoint_list(url, count)
    assert response.status_code == 200
    data = response.json()
    assert len(data) == count
    for entry in data:
        assert len(entry) == 6
        assert entry["workflow_provider"] == "camunda"
        assert len(entry["external_work_unit_id"]) == 36  # expecting guid string
        assert entry["work_type"] == "clims.plugins.demo.dnaseq.workflows.sequence.DataEntry"
        assert entry["external_workflow_instance_id"]
        tracked_object_id = entry["tracked_object"]["id"]
        # remove() raises KeyError if the endpoint returns an unexpected id
        created_samples_ids.remove(int(tracked_object_id))

    # Make sure we can do a second successful query. This is because getting fresh work units
    # for the first time will save them locally, so the state has changed in the meantime
    response = self.wait_for_endpoint_list(url, count)
    assert response.status_code == 200
def test_post(self):
    """Creating a work batch from a list of work unit IDs returns 201."""
    # TODO: Fix this test, or an equivalent one.
    self.login_as(self.user)

    sample_count = 3
    plate = ExamplePlate(name="cont1")
    for ix in range(1, sample_count + 1):
        plate.append(ExampleSample(name="sample-{}".format(ix)))
    plate.save()

    workflow = SequenceSimple()
    workflow.comment = "Let's sequence some stuff"
    workflow.assign(plate)

    # Wait until the work unit endpoint returns these items:
    work_units_url = reverse(WorkUnitsEndpoint.name, args=(self.organization.name, ))
    # Wait for a large enough response from the endpoint
    resp = self.wait_for_endpoint_list(work_units_url, sample_count)
    payload = {
        "work_units": [entry["id"] for entry in resp.json()],
    }

    work_batch_url = reverse(WorkBatchEndpoint.name, args=(self.organization.name, ))
    response = self.client.post(
        path=work_batch_url,
        data=json.dumps(payload),
        content_type='application/json',
    )
    assert response.status_code == 201
def test_creates_assignment_entries(self):
    """Assigning a saved substance creates a delivered assignment entry."""
    workflow = SequenceSimple()
    sample = ExampleSample(name="SomeSample")
    sample.save()
    workflow.assign(sample)

    # The test previously had no assertion at all, so it could never fail.
    # Verify an assignment entry was actually created for the sample,
    # mirroring the check done in test_can_assign_substances.
    delivered = SubstanceAssignment.objects.filter(
        status=SubstanceAssignment.STATUS_DELIVERED)
    assert sample.id in {entry.substance.id for entry in delivered}
def test_can_not_assign_non_existing_substances(self):
    """Assigning a substance that was never saved raises AssignmentError."""
    # Keep the setup outside the raises-context: only the assign call should
    # be expected to raise, otherwise a setup failure of the same exception
    # type would make the test pass for the wrong reason.
    workflow = SequenceSimple()
    sample = ExampleSample()  # deliberately never saved
    with pytest.raises(self.app.workflows.AssignmentError):
        workflow.assign(sample)
class TestWorkBatchTransitions(APITestCase):
    """Tests for POSTing transitions (e.g. moves) to a work batch."""

    def setUp(self):
        self.has_context()
        # so we can use the domain classes defined in the demo plugin, e.g. StuffoMagico
        self.install_main_demo_plugin()

    # TODO: Add a DELETE method too. It will be used when
    # the frontend component needs to cancel some of those
    # transitions. It will also have to remove the sample
    # from the container it created. Note that if the user
    # goes back and forths with this a lot it can generate
    # a significant amount of data, so such samples should
    # be soft deleted too so they can be removed in a cleanup
    # job if users want that.
    @pytest.mark.xfail
    def test_delete(self):
        raise NotImplementedError()

    def bootstrap(self):
        """Create a sample in cont1, an empty cont2 and a work batch, then
        resolve the endpoint URL and log in."""
        self.sample1 = ExampleSample(name="Sample:{}".format(uuid4()))
        self.sample1.save()
        self.cont1 = PandorasBox(name="Container:{}".format(uuid4()))
        self.cont1["a1"] = self.sample1
        self.cont1.save()
        self.cont2 = PandorasBox(name="Container:{}".format(uuid4()))
        self.cont2.save()
        self.workbatch = ExampleWorkBatch(name="WorkBatch:{}".format(uuid4()))
        self.workbatch.save()
        self.url = reverse(
            WorkBatchTransitionsEndpoint.name,
            args=(self.cont1.organization.name, self.workbatch.id))
        self.login_as(self.user)

    def _post_move(self, source_index, target_index):
        """POST a single 'move' transition from cont1[source_index] to
        cont2[target_index] and return the response.

        Extracted because the three POST tests duplicated this payload
        construction verbatim.
        """
        payload = {
            "transitions": [{
                "type": "move",
                "source_position": {
                    "container_id": self.cont1.id,
                    "index": source_index,
                },
                "target_position": {
                    "container_id": self.cont2.id,
                    "index": target_index,
                },
            }]
        }
        return self.client.post(
            path=self.url,
            data=json.dumps(payload),
            content_type='application/json',
        )

    def test_post(self):
        self.bootstrap()
        response = self._post_move("a1", "b2")
        assert response.status_code == status.HTTP_201_CREATED

    def test_post__invalid(self):
        self.bootstrap()
        # "z100" is not a valid position in the source container
        response = self._post_move("z100", "b2")
        # TODO: this should be HTTP_400_BAD_REQUEST
        assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR

    def test_post__invalid_source_substance(self):
        self.bootstrap()
        # "c1" is a position that holds no substance
        response = self._post_move("c1", "c2")
        # TODO: this should be HTTP_400_BAD_REQUEST
        assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR