def test_simple_update_usage(self):
    """`obj update` loads the target Project, saves the edited copy and
    prints its id.

    Mock expectation order matters: the query must be recorded before the
    save, and ReplayAll() before invoking the CLI.
    """
    # Expect a query for a loaded Project:1 ...
    self.queries(ProjectI(1, True))
    # ... followed by a save of an unloaded proxy for the same id.
    self.saves(ProjectI(1, False))
    self.mox.ReplayAll()
    self.cli.invoke(("obj update Project:1 name=bar "
                     "description=loooong"), strict=True)
    # The command echoes the updated object's identifier.
    assert self.cli._out == ["Project:1"]
def run(password, project_name, dataset_name, host, port):
    """For each of 40 numbered users, create a Project named *project_name*
    and link that user's Dataset named *dataset_name* under it.

    Errors for one user are printed and do not stop the loop; the
    connection is always closed via ``finally``.
    """
    for user_number in range(1, 41):
        # NOTE(review): "******" looks like a redacted username template
        # (e.g. "user-%d"); as written the % raises TypeError — confirm
        # against the original script.
        username = "******" % user_number
        print(username)
        conn = BlitzGateway(username, password, host=host, port=port)
        try:
            conn.connect()
            project = ProjectI()
            project.setName(rstring(project_name))
            update_service = conn.getUpdateService()
            project = update_service.saveAndReturnObject(project)
            # Restrict the lookup to datasets owned by this user.
            ds = conn.getObject("Dataset",
                                attributes={'name': dataset_name},
                                opts={'owner': conn.getUserId()})
            if ds is None:
                print("No dataset with name %s found" % dataset_name)
                continue
            dataset_id = ds.getId()
            print(username, dataset_id)
            # Link with unloaded proxies so only the link row is saved.
            link = ProjectDatasetLinkI()
            link.setParent(ProjectI(project.getId().getValue(), False))
            link.setChild(DatasetI(dataset_id, False))
            conn.getUpdateService().saveObject(link)
        except Exception as exc:
            print("Error while creating project: %s" % str(exc))
        finally:
            conn.close()
def projects(request, itest, update_service, names):
    """
    Create and save four Projects whose names come from *names*, so tests
    can exercise sorting semantics. Returns the saved array.
    """
    unsaved = [ProjectI() for _ in range(4)]
    for idx in range(len(unsaved)):
        unsaved[idx].name = rstring(names[idx])
    return update_service.saveAndReturnArray(unsaved)
def projects(request, itest, update_service):
    """Create, save and return two Projects with uuid names."""
    saved = []
    for _ in range(2):
        proj = ProjectI()
        proj.name = rstring(itest.uuid())
        saved.append(update_service.saveAndReturnObject(proj))
    return saved
def projects_datasets(request, itest, update_service, names):
    """
    Returns four new OMERO Projects and four linked Datasets with
    required fields set and with names that can be used to exercise
    sorting semantics.
    """
    projects = [ProjectI(), ProjectI(), ProjectI(), ProjectI()]
    for index, project in enumerate(projects):
        project.name = rstring(names[index])
        # NOTE(review): the original formatting was ambiguous; this assumes
        # the datasets are created per-project (the inner `index` shadows
        # the outer one harmlessly) — confirm against the original fixture.
        datasets = [DatasetI(), DatasetI(), DatasetI(), DatasetI()]
        for index, dataset in enumerate(datasets):
            dataset.name = rstring(names[index])
            project.linkDataset(dataset)
    # Saving the projects cascades to the linked datasets.
    return update_service.saveAndReturnArray(projects)
def test_project_validation(self, user_A):
    """
    Test to demonstrate details bug on encode->decode.

    Test illustrates the ValidationException we see when Project is
    encoded to dict then decoded back to Project and saved. No exception
    is seen if the original Project is simply saved without encode &
    decode OR if the details are unloaded before saving.
    """
    conn = get_connection(user_A)
    project = ProjectI()
    project.name = rstring('test_project_validation')
    project = conn.getUpdateService().saveAndReturnObject(project)
    # Saving original Project again is OK
    conn.getUpdateService().saveObject(project)
    # encode and decode before Save raises Validation Exception
    project_json = get_encoder(project.__class__).encode(project)
    decoder = get_decoder(project_json['@type'])
    p = decoder.decode(project_json)
    with pytest.raises(ValidationException):
        conn.getUpdateService().saveObject(p)
    # Decode a fresh copy for the second attempt.
    p = decoder.decode(project_json)
    # Unloading details allows Save without exception
    p.unloadDetails()
    conn.getUpdateService().saveObject(p)
def project_datasets(self, user1): """Return Project with Datasets and an orphaned Dataset.""" # Create and name all the objects project = ProjectI() project.name = rstring('Project') # Create 5 Datasets, each with 0-4 images. for d in range(5): dataset1 = DatasetI() dataset1.name = rstring('Dataset%s' % d) for i in range(d): image = self.create_test_image(size_x=5, size_y=5, session=user1[0].getSession(), name="Image%s" % i) image = ImageI(image.id.val, False) dataset1.linkImage(image) project.linkDataset(dataset1) # Create single orphaned Dataset dataset = DatasetI() dataset.name = rstring('Dataset') project = get_update_service(user1).saveAndReturnObject(project) dataset = get_update_service(user1).saveAndReturnObject(dataset) return project, dataset
def test_unknown_class(self):
    """`tx new Project name=foo` should issue exactly one save (mocked)."""
    # Expect a single save of an unloaded Project proxy with id=1.
    self.saves(ProjectI(1, False))
    self.mox.ReplayAll()
    state = TxState(self.cli)
    cmd = TxCmd(state, arg_list=["new", "Project", "name=foo"])
    action = NewObjectTxAction(state, cmd)
    # Second argument (the transaction context) is unused here.
    action.go(self.cli, None)
def create_map_annotation(ctx, annotation, target_id, target_type="Project"): """Creates a map annotation, uploads it to Omero, and links it to target object""" # populate new MapAnnotationData object with dictionary result = ArrayList() for item in annotation: # add key:value pairs; both need to be strings result.add(NamedValue(str(item), str(annotation[item]))) data = MapAnnotationData() data.setContent(result) data.setDescription("Demo Example") #Use the following namespace if you want the annotation to be editable in the webclient and insight data.setNameSpace(MapAnnotationData.NS_CLIENT_CREATED) dm = gateway.getFacility(DataManagerFacility) target_obj = None # use the appropriate target DataObject and attach the MapAnnotationData object to it if target_type == "Project": target_obj = ProjectData(ProjectI(target_id, False)) elif target_type == "Dataset": target_obj = DatasetData(DatasetI(target_id, False)) elif target_type == "Image": target_obj = ImageData(ImageI(target_id, False)) result = dm.attachAnnotation(ctx, data, target_obj) return result
def test_project_delete(self, user1):
    """DELETE of a Project; subsequent GET/DELETE/save must report 404."""
    conn = get_connection(user1)
    user_name = conn.getUser().getName()
    django_client = self.new_django_client(user_name, user_name)
    project = ProjectI()
    project.name = rstring('test_project_delete')
    project.description = rstring('Test update')
    project = get_update_service(user1).saveAndReturnObject(project)
    version = api_settings.API_VERSIONS[-1]
    project_url = reverse('api_project',
                          kwargs={'api_version': version,
                                  'object_id': project.id.val})
    # Before delete, we can read
    pr_json = get_json(django_client, project_url)
    assert pr_json['data']['Name'] == 'test_project_delete'
    # Delete
    delete_json(django_client, project_url, {})
    # Get should now return 404
    rsp = get_json(django_client, project_url, status_code=404)
    assert rsp['message'] == 'Project %s not found' % project.id.val
    # Delete (again) should return 404
    rsp = delete_json(django_client, project_url, {}, status_code=404)
    assert rsp['message'] == 'Project %s not found' % project.id.val
    save_url = reverse('api_save', kwargs={'api_version': version})
    # TODO: Try to save deleted object - should return ApiException
    # see https://trello.com/c/qWNt9vLN/178-save-deleted-object
    # Until then, saving the deleted object does NOT 400, so the inner
    # assertion fails — hence pytest.raises(AssertionError).
    with pytest.raises(AssertionError):
        rsp = put_json(django_client, save_url, pr_json, status_code=400)
        assert rsp['message'] == 'Project %s not found' % project.id.val
def project_hierarchy_user1_group1(self, user1): """ Returns OMERO Projects with Dataset Children with Image Children Note: This returns a list of mixed objects in a specified order """ # Create and name all the objects project1 = ProjectI() project1.name = rstring('Project1') project2 = ProjectI() project2.name = rstring('Project2') dataset1 = DatasetI() dataset1.name = rstring('Dataset1') dataset2 = DatasetI() dataset2.name = rstring('Dataset2') image1 = self.new_image(name='Image1') image2 = self.new_image(name='Image2') # Link them together like so: # project1 # dataset1 # image1 # image2 # dataset2 # image2 # project2 # dataset2 # image2 project1.linkDataset(dataset1) project1.linkDataset(dataset2) project2.linkDataset(dataset2) dataset1.linkImage(image1) dataset1.linkImage(image2) dataset2.linkImage(image2) to_save = [project1, project2] projects = get_update_service(user1).saveAndReturnArray(to_save) projects.sort(key=lambda x: lower_or_none(unwrap(x.name))) datasets = projects[0].linkedDatasetList() datasets.sort(key=lambda x: lower_or_none(unwrap(x.name))) images = datasets[0].linkedImageList() images.sort(key=lambda x: lower_or_none(unwrap(x.name))) return projects + datasets + images
def run(password, project_name, dataset_names, host, port):
    """For each of 50 numbered users: create a Project named
    *project_name*, then link the user's newest Dataset matching each name
    in *dataset_names* under it.

    Per-user and per-dataset errors are printed and skipped; the
    connection is closed before moving to the next user.
    """
    for user_number in range(1, 51):
        # NOTE(review): "******" looks like a redacted username template
        # (e.g. "user-%d"); as written the % raises TypeError — confirm
        # against the original script.
        username = "******" % user_number
        print(username)
        conn = BlitzGateway(username, password, host=host, port=port)
        try:
            conn.connect()
            project = ProjectI()
            project.setName(rstring(project_name))
            update_service = conn.getUpdateService()
            project = update_service.saveAndReturnObject(project)
        except Exception as exc:
            print("Error while creating project: %s" % str(exc))
            conn.close()
            continue
        for dataset_name in dataset_names:
            try:
                params = omero.sys.ParametersI()
                params.addString('username', username)
                # make sure only one result is returned by query
                params.page(0, 1)
                query = ("from Dataset where name='%s' "
                         "AND details.owner.omeName=:username "
                         "ORDER BY id DESC" % dataset_name)
                service = conn.getQueryService()
                ds_list = service.findAllByQuery(query, params,
                                                 conn.SERVICE_OPTS)
                # BUG FIX: findAllByQuery returns a (possibly empty) list,
                # never None — the old `is None` check let an empty result
                # fall through to ds_list[0] and raise IndexError.
                if not ds_list:
                    print("No dataset with name %s found" % dataset_name)
                    continue
                dataset_id = ds_list[0].getId().getValue()
                print(username, dataset_id)
                # Unloaded proxies: only the link row is persisted.
                link = ProjectDatasetLinkI()
                link.setParent(ProjectI(project.getId().getValue(), False))
                link.setChild(DatasetI(dataset_id, False))
                conn.getUpdateService().saveObject(link)
            except Exception as exc:
                print("Error while linking dataset to project: %s"
                      % str(exc))
        conn.close()
def upload_csv_to_omero(ctx, file, tablename, target_id, target_type="Project"):
    """Upload the CSV file and attach it to the specified object.

    Writes *file* to OMERO as an OriginalFile in chunks via the raw file
    store, then links it to the target Project/Dataset/Image as a
    FileAnnotation. Python 2 / Jython code (print statements, `long`).
    """
    print file
    print file.name
    svc = gateway.getFacility(DataManagerFacility)
    file_size = os.path.getsize(file.name)
    # Describe the file to be uploaded.
    original_file = OriginalFileI()
    original_file.setName(rstring(tablename))
    original_file.setPath(rstring(file.name))
    original_file.setSize(rlong(file_size))
    checksum_algorithm = ChecksumAlgorithmI()
    checksum_algorithm.setValue(rstring(ChecksumAlgorithmSHA1160.value))
    original_file.setHasher(checksum_algorithm)
    original_file.setMimetype(rstring("text/csv"))
    original_file = svc.saveAndReturnObject(ctx, original_file)
    store = gateway.getRawFileService(ctx)
    # Open file and read stream
    store.setFileId(original_file.getId().getValue())
    print original_file.getId().getValue()
    try:
        # NOTE(review): setFileId was already called above — this repeat
        # looks redundant; confirm before removing.
        store.setFileId(original_file.getId().getValue())
        with open(file.name, 'rb') as stream:
            # Upload in 10 kB chunks; the final chunk may be shorter.
            buf = 10000
            for pos in range(0, long(file_size), buf):
                block = None
                if file_size - pos < buf:
                    block_size = file_size - pos
                else:
                    block_size = buf
                stream.seek(pos)
                block = stream.read(block_size)
                store.write(block, pos, block_size)
        original_file = store.save()
    finally:
        store.close()
    # create the file annotation
    namespace = "training.demo"
    fa = FileAnnotationI()
    fa.setFile(original_file)
    fa.setNs(rstring(namespace))
    # NOTE(review): an unrecognised target_type leaves target_obj
    # undefined, raising NameError below — confirm intended.
    if target_type == "Project":
        target_obj = ProjectData(ProjectI(target_id, False))
    elif target_type == "Dataset":
        target_obj = DatasetData(DatasetI(target_id, False))
    elif target_type == "Image":
        target_obj = ImageData(ImageI(target_id, False))
    svc.attachAnnotation(ctx, FileAnnotationData(fa), target_obj)
def project_userA(request, userA, groupA):
    """Create, save and return one Project owned by userA in groupA."""
    group_ctx = {'omero.group': str(groupA.id.val)}
    proj = ProjectI()
    proj.name = rstring("test_tree_annnotations")
    return get_update_service(userA).saveAndReturnObject(proj, group_ctx)
def project_dataset(request, itest, update_service):
    """Save and return a new Project linked to a new Dataset, both with
    uuid names."""
    proj = ProjectI()
    proj.name = rstring(itest.uuid())
    ds = DatasetI()
    ds.name = rstring(itest.uuid())
    proj.linkDataset(ds)
    # Saving the project cascades to the linked dataset.
    return update_service.saveAndReturnObject(proj)
def test_project_update(self, user1):
    """Update a Project via the JSON api_save url, two ways:
    1) GET it, modify a field, PUT it back (other fields preserved);
    2) PUT a minimal payload from scratch (unspecified fields cleared).
    """
    conn = get_connection(user1)
    group = conn.getEventContext().groupId
    user_name = conn.getUser().getName()
    django_client = self.new_django_client(user_name, user_name)
    project = ProjectI()
    project.name = rstring('test_project_update')
    project.description = rstring('Test update')
    project = get_update_service(user1).saveAndReturnObject(project)
    # Update Project in 2 ways...
    version = api_settings.API_VERSIONS[-1]
    project_url = reverse('api_project', kwargs={
        'api_version': version,
        'object_id': project.id.val
    })
    save_url = reverse('api_save', kwargs={'api_version': version})
    # 1) Get Project, update and save back
    rsp = get_json(django_client, project_url)
    project_json = rsp['data']
    assert project_json['Name'] == 'test_project_update'
    project_json['Name'] = 'new name'
    rsp = put_json(django_client, save_url, project_json)
    project_json = rsp['data']
    assert project_json['@id'] == project.id.val
    assert project_json['Name'] == 'new name'    # Name has changed
    assert project_json['Description'] == 'Test update'    # No change
    # 2) Put from scratch (will delete empty fields, E.g. Description)
    save_url += '?group=' + str(group)
    payload = {'Name': 'updated name', '@id': project.id.val}
    # Test error message if we don't pass @type:
    rsp = put_json(django_client, save_url, payload, status_code=400)
    assert rsp['message'] == 'Need to specify @type attribute'
    # Add @type and try again
    payload['@type'] = project_json['@type']
    rsp = put_json(django_client, save_url, payload)
    project_json = rsp['data']
    assert project_json['@id'] == project.id.val
    assert project_json['Name'] == 'updated name'
    assert 'Description' not in project_json
    # Get project again to check update
    rsp = get_json(django_client, project_url)
    pr_json = rsp['data']
    assert pr_json['Name'] == 'updated name'
    assert 'Description' not in pr_json
    # Now add description and save again
    pr_json['Description'] = 'New test description update'
    put_json(django_client, save_url, pr_json)
    # Read to check
    rsp = get_json(django_client, project_url)
    assert rsp['data']['Description'] == 'New test description update'
def post_dataset(conn, dataset_name, project_id=None, description=None):
    """Create a new dataset.

    Parameters
    ----------
    conn : ``omero.gateway.BlitzGateway`` object
        OMERO connection.
    dataset_name : str
        Name of the Dataset being created.
    project_id : int, optional
        Id of Project in which to create the Dataset. If no Project is
        specified, the Dataset will be orphaned.
    description : str
        Description for the new Dataset.

    Returns
    -------
    dataset_id : int
        Id of the dataset that has been created.

    Raises
    ------
    TypeError
        If ``dataset_name``/``description`` is not a string or
        ``project_id`` is not an int.

    Examples
    --------
    Create a new orphaned Dataset:

    >>> did = post_dataset(conn, "New Dataset")
    >>> did
    234

    Create a new Dataset in Project:120:

    >>> did = post_dataset(conn, "Child of 120", project_id=120)
    >>> did
    """
    # FIX: use isinstance instead of `type(...) is not ...` so that
    # subclasses of str/int are accepted (PEP 8 recommended check).
    if not isinstance(dataset_name, str):
        raise TypeError('Dataset name must be a string')
    if description is not None and not isinstance(description, str):
        raise TypeError('Dataset description must be a string')
    dataset = DatasetWrapper(conn, DatasetI())
    dataset.setName(dataset_name)
    if description is not None:
        dataset.setDescription(description)
    dataset.save()
    if project_id is not None:
        if not isinstance(project_id, int):
            raise TypeError('Project ID must be integer')
        # Link the new dataset under the project using unloaded proxies.
        link = ProjectDatasetLinkI()
        link.setParent(ProjectI(project_id, False))
        link.setChild(DatasetI(dataset.getId(), False))
        conn.getUpdateService().saveObject(link)
    return dataset.getId()
def projects_userA(request, userA, groupA):
    """Save and return two Projects ("test_ann1", "test_ann2") owned by
    userA in groupA."""
    group_ctx = {'omero.group': str(groupA.id.val)}
    unsaved = []
    for proj_name in ("test_ann1", "test_ann2"):
        proj = ProjectI()
        proj.name = rstring(proj_name)
        unsaved.append(proj)
    return get_update_service(userA).saveAndReturnArray(unsaved, group_ctx)
def create_new_dataset(ctx, project_id, ds_name):
    """Create a Dataset named *ds_name*, link it under Project
    *project_id*, and return the new dataset's id."""
    new_ds = omero.model.DatasetI()
    new_ds.setName(rstring(ds_name))
    new_ds = gateway.getUpdateService(ctx).saveAndReturnObject(new_ds)
    ds_id = new_ds.getId().getValue()
    # Link the dataset to the target project via the DataManager facility.
    manager = gateway.getFacility(DataManagerFacility)
    ds_link = ProjectDatasetLinkI()
    ds_link.setChild(new_ds)
    ds_link.setParent(ProjectI(project_id, False))
    manager.saveAndReturnObject(ctx, ds_link)
    return ds_id
def projects_dataset_image_tag(self):
    """
    Returns 2 new OMERO Projects, linked Dataset and linked Image
    populated by an L{test.integration.library.ITest} instance with
    required fields set. Also a Tag linked to both Projects.
    """
    project1 = ProjectI()
    project1.name = rstring(f'P1_{self.uuid()}')
    project2 = ProjectI()
    project2.name = rstring(f'P2_{self.uuid()}')
    dataset = DatasetI()
    dataset.name = rstring(f'D{self.uuid()}')
    image = self.new_image(f'I{self.uuid()}')
    dataset.linkImage(image)
    # The single dataset is shared between both projects.
    project1.linkDataset(dataset)
    project2.linkDataset(dataset)
    # One tag, linked to both projects (used by chgrp tests).
    tag = TagAnnotationI()
    tag.textValue = rstring("ChgrpTag")
    project1.linkAnnotation(tag)
    project2.linkAnnotation(tag)
    return self.update.saveAndReturnArray([project1, project2])
def annotate_project(ann, project, user):
    """
    Returns userA's Tag linked to userB's Project by userA and userB
    """
    # Save into the project's own group.
    ctx = {'omero.group': str(project.details.group.id.val)}
    # Python 2 print statement — this module predates py3.
    print "annotate_project", ctx
    link = ProjectAnnotationLinkI()
    # Unloaded proxy: only the link row is persisted.
    link.parent = ProjectI(project.id.val, False)
    link.child = ann
    update = get_connection(user).getUpdateService()
    link = update.saveAndReturnObject(link, ctx)
    return link
def projects_user2_group1(request, names2, user2):
    """
    Save Projects named from *names2* for user2, returned sorted
    case-insensitively by name (for sorting-semantics tests).
    """
    unsaved = []
    for proj_name in names2:
        proj = ProjectI()
        proj.name = rstring(proj_name)
        unsaved.append(proj)
    saved = get_update_service(user2).saveAndReturnArray(unsaved)
    # py2-style comparator sort, as used elsewhere in this module.
    saved.sort(cmp_name_insensitive)
    return saved
def test_project(self):
    """Tests BlitzObjectWrapper.getName() returns string"""
    # Non-ASCII name/description exercise unicode handling: the getX()
    # accessors return utf-8 encoded bytes (py2-era behaviour), while the
    # plain attributes return the unicode value.
    name = u'Pròjëct ©ψ'
    desc = u"Desc Φωλ"
    project = ProjectI()
    project.name = rstring(name)
    project.description = rstring(desc)
    proj = MockConnection(project).getObject("Project", 1)
    assert proj.getName() == name.encode('utf8')
    assert proj.name == name
    assert proj.getDescription() == desc.encode('utf8')
    assert proj.description == desc
def project_dataset_image(request, itest, update_service):
    """
    Save and return a new Project linked to a Dataset containing one
    Image, all with uuid names (populated by an ITest instance).
    """
    proj = ProjectI()
    proj.name = rstring(itest.uuid())
    ds = DatasetI()
    ds.name = rstring(itest.uuid())
    img = itest.new_image(name=itest.uuid())
    ds.linkImage(img)
    proj.linkDataset(ds)
    # Saving the project cascades to the dataset and image.
    return update_service.saveAndReturnObject(proj)
def add_projects_key_values(gateway, key_values, project_ids, group_id, description=None):
    """Attach *key_values* as a map annotation to each project in
    *project_ids* (a single id is also accepted)."""
    map_data = _dict_to_map_annotation(key_values, description)
    data_manager, ctx = _data_manager_generator(gateway, group_id)
    # Normalise a single id to a one-element list.
    if not hasattr(project_ids, '__iter__'):
        project_ids = [project_ids]
    for proj_id in project_ids:
        ann_link = ProjectAnnotationLinkI()
        ann_link.setChild(map_data.asAnnotation())
        ann_link.setParent(ProjectI(proj_id, False))
        data_manager.saveAndReturnObject(ctx, ann_link)
def projects_user1_group2(request, names3, user1, group2):
    """
    Save Projects named from *names3* as user1 in group2, returned sorted
    case-insensitively by name (for sorting-semantics tests).
    """
    unsaved = []
    for proj_name in names3:
        proj = ProjectI()
        proj.name = rstring(proj_name)
        unsaved.append(proj)
    conn = get_connection(user1, group2.id.val)
    saved = conn.getUpdateService().saveAndReturnArray(unsaved,
                                                       conn.SERVICE_OPTS)
    saved.sort(key=lambda p: lower_or_none(unwrap(p.name)))
    return saved
def projects_user1_group1(request, names1, user1, project_hierarchy_user1_group1):
    """
    Save Projects named from *names1*, merge in the first two projects
    from the hierarchy fixture, and return all sorted case-insensitively
    by name (for sorting-semantics tests).
    """
    unsaved = []
    for proj_name in names1:
        proj = ProjectI()
        proj.name = rstring(proj_name)
        unsaved.append(proj)
    saved = get_update_service(user1).saveAndReturnArray(unsaved)
    saved += project_hierarchy_user1_group1[:2]
    saved.sort(key=lambda p: lower_or_none(unwrap(p.name)))
    return saved
def testChgrpDatasetToTargetProject(self, credentials): """ Tests that an Admin can move a user's Dataset to a private group and link it to an existing user's Project there. Also tests that the user can do the same chgrp themselves. """ # One user in two groups client, user = self.new_client_and_user(perms=PRIVATE) target_grp = self.new_group([user], perms=PRIVATE) eCtx = client.sf.getAdminService().getEventContext() # Reset session userId = eCtx.userId target_gid = target_grp.id.val # User creates Dataset in current group... update = client.sf.getUpdateService() ds = self.make_dataset(client=client) # ...and Project in target group ctx = {'omero.group': str(target_gid)} pr = self.new_project() pr = update.saveAndReturnObject(pr, ctx) requests = [] saves = [] chgrp = Chgrp2(targetObjects={"Dataset": [ds.id.val]}, groupId=target_gid) requests.append(chgrp) link = ProjectDatasetLinkI() link.details.owner = ExperimenterI(userId, False) link.child = DatasetI(ds.id.val, False) link.parent = ProjectI(pr.id.val, False) save = Save() save.obj = link saves.append(save) requests.extend(saves) if credentials == "user": c = client else: c = self.root self.doSubmit(requests, c, omero_group=target_gid) queryService = client.sf.getQueryService() ctx = {'omero.group': '-1'} # query across groups dataset = queryService.get('Dataset', ds.id.val, ctx) ds_gid = dataset.details.group.id.val assert target_gid == ds_gid,\ "Dataset should be in group: %s, NOT %s" % (target_gid, ds_gid)
def datasets(request, itest, update_service, names):
    """
    Save and return four Datasets named from *names* (for sorting
    semantics). Also saves one extra project-linked Dataset so queries can
    be checked against the presence of non-orphaned datasets.
    """
    unsaved = [DatasetI() for _ in range(4)]
    for idx in range(len(unsaved)):
        unsaved[idx].name = rstring(names[idx])
    # Non-orphaned Dataset to catch issues with queries where non-orphaned
    # datasets are included in the results.
    proj = ProjectI()
    proj.name = rstring(itest.uuid())
    linked_ds = DatasetI()
    linked_ds.name = rstring(itest.uuid())
    proj.linkDataset(linked_ds)
    update_service.saveAndReturnObject(proj)
    return update_service.saveAndReturnArray(unsaved)
def add_project_tag(gateway, tag_text, project_id, description=None):
    """Create a tag annotation from *tag_text* and link it to the given
    project; returns the saved link."""
    data_manager = gateway.getFacility(DataManagerFacility)
    current_user = gateway.getLoggedInUser()
    ctx = SecurityContext(current_user.getGroupId())
    # Build the tag payload.
    tag_data = TagAnnotationData(tag_text)
    if description:
        tag_data.setTagDescription(description)
    # Link it to the project (unloaded proxy: only the link is saved).
    tag_link = ProjectAnnotationLinkI()
    tag_link.setChild(tag_data.asAnnotation())
    tag_link.setParent(ProjectI(project_id, False))
    return data_manager.saveAndReturnObject(ctx, tag_link)