def project_with_datasets(project):
    """Attach two numbered Datasets (ids 1 and 2) to *project* and return it."""
    for idx in (1, 2):
        ds = DatasetI()
        ds.id = rlong(idx)
        ds.name = rstring('dataset_name_%d' % idx)
        ds.description = rstring('dataset_description_%d' % idx)
        project.linkDataset(ds)
    return project
def project_dataset(request, itest, update_service):
    """
    Returns a new OMERO Project and linked Dataset with required fields set.
    """
    proj = ProjectI()
    proj.name = rstring(itest.uuid())
    ds = DatasetI()
    ds.name = rstring(itest.uuid())
    proj.linkDataset(ds)
    return update_service.saveAndReturnObject(proj)
def project_dataset(self):
    """
    Returns a new OMERO Project and linked Dataset with required fields set.
    """
    proj = ProjectI()
    proj.name = rstring(self.uuid())
    ds = DatasetI()
    ds.name = rstring(self.uuid())
    proj.linkDataset(ds)
    return self.update.saveAndReturnObject(proj)
def post_dataset(conn, dataset_name, project_id=None, description=None):
    """Create a new dataset.

    Parameters
    ----------
    conn : ``omero.gateway.BlitzGateway`` object
        OMERO connection.
    dataset_name : str
        Name of the Dataset being created.
    project_id : int, optional
        Id of Project in which to create the Dataset. If no Project is
        specified, the Dataset will be orphaned.
    description : str
        Description for the new Dataset.

    Returns
    -------
    dataset_id : int
        Id of the dataset that has been created.

    Raises
    ------
    TypeError
        If ``dataset_name``/``description`` is not a string, or
        ``project_id`` is not an integer.

    Examples
    --------
    Create a new orphaned Dataset:

    >>> did = post_dataset(conn, "New Dataset")
    >>> did
    234

    Create a new Dataset in Project:120:

    >>> did = post_dataset(conn, "Child of 120", project_id=120)
    >>> did
    """
    # Validate before touching the server so bad input fails fast.
    if not isinstance(dataset_name, str):
        raise TypeError('Dataset name must be a string')
    if description is not None and not isinstance(description, str):
        raise TypeError('Dataset description must be a string')

    dataset = DatasetWrapper(conn, DatasetI())
    dataset.setName(dataset_name)
    if description is not None:
        dataset.setDescription(description)
    dataset.save()

    if project_id is not None:
        # Exclude bool explicitly: isinstance(True, int) is True, but the
        # original contract rejected non-int types such as bool.
        if not isinstance(project_id, int) or isinstance(project_id, bool):
            raise TypeError('Project ID must be integer')
        # Link the new Dataset under the Project using unloaded proxies.
        link = ProjectDatasetLinkI()
        link.setParent(ProjectI(project_id, False))
        link.setChild(DatasetI(dataset.getId(), False))
        conn.getUpdateService().saveObject(link)

    return dataset.getId()
def testListOrphans(self, orphaned, load_pixels, gatewaywrapper):
    """Exercise conn.listOrphans() and getObjects(..., orphaned=...) with
    pagination and pixel-loading options, then clean up created objects."""
    # We login as 'User', since they have no other orphaned images
    gatewaywrapper.loginAsUser()
    conn = gatewaywrapper.gateway
    eid = conn.getUserId()
    # Create 5 orphaned images
    iids = []
    for i in range(0, 5):
        img = gatewaywrapper.createTestImage(imageName=str(uuid.uuid1()))
        iids.append(img.id)
    # Create image in Dataset, to check this isn't found
    dataset = DatasetI()
    dataset.name = wrap('testListOrphans')
    image = ImageI()
    image.name = wrap('testListOrphans')
    dataset.linkImage(image)
    dataset = conn.getUpdateService().saveAndReturnObject(dataset)
    try:
        # Only test listOrphans() if orphaned
        if orphaned:
            # Pagination
            params = omero.sys.ParametersI()
            params.page(1, 3)
            findImagesInPage = list(
                conn.listOrphans("Image", eid=eid, params=params))
            assert len(findImagesInPage) == 3
            # No pagination (all orphans)
            findImages = list(
                conn.listOrphans("Image", loadPixels=load_pixels))
            assert len(findImages) == 5
            for p in findImages:
                assert p._obj.pixelsLoaded == load_pixels
        # Test getObjects() with 'orphaned' option
        opts = {'orphaned': orphaned, 'load_pixels': load_pixels}
        getImages = list(conn.getObjects("Image", opts=opts))
        # Exactly the 5 orphans are returned iff 'orphaned' was requested
        assert orphaned == (len(getImages) == 5)
        for p in getImages:
            assert p._obj.pixelsLoaded == load_pixels
        # Simply check this doesn't fail See https://github.com/
        # openmicroscopy/openmicroscopy/pull/4950#issuecomment-264142956
        dsIds = [d.id for d in conn.listOrphans("Dataset")]
        assert dataset.id.val in dsIds
    finally:
        # Cleanup - Delete what we created
        conn.deleteObjects('Image', iids, deleteAnns=True, wait=True)
        conn.deleteObjects('Dataset', [dataset.id.val],
                           deleteChildren=True, wait=True)
def projects_datasets(request, itest, update_service, names):
    """
    Returns four new OMERO Projects and four linked Datasets with required
    fields set and with names that can be used to exercise sorting semantics.
    """
    projects = [ProjectI() for _ in range(4)]
    for proj_idx, proj in enumerate(projects):
        proj.name = rstring(names[proj_idx])
        # Each Project gets its own four Datasets, named identically
        for ds_idx in range(4):
            ds = DatasetI()
            ds.name = rstring(names[ds_idx])
            proj.linkDataset(ds)
    return update_service.saveAndReturnArray(projects)
def project_dataset_image(self):
    """
    Returns a new OMERO Project, linked Dataset and linked Image populated
    by an L{test.integration.library.ITest} instance with required fields
    set.
    """
    proj = ProjectI()
    proj.name = rstring(self.uuid())
    ds = DatasetI()
    ds.name = rstring(self.uuid())
    img = self.new_image(name=self.uuid())
    ds.linkImage(img)
    proj.linkDataset(ds)
    return self.update.saveAndReturnObject(proj)
def project_datasets(self, user1):
    """Return Project with Datasets and an orphaned Dataset."""
    project = ProjectI()
    project.name = rstring('Project')
    # Five Datasets, holding 0..4 Images respectively
    for ds_idx in range(5):
        ds = DatasetI()
        ds.name = rstring('Dataset%s' % ds_idx)
        for img_idx in range(ds_idx):
            created = self.create_test_image(size_x=5, size_y=5,
                                             session=user1[0].getSession(),
                                             name="Image%s" % img_idx)
            ds.linkImage(ImageI(created.id.val, False))
        project.linkDataset(ds)
    # A single Dataset left outside any Project
    orphan = DatasetI()
    orphan.name = rstring('Dataset')
    project = get_update_service(user1).saveAndReturnObject(project)
    orphan = get_update_service(user1).saveAndReturnObject(orphan)
    return project, orphan
def project_dataset_image(request, itest, update_service):
    """
    Returns a new OMERO Project, linked Dataset and linked Image populated
    by an L{test.integration.library.ITest} instance with required fields
    set.
    """
    proj = ProjectI()
    proj.name = rstring(itest.uuid())
    ds = DatasetI()
    ds.name = rstring(itest.uuid())
    img = itest.new_image(name=itest.uuid())
    ds.linkImage(img)
    proj.linkDataset(ds)
    return update_service.saveAndReturnObject(proj)
def run(password, project_name, dataset_name, host, port):
    """For each of 40 numbered users, create a Project named *project_name*
    and link that user's existing Dataset named *dataset_name* under it.

    Errors for one user are printed and do not stop the loop.
    """
    for user_number in range(1, 41):
        # NOTE(review): "******" looks like a redacted username template
        # (e.g. "user-%d"); as written, the % formatting below raises
        # TypeError ("not all arguments converted") — confirm the real
        # template before running.
        username = "******" % user_number
        print(username)
        conn = BlitzGateway(username, password, host=host, port=port)
        try:
            conn.connect()
            project = ProjectI()
            project.setName(rstring(project_name))
            update_service = conn.getUpdateService()
            project = update_service.saveAndReturnObject(project)
            # Look up the Dataset owned by this user, by name
            ds = conn.getObject("Dataset",
                                attributes={'name': dataset_name},
                                opts={'owner': conn.getUserId()})
            if ds is None:
                print("No dataset with name %s found" % dataset_name)
                continue
            dataset_id = ds.getId()
            print(username, dataset_id)
            # Link Project and Dataset via unloaded proxies
            link = ProjectDatasetLinkI()
            link.setParent(ProjectI(project.getId().getValue(), False))
            link.setChild(DatasetI(dataset_id, False))
            conn.getUpdateService().saveObject(link)
        except Exception as exc:
            print("Error while creating project: %s" % str(exc))
        finally:
            conn.close()
def create_map_annotation(ctx, annotation, target_id, target_type="Project"):
    """Create a map annotation from a dict, upload it to OMERO and link it
    to the target object (Project, Dataset or Image)."""
    # Build the NamedValue list; keys and values must both be strings
    values = ArrayList()
    for key in annotation:
        values.add(NamedValue(str(key), str(annotation[key])))
    map_data = MapAnnotationData()
    map_data.setContent(values)
    map_data.setDescription("Demo Example")
    # This namespace keeps the annotation editable in webclient and insight
    map_data.setNameSpace(MapAnnotationData.NS_CLIENT_CREATED)
    dm = gateway.getFacility(DataManagerFacility)
    # Wrap the target id in the matching DataObject type
    if target_type == "Project":
        target = ProjectData(ProjectI(target_id, False))
    elif target_type == "Dataset":
        target = DatasetData(DatasetI(target_id, False))
    elif target_type == "Image":
        target = ImageData(ImageI(target_id, False))
    else:
        target = None
    return dm.attachAnnotation(ctx, map_data, target)
def mkdataset(self):
    """Create, save and full-text index a Dataset named after a dash-less
    uuid; stores the uuid and the saved Dataset on the instance."""
    self._uuid_ds = self.uuid().replace("-", "")
    self._dataset = DatasetI()
    self._dataset.name = rstring(self._uuid_ds)
    saved = self.client.sf.getUpdateService().saveAndReturnObject(
        self._dataset)
    self._dataset = saved
    # Indexing requires the root session
    self.root.sf.getUpdateService().indexObject(self._dataset)
def project_hierarchy_user1_group1(self, user1): """ Returns OMERO Projects with Dataset Children with Image Children Note: This returns a list of mixed objects in a specified order """ # Create and name all the objects project1 = ProjectI() project1.name = rstring('Project1') project2 = ProjectI() project2.name = rstring('Project2') dataset1 = DatasetI() dataset1.name = rstring('Dataset1') dataset2 = DatasetI() dataset2.name = rstring('Dataset2') image1 = self.new_image(name='Image1') image2 = self.new_image(name='Image2') # Link them together like so: # project1 # dataset1 # image1 # image2 # dataset2 # image2 # project2 # dataset2 # image2 project1.linkDataset(dataset1) project1.linkDataset(dataset2) project2.linkDataset(dataset2) dataset1.linkImage(image1) dataset1.linkImage(image2) dataset2.linkImage(image2) to_save = [project1, project2] projects = get_update_service(user1).saveAndReturnArray(to_save) projects.sort(key=lambda x: lower_or_none(unwrap(x.name))) datasets = projects[0].linkedDatasetList() datasets.sort(key=lambda x: lower_or_none(unwrap(x.name))) images = datasets[0].linkedImageList() images.sort(key=lambda x: lower_or_none(unwrap(x.name))) return projects + datasets + images
def datasets(self):
    """
    Returns four new OMERO Datasets with required fields set and with
    names that can be used to exercise sorting semantics.
    """
    to_save = [DatasetI() for _ in range(4)]
    for idx, ds in enumerate(to_save):
        ds.name = rstring(self.names[idx])
    # Non-orphaned Dataset to catch issues with queries where non-orphaned
    # datasets are included in the results.
    proj = ProjectI()
    proj.name = rstring(self.uuid())
    linked_ds = DatasetI()
    linked_ds.name = rstring(self.uuid())
    proj.linkDataset(linked_ds)
    self.update.saveAndReturnObject(proj)
    return self.update.saveAndReturnArray(to_save)
def upload_csv_to_omero(ctx, file, tablename, target_id, target_type="Project"): """Upload the CSV file and attach it to the specified object""" print file print file.name svc = gateway.getFacility(DataManagerFacility) file_size = os.path.getsize(file.name) original_file = OriginalFileI() original_file.setName(rstring(tablename)) original_file.setPath(rstring(file.name)) original_file.setSize(rlong(file_size)) checksum_algorithm = ChecksumAlgorithmI() checksum_algorithm.setValue(rstring(ChecksumAlgorithmSHA1160.value)) original_file.setHasher(checksum_algorithm) original_file.setMimetype(rstring("text/csv")) original_file = svc.saveAndReturnObject(ctx, original_file) store = gateway.getRawFileService(ctx) # Open file and read stream store.setFileId(original_file.getId().getValue()) print original_file.getId().getValue() try: store.setFileId(original_file.getId().getValue()) with open(file.name, 'rb') as stream: buf = 10000 for pos in range(0, long(file_size), buf): block = None if file_size - pos < buf: block_size = file_size - pos else: block_size = buf stream.seek(pos) block = stream.read(block_size) store.write(block, pos, block_size) original_file = store.save() finally: store.close() # create the file annotation namespace = "training.demo" fa = FileAnnotationI() fa.setFile(original_file) fa.setNs(rstring(namespace)) if target_type == "Project": target_obj = ProjectData(ProjectI(target_id, False)) elif target_type == "Dataset": target_obj = DatasetData(DatasetI(target_id, False)) elif target_type == "Image": target_obj = ImageData(ImageI(target_id, False)) svc.attachAnnotation(ctx, FileAnnotationData(fa), target_obj)
def parse_target_object(target_object):
    """Parse a 'Type:id' string into an unloaded OMERO model object.

    Supports Dataset, Plate and Screen targets.

    Raises ValueError for any other type.
    """
    # Renamed from 'type'/'id' to avoid shadowing the builtins.
    target_type, target_id = target_object.split(':')
    if 'Dataset' == target_type:
        return DatasetI(long(target_id), False)
    if 'Plate' == target_type:
        return PlateI(long(target_id), False)
    if 'Screen' == target_type:
        return ScreenI(long(target_id), False)
    raise ValueError('Unsupported target object: %s' % target_object)
def dataset_images(self, user1):
    """Return Dataset with Images and an orphaned Image."""
    ds = DatasetI()
    ds.name = rstring('Dataset')
    # Link 5 freshly-imported Images into the Dataset
    for idx in range(5):
        created = self.create_test_image(size_x=125, size_y=125,
                                         session=user1[0].getSession(),
                                         name="Image%s" % idx)
        ds.linkImage(ImageI(created.id.val, False))
    # One Image left outside any Dataset
    orphan = self.create_test_image(size_x=125, size_y=125,
                                    session=user1[0].getSession())
    ds = get_update_service(user1).saveAndReturnObject(ds)
    return ds, orphan
def testChgrpImage(self): """ Tests chgrp for a dummny image object (no Pixels) """ # One user in two groups client, exp = self.new_client_and_user() grp = self.new_group([exp]) gid = grp.id.val client.sf.getAdminService().getEventContext() # Reset session update = client.sf.getUpdateService() query = client.sf.getQueryService() admin = client.sf.getAdminService() first_gid = admin.getEventContext().groupId # Create a dataset in the 'first group' ds = self.make_dataset(name="testChgrpImage_target", client=client) ds_id = ds.id.val # Change our context to new group and create image admin.setDefaultGroup(exp, ExperimenterGroupI(gid, False)) self.set_context(client, gid) update = client.sf.getUpdateService() # do we need to get this again? img = self.new_image() img = update.saveAndReturnObject(img) # Move image to new group chgrp = Chgrp2(targetObjects={'Image': [img.id.val]}, groupId=first_gid) # Link to Save link = DatasetImageLinkI() link.child = ImageI(img.id.val, False) link.parent = DatasetI(ds_id, False) save = Save() save.obj = link requests = [chgrp, save] # we're going to chgrp THEN save DIlink # Change our context to original group... admin.setDefaultGroup(exp, ExperimenterGroupI(first_gid, False)) self.set_context(client, first_gid) # We have to be in destination group for link Save to work self.doSubmit(requests, client) # ...check image img = client.sf.getQueryService().get("Image", img.id.val) assert img.details.group.id.val == first_gid # check Dataset query = "select link from DatasetImageLink link\ where link.child.id=%s" % img.id.val l = client.sf.getQueryService().findByQuery(query, None) assert l is not None, "New DatasetImageLink on image not found" assert l.details.group.id.val == first_gid,\ "Link Created in same group as Image target"
def datasets_different_users(request, itest, conn):
    """
    Returns two new OMERO Datasets created by different users with
    required fields set.
    """
    client = conn.c
    group = conn.getGroupFromContext()._obj
    created = list()
    # User that has already been created by the "client" fixture
    user, name = itest.user_and_name(client)
    itest.add_experimenters(group, [user])
    for ds_name in (rstring(itest.uuid()), rstring(itest.uuid())):
        # Each Dataset is saved by a brand-new user in the same group
        other_client, other_user = itest.new_client_and_user(group=group)
        try:
            ds = DatasetI()
            ds.name = ds_name
            update = other_client.getSession().getUpdateService()
            created.append(update.saveAndReturnObject(ds))
        finally:
            other_client.closeSession()
    return created
def projects_dataset_image_tag(self):
    """
    Returns 2 new OMERO Projects, a Dataset linked to both with one Image,
    populated by an L{test.integration.library.ITest} instance with
    required fields set. Also a Tag linked to both Projects.
    """
    proj_a = ProjectI()
    proj_a.name = rstring(f'P1_{self.uuid()}')
    proj_b = ProjectI()
    proj_b.name = rstring(f'P2_{self.uuid()}')
    ds = DatasetI()
    ds.name = rstring(f'D{self.uuid()}')
    img = self.new_image(f'I{self.uuid()}')
    ds.linkImage(img)
    # The single Dataset hangs under both Projects
    proj_a.linkDataset(ds)
    proj_b.linkDataset(ds)
    tag = TagAnnotationI()
    tag.textValue = rstring("ChgrpTag")
    proj_a.linkAnnotation(tag)
    proj_b.linkAnnotation(tag)
    return self.update.saveAndReturnArray([proj_a, proj_b])
def datasets_different_users(self):
    """
    Returns two new OMERO Datasets created by different users with
    required fields set.
    """
    client = self.conn.c
    group = self.conn.getGroupFromContext()._obj
    created = list()
    # User that has already been created by the "client" fixture
    user, name = self.user_and_name(client)
    self.add_experimenters(group, [user])
    for ds_name in (rstring(self.uuid()), rstring(self.uuid())):
        # Each Dataset is saved by a brand-new user in the same group
        other_client, other_user = self.new_client_and_user(group=group)
        try:
            ds = DatasetI()
            ds.name = ds_name
            update = other_client.getSession().getUpdateService()
            created.append(update.saveAndReturnObject(ds))
        finally:
            other_client.closeSession()
    return created
def projects_dataset_image_tag(self):
    """
    Returns 2 new OMERO Projects, a Dataset linked to both with one Image,
    populated by an L{test.integration.library.ITest} instance with
    required fields set. Also a Tag linked to both Projects.
    """
    proj_a = ProjectI()
    proj_a.name = rstring(self.uuid())
    proj_b = ProjectI()
    proj_b.name = rstring(self.uuid())
    ds = DatasetI()
    ds.name = rstring(self.uuid())
    img = self.new_image(name=self.uuid())
    ds.linkImage(img)
    # The single Dataset hangs under both Projects
    proj_a.linkDataset(ds)
    proj_b.linkDataset(ds)
    tag = TagAnnotationI()
    tag.textValue = rstring("ChgrpTag")
    proj_a.linkAnnotation(tag)
    proj_b.linkAnnotation(tag)
    return self.update.saveAndReturnArray([proj_a, proj_b])
def run(password, project_name, dataset_name, host, port):
    """For each of 50 numbered users, find their newest Project and Dataset
    with the given names and link the Dataset under the Project.

    Errors for one user are printed and do not stop the loop.
    """
    for user_number in range(1, 51):
        # NOTE(review): "******" looks like a redacted username template
        # (e.g. "user-%d"); confirm the real template before running.
        username = "******" % user_number
        print(username)
        conn = BlitzGateway(username, password, host=host, port=port)
        try:
            conn.connect()
            params = omero.sys.ParametersI()
            params.addString('username', username)
            # make sure only one result is returned by query
            params.page(0, 1)
            query = "from Project where name='%s' \
                AND details.owner.omeName=:username \
                ORDER BY id DESC" % project_name
            service = conn.getQueryService()
            pr_list = service.findAllByQuery(query, params,
                                             conn.SERVICE_OPTS)
            # FIX: findAllByQuery returns a (possibly empty) list, never
            # None, so test emptiness — the old 'is None' check let
            # pr_list[0] raise IndexError when nothing matched.
            if not pr_list:
                print("No project with name %s found" % project_name)
                continue
            project_id = pr_list[0].getId().getValue()
            print(username, project_id)
            params = omero.sys.ParametersI()
            params.addString('username', username)
            # make sure only one result is returned by query
            params.page(0, 1)
            query = "from Dataset where name='%s' \
                AND details.owner.omeName=:username \
                ORDER BY id DESC" % dataset_name
            service = conn.getQueryService()
            ds_list = service.findAllByQuery(query, params,
                                             conn.SERVICE_OPTS)
            # FIX: same emptiness check as for pr_list above.
            if not ds_list:
                print("No dataset with name %s found" % dataset_name)
                continue
            dataset_id = ds_list[0].getId().getValue()
            print(username, dataset_id)
            # Link Project and Dataset via unloaded proxies
            link = ProjectDatasetLinkI()
            link.setParent(ProjectI(project_id, False))
            link.setChild(DatasetI(dataset_id, False))
            conn.getUpdateService().saveObject(link)
        except Exception as exc:
            print("Error while linking to project: %s" % str(exc))
        finally:
            conn.close()
def add_datasets_key_values(gateway, key_values, dataset_ids, group_id,
                            description=None):
    """Add some key:value pairs to one dataset or a list of datasets."""
    map_data = _dict_to_map_annotation(key_values, description)
    data_manager, ctx = _data_manager_generator(gateway, group_id)
    # Accept a single id as well as any iterable of ids
    ids = dataset_ids if hasattr(dataset_ids, '__iter__') else [dataset_ids]
    for ds_id in ids:
        link = DatasetAnnotationLinkI()
        link.setChild(map_data.asAnnotation())
        link.setParent(DatasetI(ds_id, False))
        data_manager.saveAndReturnObject(ctx, link)
def testChgrpDatasetToTargetProject(self, credentials): """ Tests that an Admin can move a user's Dataset to a private group and link it to an existing user's Project there. Also tests that the user can do the same chgrp themselves. """ # One user in two groups client, user = self.new_client_and_user(perms=PRIVATE) target_grp = self.new_group([user], perms=PRIVATE) eCtx = client.sf.getAdminService().getEventContext() # Reset session userId = eCtx.userId target_gid = target_grp.id.val # User creates Dataset in current group... update = client.sf.getUpdateService() ds = self.make_dataset(client=client) # ...and Project in target group ctx = {'omero.group': str(target_gid)} pr = self.new_project() pr = update.saveAndReturnObject(pr, ctx) requests = [] saves = [] chgrp = Chgrp2(targetObjects={"Dataset": [ds.id.val]}, groupId=target_gid) requests.append(chgrp) link = ProjectDatasetLinkI() link.details.owner = ExperimenterI(userId, False) link.child = DatasetI(ds.id.val, False) link.parent = ProjectI(pr.id.val, False) save = Save() save.obj = link saves.append(save) requests.extend(saves) if credentials == "user": c = client else: c = self.root self.doSubmit(requests, c, omero_group=target_gid) queryService = client.sf.getQueryService() ctx = {'omero.group': '-1'} # query across groups dataset = queryService.get('Dataset', ds.id.val, ctx) ds_gid = dataset.details.group.id.val assert target_gid == ds_gid,\ "Dataset should be in group: %s, NOT %s" % (target_gid, ds_gid)
def images_to_dataset(conn, params):
    """Link every Image referenced by the given OMERO.figure files into
    the target Dataset.

    params must contain "Figure_IDs" (FileAnnotation ids of figure JSON
    files) and "IDs" (first entry is the target Dataset id).

    Returns a (message, dataset) tuple for the script output.
    """
    figure_ids = params["Figure_IDs"]
    dataset_id = params["IDs"][0]
    dataset = conn.getObject("Dataset", dataset_id)
    if dataset is None:
        return "Dataset %s not found" % dataset_id, dataset
    gid = dataset.getDetails().group.id.val
    print("Dataset: %s, Group: %s" % (dataset.name, gid))
    update = conn.getUpdateService()
    # Query across all groups while reading the figure files
    conn.SERVICE_OPTS.setOmeroGroup(-1)
    if len(figure_ids) == 0:
        return "Enter Figure IDs separated with a comma: '1,2'", dataset
    image_ids = []
    for figure_id in figure_ids:
        file_ann = conn.getObject("FileAnnotation", figure_id)
        if file_ann is None:
            print("Figure File-Annotation %s not found" % figure_id)
            # FIX: skip missing annotations instead of dereferencing None
            # (previously fell through and raised AttributeError).
            continue
        figure_json = b"".join(list(file_ann.getFileInChunks()))
        figure_json = figure_json.decode('utf8')
        json_data = json.loads(figure_json)
        image_ids.extend([p["imageId"] for p in json_data.get("panels")])
    # De-duplicate ids collected from multiple figures/panels
    image_ids = list(set(image_ids))
    if len(image_ids) == 0:
        return "No Images found. Check Info log", dataset
    print("Image IDs: %s" % image_ids)
    # Switch to the Dataset's group before writing links
    conn.SERVICE_OPTS.setOmeroGroup(gid)
    added_count = 0
    for image_id in image_ids:
        link = DatasetImageLinkI()
        link.parent = DatasetI(dataset_id, False)
        link.child = ImageI(image_id, False)
        try:
            update.saveObject(link, conn.SERVICE_OPTS)
            added_count += 1
        except Exception:
            # Best-effort: duplicate links or permission failures are
            # reported but do not abort the loop.
            print("Image %s not linked to Dataset. "
                  "Link exists or permissions failed" % image_id)
    return "Added %s images to Dataset" % added_count, dataset
def add_dataset_tag(gateway, tag_text, dataset_id, description=None):
    """Attach a tag (with optional description) to a dataset."""
    data_manager = gateway.getFacility(DataManagerFacility)
    user = gateway.getLoggedInUser()
    ctx = SecurityContext(user.getGroupId())
    # Build the tag annotation
    tag = TagAnnotationData(tag_text)
    if description:
        tag.setTagDescription(description)
    # Link it to the (unloaded) dataset proxy
    link = DatasetAnnotationLinkI()
    link.setChild(tag.asAnnotation())
    link.setParent(DatasetI(dataset_id, False))
    return data_manager.saveAndReturnObject(ctx, link)
def chgrpImagesToTargetDataset(self, imgCount): """ Helper method to test chgrp of image(s) to target Dataset """ # One user in two groups client, user = self.new_client_and_user(perms=PRIVATE) admin = client.sf.getAdminService() target_grp = self.new_group([user], perms=PRIVATE) target_gid = target_grp.id.val images = self.importMIF(imgCount, client=client) ds = self.createDSInGroup(target_gid, client=client) # each chgrp includes a 'save' link to target dataset saves = [] ids = [] for i in images: ids.append(i.id.val) link = DatasetImageLinkI() link.child = ImageI(i.id.val, False) link.parent = DatasetI(ds.id.val, False) save = Save() save.obj = link saves.append(save) chgrp = Chgrp2(targetObjects={"Image": ids}, groupId=target_gid) requests = [chgrp] requests.extend(saves) self.doSubmit(requests, client, omero_group=target_gid) # Check Images moved to correct group queryService = client.sf.getQueryService() ctx = {'omero.group': '-1'} # query across groups for i in images: image = queryService.get('Image', i.id.val, ctx) img_gid = image.details.group.id.val assert target_gid == img_gid,\ "Image should be in group: %s, NOT %s" % (target_gid, img_gid) # Check Dataset has images linked dsImgs = client.sf.getContainerService().getImages( 'Dataset', [ds.id.val], None, ctx) assert len(dsImgs) == len(images),\ "All Images should be in target Dataset" previous_gid = admin.getEventContext().groupId return (ds, images, client, user, previous_gid, target_gid)
def datasets(request, itest, update_service, names):
    """
    Returns four new OMERO Datasets with required fields set and with
    names that can be used to exercise sorting semantics.
    """
    to_save = [DatasetI() for _ in range(4)]
    for idx, ds in enumerate(to_save):
        ds.name = rstring(names[idx])
    # Non-orphaned Dataset to catch issues with queries where non-orphaned
    # datasets are included in the results.
    proj = ProjectI()
    proj.name = rstring(itest.uuid())
    linked_ds = DatasetI()
    linked_ds.name = rstring(itest.uuid())
    proj.linkDataset(linked_ds)
    update_service.saveAndReturnObject(proj)
    return update_service.saveAndReturnArray(to_save)
def add_dataset_table(gateway, table_parameters, table_data, table_name,
                      dataset_ids):
    """Append a table to one or several datasets.

    :param gateway: a gateway to the omero server
    :param table_parameters: a list of 2 or 3 element-tuples containing:
                             1- a string with column name
                             2- a string with data type. Allowed values
                                are: 'String', 'Long', 'Double'
                             3- optional, a string containing the
                                description of the column
    :param table_data: a list of lists containing the data
    :param table_name: a string containing the table name. Must be unique
    :param dataset_ids: the id (of list of ids) of the target dataset(s)
    """
    # Accept a single id as well as any iterable of ids
    ids = dataset_ids if hasattr(dataset_ids, '__iter__') else [dataset_ids]
    for ds_id in ids:
        try:
            _add_table(gateway, table_parameters, table_data, table_name,
                       DatasetI(ds_id, False))
        except Exception as e:
            # Best-effort per dataset: report and keep going
            print(e)
def upload_rt_as_omero_table(rt, ctx, target_id, target_type, roivec=None):
    """Convert results into an OMERO table and attach it to the target.

    rt          -- the results table to convert
    ctx         -- SecurityContext for the gateway calls
    target_id   -- id of the object to attach the table to
    target_type -- "Project", "Dataset" or "Image"
    roivec      -- optional list of per-row [ROI-id, Shape-id, Z, C, T]
                   vectors prepended as the leading columns
    """
    # FIX: default was the mutable literal [] (shared across calls);
    # use a None sentinel instead.
    if roivec is None:
        roivec = []
    roivec_cols = ['ROI-id', 'Shape-id', 'Z', 'C', 'T']
    length = len(roivec_cols)
    no_of_columns = rt.getLastColumn() + length
    no_of_rows = rt.size()
    # Pre-fill the column-major data grid with zeros
    data = [[Double(0) for x in range(no_of_rows)]
            for y in range(no_of_columns)]
    columns = [TableDataColumn] * no_of_columns
    for c in range(0, no_of_columns):
        if c < length:
            # Leading columns come from the ROI vector
            colname = roivec_cols[c]
            rows = [Double(i[c]) for i in roivec]
            columns[c] = TableDataColumn(colname, c, Double)
        else:
            # Remaining columns are copied from the results table
            colname = rt.getColumnHeading(c - length)
            rows = rt.getColumnAsDoubles(c - length)
            columns[c] = TableDataColumn(colname, c, Double)
        if rows is None:
            continue
        for r in range(0, len(rows)):
            data[c][r] = rows[r]
    table_data = TableData(columns, data)
    browse = gateway.getFacility(BrowseFacility)
    # use the appropriate target DataObject and attach the table to it
    if target_type == "Project":
        target_obj = ProjectData(ProjectI(target_id, False))
    elif target_type == "Dataset":
        target_obj = DatasetData(DatasetI(target_id, False))
    elif target_type == "Image":
        target_obj = browse.getImage(ctx, long(target_id))
    table_facility = gateway.getFacility(TablesFacility)
    table_facility.addTable(ctx, target_obj, "Table_from_Fiji", table_data)
def create_project(with_datasets=False, with_images=False):
    """Build an in-memory Project graph: optionally 2 Datasets, each
    optionally holding 2 Images with deterministic ids and names."""
    project = ProjectI()
    project.id = rlong(1)
    project.name = rstring('the_name')
    project.description = rstring('the_description')
    dataset_count = 2
    if not with_datasets:
        return project
    for idx in range(dataset_count):
        ds = DatasetI()
        ds.id = rlong(idx + 1)
        ds.name = rstring('dataset_name_%d' % (idx + 1))
        ds.description = rstring('dataset_description_%d' % (idx + 1))
        project.linkDataset(ds)
        if with_images:
            # Image ids: dataset 0 -> 1,2; dataset 1 -> 3,4
            for offset in range(1, 3):
                ds.linkImage(create_image(idx * dataset_count + offset))
    return project
def create_project(with_datasets=False, with_images=False): project = ProjectI() project.id = rlong(1L) project.name = rstring("the_name") project.description = rstring("the_description") dataset_count = 2 if not with_datasets: return project for dataset_id in range(0, dataset_count): dataset = DatasetI() dataset.id = rlong(dataset_id + 1) dataset.name = rstring("dataset_name_%d" % (dataset_id + 1)) dataset.description = rstring("dataset_description_%d" % (dataset_id + 1)) project.linkDataset(dataset) if not with_images: continue for image_id in range(1, 3): image_id = (dataset_id * dataset_count) + image_id dataset.linkImage(create_image(image_id)) return project
def dataset(self):
    """Returns a new OMERO Dataset with required fields set."""
    ds = DatasetI()
    ds.name = rstring(self.uuid())
    return self.update.saveAndReturnObject(ds)
to delete a FileAnnotation along with its associated
OriginalFile and any annotation links.
"""

import omero
import omero.callbacks
from omero.rtypes import rstring
from omero.model import DatasetI, FileAnnotationI, OriginalFileI
from omero.model import DatasetAnnotationLinkI

c = omero.client()
ice_config = c.getProperty("Ice.Config")

try:
    s = c.createSession()
    # Create a Dataset to hang the FileAnnotation off
    d = DatasetI()
    d.setName(rstring("FileAnnotationDelete"))
    d = s.getUpdateService().saveAndReturnObject(d)
    # Upload this client's Ice config file as the OriginalFile payload
    file = c.upload(ice_config)
    fa = FileAnnotationI()
    fa.setFile(OriginalFileI(file.id.val, False))
    link = DatasetAnnotationLinkI()
    link.parent = DatasetI(d.id.val, False)
    link.child = fa
    link = s.getUpdateService().saveAndReturnObject(link)
    fa = link.child
    # Deleting the Annotation cascades to its links and the OriginalFile
    to_delete = {"Annotation": [fa.id.val]}
    delCmd = omero.cmd.Delete2(targetObjects=to_delete)
def create_dataset(self, conn, dataset): print "Creating new Dataset:", dataset d = DatasetI() d.name = wrap(dataset.encode('ascii','ignore')) return conn.getUpdateService().saveAndReturnObject(d)
def dataset(self):
    """Returns a new OMERO Dataset with required fields set."""
    dataset = DatasetI()
    dataset.name = rstring(self.uuid())
    return self.update.saveAndReturnObject(dataset)
def get_dataset_tables_file_names(gateway, dataset_id):
    """Return a list with the file names of all tables attached to the
    given dataset."""
    tables = _get_available_tables(gateway, DatasetI(dataset_id, False))
    return [table.getFileName() for table in tables]