def create_containers(self, cli, project, dataset):
    """
    Creates containers with names provided if they don't exist already.
    Returns Dataset ID.

    :param cli: OMERO CLI object carrying an active session
        (its _event_context.sessionUuid is reused for the gateway)
    :param project: Project name, or None to work with a parentless Dataset
    :param dataset: Dataset name to find or create
    :return: ID of the Dataset, or None if nothing was found or created
    """
    sessionId = cli._event_context.sessionUuid
    conn = BlitzGateway(host='localhost')
    conn.connect(sUuid = sessionId)
    # Restrict all lookups below to objects owned by the session user.
    params = omero.sys.Parameters()
    params.theFilter = omero.sys.Filter()
    params.theFilter.ownerId = wrap(conn.getUser().getId())
    d = None
    dsId = None
    if project is not None:
        # We need to find or create a project
        # This is not nice but we really shouldn't be dealing with large
        # numbers of objects here
        plist = list(conn.getObjects("Project",
                                     attributes={'name': project},
                                     params=params))
        if len(plist) == 0:
            # Create project and dataset then link
            p = self.create_project(conn, project)
            d = self.create_dataset(conn, dataset)
            dsId = d.id.val
            self.link_dataset(conn, p.id.val, dsId)
        else:
            # Pick the first, it's as good as any
            p = plist[0]
            print "Using existing Project:", project
            # Since Project already exists check children for dataset
            # NOTE(review): the loop does not break, so the LAST matching
            # child wins if several share the name
            for c in p.listChildren():
                if c.getName() == dataset:
                    d = c
                    dsId = d.getId()
            # No existing child dataset so create one and link
            if d is None:
                d = self.create_dataset(conn, dataset)
                dsId = d.id.val
                self.link_dataset(conn, p.getId(), dsId)
            else:
                print "Using existing Dataset:", dataset
    else:
        # There may be more than one dataset with the same name
        # This is not nice but we really shouldn't be dealing with large
        # numbers of objects here
        dlist = list(conn.getObjects("Dataset",
                                     attributes={'name': dataset},
                                     params=params))
        if len(dlist) != 0:
            # We want one without a parent, the first will do
            # NOTE(review): as above, the loop keeps the last parentless hit
            for c in dlist:
                if len(c.listParents()) == 0:
                    d = c
                    dsId = d.getId()
        if d is None:
            # No orphan Dataset with that name: create a fresh one.
            dsId = self.create_dataset(conn, dataset).id.val
        else:
            print "Using existing Dataset:", dataset
    return dsId
def create_containers(cli, dataset, project=None): """ Creates containers with names provided if they don't exist already. Returns Dataset ID. """ sessionId = cli._event_context.sessionUuid conn = BlitzGateway(host='localhost') conn.connect(sUuid=sessionId) params = omero.sys.Parameters() params.theFilter = omero.sys.Filter() params.theFilter.ownerId = wrap(conn.getUser().getId()) d = None prId = None if project is not None: p = conn.getObject("Project", attributes={'name': project}, params=params) if p is None: print "Creating Project:", project p = omero.model.ProjectI() p.name = wrap(project) prId = conn.getUpdateService().saveAndReturnObject(p).id.val else: print "Using Project:", project, p prId = p.getId() # Since Project already exists, check children for Dataset for c in p.listChildren(): if c.getName() == dataset: d = c if d is None: d = conn.getObject("Dataset", attributes={'name': dataset}, params=params) if d is None: print "Creating Dataset:", dataset d = omero.model.DatasetI() d.name = wrap(dataset) dsId = conn.getUpdateService().saveAndReturnObject(d).id.val if prId is not None: print "Linking Project-Dataset..." link = omero.model.ProjectDatasetLinkI() link.child = omero.model.DatasetI(dsId, False) link.parent = omero.model.ProjectI(prId, False) conn.getUpdateService().saveObject(link) else: print "Using Dataset:", dataset, d dsId = d.getId() return dsId
def connect_to_omero(user, password, host, port=4064): conn = BlitzGateway(user, password, host=host, port=port) print conn.connect() user = conn.getUser() print "Current user:"******" ID:", user.getId() print " Username:"******" Full Name:", user.getFullName() print "Member of:" for g in conn.getGroupsMemberOf(): print " ID:", g.getName(), " Name:", g.getId() group = conn.getGroupFromContext() print "Current group: ", group.getName() return conn
def testExperimenterListParents(self):
    """Check Experimenter.listParents() yields the user's ExperimenterGroups."""
    client, exp = self.new_client_and_user()
    gateway = BlitzGateway(client_obj=client)
    user_group = gateway.getAdminService().getSecurityRoles().userGroupId
    current = gateway.getUser()
    parent_groups = current.listParents()
    assert len(parent_groups) == 2
    assert user_group in [grp.id for grp in parent_groups]
    # An ExperimenterGroup itself has no parent in the hierarchy.
    assert len(parent_groups[0].listParents()) == 0
def create_containers(cli, dataset, project=None): """ Creates containers with names provided if they don't exist already. Returns Dataset ID. """ sessionId = cli._event_context.sessionUuid conn = BlitzGateway() conn.connect(sUuid = sessionId) params = omero.sys.Parameters() params.theFilter = omero.sys.Filter() params.theFilter.ownerId = wrap(conn.getUser().getId()) d = None prId = None if project is not None: p = conn.getObject("Project", attributes={'name': project}, params=params) if p is None: print "Creating Project:", project p = omero.model.ProjectI() p.name = wrap(project) prId = conn.getUpdateService().saveAndReturnObject(p).id.val else: print "Using Project:", project, p prId = p.getId() # Since Project already exists, check children for Dataset for c in p.listChildren(): if c.getName() == dataset: d = c if d is None: d = conn.getObject("Dataset", attributes={'name': dataset}, params=params) if d is None: print "Creating Dataset:", dataset d = omero.model.DatasetI() d.name = wrap(dataset) dsId = conn.getUpdateService().saveAndReturnObject(d).id.val if prId is not None: print "Linking Project-Dataset..." link = omero.model.ProjectDatasetLinkI() link.child = omero.model.DatasetI(dsId, False) link.parent = omero.model.ProjectI(prId, False) conn.getUpdateService().saveObject(link) else: print "Using Dataset:", dataset, d dsId = d.getId() return dsId
def run_as_script():
    """
    Main entry point of the script, as called via the scripting service.

    Drops a marker file named after the calling user into a watched
    directory; the comments below indicate an external wrapper daemon
    consumes these markers to rebuild the symlink tree.  Status is
    reported back through the script's 'Message' output.
    """
    client = scripts.client(
        'Export_Tree_Hierarchy.py',
        'Trigger an update of the symlink tree hierarchy on the sciCORE '
        'cluster filesystem.',
        authors=["Niko Ehrenfeuchter"],
        institutions=["IMCF, University of Basel"],
        contact="*****@*****.**",
    )

    try:
        # wrap client to use the Blitz Gateway
        conn = BlitzGateway(client_obj=client)
        username = conn.getUser().getName()
        markdir = os.path.join(os.environ['HOME'],
                               '.omero_tree_export_usernames')
        if not os.path.exists(markdir):
            # do not create the marker directory, send back an error message
            # instead - the directory has to exist, otherwise the wrapper
            # daemon is not running!
            message = "ERROR: Marker directory '%s' missing!" % markdir
            client.setOutput("Message", rstring(message))
            raise IOError("directory '%s' missing!" % markdir)
        filename = os.path.join(markdir, username)
        if os.path.exists(filename):
            # A marker for this user is already pending processing.
            message = ("WARNING: a request for username '%s' is already "
                       "existing! Please contact an administrator if this "
                       "request does not get processed soon!" % username)
        else:
            message = "Requested update for username '%s'." % username
            with open(filename, 'a') as out:
                out.write('%s' % username)
        client.setOutput("Message", rstring(message))
    finally:
        # Cleanup
        client.closeSession()
def create_screen(self, cli, screen): """ Creates screen with name provided if it doesn't exist already. Returns Screen ID. """ sessionId = cli._event_context.sessionUuid conn = BlitzGateway(host='localhost') conn.connect(sUuid = sessionId) params = omero.sys.Parameters() params.theFilter = omero.sys.Filter() params.theFilter.ownerId = wrap(conn.getUser().getId()) slist = list(conn.getObjects("Screen", attributes={'name': screen}, params=params)) if len(slist) == 0: print "Creating Screen:", screen s = ScreenI() s.name = wrap(screen.encode('ascii','ignore')) scrId = conn.getUpdateService().saveAndReturnObject(s).id.val else: scrId = slist[0].getId() print "Using Screen:", screen return scrId
# Using secure connection. # ============================================================= # By default, once we have logged in, data transfer is not encrypted (faster) # To use a secure connection, call setSecure(True): # conn.setSecure(True) # <--------- Uncomment this # Current session details # ============================================================= # By default, you will have logged into your 'current' group in OMERO. This # can be changed by switching group in the OMERO.insight or OMERO.web clients. user = conn.getUser() print "Current user:"******" ID:", user.getId() print " Username:"******" Full Name:", user.getFullName() print "Member of:" for g in conn.getGroupsMemberOf(): print " ID:", g.getName(), " Name:", g.getId() group = conn.getGroupFromContext() print "Current group: ", group.getName() print "Other Members of current group:" for exp in conn.listColleagues(): print " ID:", exp.getId(), exp.getOmeName(), " Name:", exp.getFullName()
) sys.exit(1) # Using secure connection. # ============================================================= # By default, once we have logged in, data transfer is not encrypted (faster) # To use a secure connection, call setSecure(True): # conn.setSecure(True) # <--------- Uncomment this # Current session details # ============================================================= # By default, you will have logged into your 'current' group in OMERO. This # can be changed by switching group in the OMERO insight or web clients. user = conn.getUser() print "Current user:"******" ID:", user.getId() print " Username:"******" Full Name:", user.getFullName() print "Member of:" for g in conn.getGroupsMemberOf(): print " ID:", g.getName(), " Name:", g.getId() group = conn.getGroupFromContext() print "Current group: ", group.getName() print "Other Members of current group:" for exp in conn.listColleagues(): print " ID:", exp.getId(), exp.getOmeName( ), " Name:", exp.getFullName()
def run_script():
    # Permitted target container types for the import destination.
    dataTypes = [rstring('Project'), rstring('Dataset')]
    # TODO: enable attaching to images
    dataTypes_attach = [rstring('Dataset'), rstring('Project')]
    """
    The main entry point of the script, as called by the client via the
    scripting service, passing the required parameters.
    """
    # NOTE(review): the string above is NOT a docstring (it follows other
    # statements), it is a no-op expression kept for its descriptive value.
    client = scripts.client(
        'Remote_Import.py',
        """Remote import from dedicated workstations:

        * Import the content of the OMERO_ImportData/<username>/ folder on the
        selected workstation.
        * Appends files with the specified suffix to the Project or Dataset.
        * The scanned subfolder depth is 10

        ---------------------------------------------------------------
        INPUT:
        ---------------------------------------------------------------
        Select PROJECT as TARGET for import : :
        A Dataset object is created for each subdirectory on
        OMERO_ImportData/<username>/

        Select DATASET as TARGET for import : :
        All images (also images in subdirectories) are imported into this
        Dataset
        """,
        scripts.String(
            PARAM_WS, optional=False, grouping="1",
            description="Choose a workstation where you want to import from",
            values=WORKSTATION_NAMES),
        scripts.String(
            PARAM_DATATYPE, optional=True, grouping="2",
            description="Choose kind of destination object.",
            values=dataTypes),
        scripts.Long(
            PARAM_ID, optional=False, grouping="3",
            description="ID of destination object. Please select only ONE object."),
        scripts.Bool(
            PARAM_SKIP_EXISTING, grouping="4",
            description="skip files that are already uploaded (checked 'import from' path).",
            default=False),
        scripts.Bool(
            PARAM_ATTACH, grouping="5",
            description="Attach containing non image files",
            default=False),
        scripts.String(
            PARAM_DEST_ATTACH, grouping="5.1",
            description="Object to that should be attach",
            values=dataTypes_attach, default="Dataset"),
        scripts.String(
            PARAM_ATTACH_FILTER, grouping="5.2",
            description="Filter files by given file extension (for example txt, pdf). Separated by ','."
        ),
        namespaces=[omero.constants.namespaces.NSDYNAMIC],
        version="1.1.0",
        authors=["Susanne Kunis", "CellNanOs"],
        institutions=["University of Osnabrueck"],
        contact="*****@*****.**",
    )  # noqa

    try:
        params = client.getInputs(unwrap=True)
        if os.path.exists(MOUNT_PATH):
            # Wrap the client in a Blitz Gateway and locate the user's
            # data directory on the chosen workstation.
            conn = BlitzGateway(client_obj=client)
            datapath = checkWorkstation(conn, params.get(PARAM_WS),
                                        MOUNT_PATH, conn.getUser().getName())
            if datapath:
                robj, message = remoteImport(conn, params, datapath)
            else:
                message = "No data available on %s for user" % (
                    params.get(PARAM_WS))
                robj = None
            client.setOutput("Message", rstring(message))
            if robj is not None:
                client.setOutput("Result", robject(robj._obj))
        else:
            client.setOutput(
                "ERROR", rstring("No such Mount directory: %s" % MOUNT_PATH))
    finally:
        client.closeSession()
class Omg(object):
    """
    OMERO gateway that wraps Blitz gateway and CLI, intended for
    scripting and interactive work.

    Attributes
    ----------
    conn : Blitz gateway connection
    """

    def __init__(self, conn=None, user=None, passwd=None,
                 server=SERVER, port=PORT, skey=None):
        """
        Requires active Blitz connection OR username plus password or sesskey
        """
        if conn is None and (user is None or
                             (passwd is None and skey is None)):
            raise ValueError("Bad parameters," + self.__init__.__doc__)
        if conn is not None:
            if conn.isConnected():
                self.conn = conn
            else:
                raise ValueError("Cannot initialize with closed connection!")
        else:
            if passwd is not None:
                self.conn = BlitzGateway(user, passwd,
                                         host=server, port=port)
                self.conn.connect()
            else:
                # No password: authenticate with an existing session key.
                self.conn = BlitzGateway(user, host=server, port=port)
                self.conn.connect(skey)
        if self.conn.isConnected():
            # Cache connection coordinates for later CLI imports (put()).
            self._server = self.conn.host
            self._port = self.conn.port
            self._user = self.conn.getUser().getName()
            self._key = self.conn.getSession().getUuid().getValue()
            print("Connected to {0} (port {1}) as {2}, session key={3}".format(
                self._server, self._port, self._user, self._key))
        else:
            print("Failed to open connection :-(")

    def ls(self):
        """
        Print groups, then projects/datasets/images for current group.
        """
        print("Groups for {0}:-".format(self.conn.getUser().getName()))
        for gid, gname in self._ls_groups():
            print(" {0} ({1})".format(gname, str(gid)))
        curr_grp = self.conn.getGroupFromContext()
        gid, gname = curr_grp.getId(), curr_grp.getName()
        print("\nData for current group, {0} ({1}):-".format(gname, gid))
        for pid, pname in self._ls_projects():
            print(" Project: {0} ({1})".format(pname, str(pid)))
            for did, dname in self._ls_datasets(pid):
                print(" Dataset: {0} ({1})".format(dname, str(did)))
                for iid, iname in self._ls_images(did):
                    print(" Image: {0} ({1})".format(iname, str(iid)))
        # TODO, list orphaned Datasets and Images

    def _ls_groups(self):
        """list groups (id, name) this session is a member of"""
        groups = self.conn.getGroupsMemberOf()
        return [(group.getId(), group.getName()) for group in groups]

    def _ls_projects(self):
        """list projects (id, name) in the current session group"""
        projs = self.conn.listProjects(self.conn.getUserId())
        return [(proj.getId(), proj.getName()) for proj in projs]

    def _ls_datasets(self, proj_id):
        """list datasets (id, name) within the project id given"""
        dsets = self.conn.getObject("Project", proj_id).listChildren()
        return [(dset.getId(), dset.getName()) for dset in dsets]

    def _ls_images(self, dset_id):
        """list images (id, name) within the dataset id given"""
        imgs = self.conn.getObject("Dataset", dset_id).listChildren()
        return [(img.getId(), img.getName()) for img in imgs]

    def chgrp(self, group_id):
        """
        Change group for this session to the group_id given.
        """
        self.conn.setGroupForSession(group_id)

    def get(self, im_id, get_att=True):
        """
        Download the specified image as an OME-TIFF to current directory,
        with attachments also downloaded to folder: img_path + '_attachments'
        Return : path to downloaded image
        """
        img = self.conn.getObject("Image", oid=im_id)
        img_name = self._unique_name(img.getName(), im_id)
        img_path = os.path.join(os.getcwd(), img_name)
        # NOTE(review): file handle is not closed if export fails midway --
        # consider a 'with' block.
        img_file = open(str(img_path + ".ome.tiff"), "wb")
        fsize, blockgen = img.exportOmeTiff(bufsize=65536)
        for block in blockgen:
            img_file.write(block)
        img_file.close()
        fa_type = omero.model.FileAnnotationI
        attachments = [ann for ann in img.listAnnotations()
                       if ann.OMERO_TYPE == fa_type]
        if get_att and len(attachments) > 0:
            att_dir = img_path + "_attachments"
            os.mkdir(att_dir)

            def download_attachment(att, att_dir):
                """download OMERO file annotation to att_dir"""
                att_file = open(os.path.join(att_dir, att.getFileName()),
                                "wb")
                for att_chunk in att.getFileInChunks():
                    att_file.write(att_chunk)
                att_file.close()

            for att in attachments:
                download_attachment(att, att_dir)
        return img_path

    def _unique_name(self, img_name, im_id):
        """Make unique name combining a file basename & OMERO Image id"""
        path_and_base, ext = os.path.splitext(img_name)
        base = os.path.basename(path_and_base)  # name in OMERO can has path
        return "{0}_{1}".format(base, str(im_id))

    def dget(self, dataset_id):
        """
        Download an entire OMERO Dataset to the current directory.
        """
        downloads = []
        wdir = os.getcwd()
        dset_name = self.conn.getObject("Dataset", dataset_id).getName()
        dset_path = os.path.join(wdir, dset_name + "_D" + str(dataset_id))
        os.mkdir(dset_path)
        os.chdir(dset_path)
        for img_id, img_name in self._ls_images(dataset_id):
            downloads.append(self.get(img_id))
        os.chdir(wdir)
        return downloads

    def pget(self, project_id):
        """
        Download an entire OMERO Project to the current directory.
        """
        downloads = []
        wdir = os.getcwd()
        proj_name = self.conn.getObject("Project", project_id).getName()
        proj_path = os.path.join(wdir, proj_name + "_P" + str(project_id))
        os.mkdir(proj_path)
        os.chdir(proj_path)
        for dset_id, dset_name in self._ls_datasets(project_id):
            downloads.extend(self.dget(dset_id))
        os.chdir(wdir)
        return downloads

    def put(self, filename, name=None, dataset=None):
        """
        Import filename using OMERO CLI, optionally with a specified name
        to a specified dataset (dataset_id).
        Return : OMERO image Id
        """
        cli = omero.cli.CLI()
        cli.loadplugins()
        import_args = ["import"]
        import_args.extend(["-s", str(self._server)])
        import_args.extend(["-k", str(self._key)])
        if dataset is not None:
            import_args.extend(["-d", str(dataset)])
        if name is not None:
            import_args.extend(["-n", str(name)])
        clio = "cli.out"
        clie = "cli.err"
        # Redirect CLI stdout/stderr to files so the new Pixels id can be
        # read back below.
        import_args.extend(["---errs=" + clie, "---file=" + clio, "--"])
        import_args.append(filename)
        cli.invoke(import_args, strict=True)
        pix_id = int(open(clio, 'r').read().rstrip())
        im_id = self.conn.getQueryService().get("Pixels",
                                                pix_id).image.id.val
        os.remove(clio)
        os.remove(clie)
        return im_id

    def describe(self, im_id, description):
        """
        Append to image description.
        """
        img = self.conn.getObject("Image", oid=im_id)
        old_description = img.getDescription() or ""
        img.setDescription(old_description + "\n" + description)
        img.save()

    def attach(self, im_id, attachments):
        """
        Attach a list of files to an image.
        """
        img = self.conn.getObject("Image", oid=im_id)
        # 'attachments' is a whitespace-separated string of file paths.
        for attachment in attachments.split():
            fann = self.conn.createFileAnnfromLocalFile(attachment)
            img.linkAnnotation(fann)
            img.save()

    # TODO: ls_tags() and tag() methods?

    def mkp(self, project_name, description=None):
        """
        Make new OMERO project in current group, returning the new
        project Id.
        """
        # see: omero/lib/python/omeroweb/webclient/controller/container.py
        proj = omero.model.ProjectI()
        proj.name = omero.rtypes.rstring(str(project_name))
        if description is not None and description != "":
            proj.description = omero.rtypes.rstring(str(description))
        return self._save_and_return_id(proj)

    def mkd(self, dataset_name, project_id=None, description=None):
        """
        Make new OMERO dataset, returning the new dataset Id.
        """
        dset = omero.model.DatasetI()
        dset.name = omero.rtypes.rstring(str(dataset_name))
        if description is not None and description != "":
            dset.description = omero.rtypes.rstring(str(description))
        if project_id is not None:
            l_proj_dset = omero.model.ProjectDatasetLinkI()
            proj = self.conn.getObject("Project", project_id)
            l_proj_dset.setParent(proj._obj)
            l_proj_dset.setChild(dset)
            dset.addProjectDatasetLink(l_proj_dset)
        return self._save_and_return_id(dset)

    def _save_and_return_id(self, obj):
        """Save new omero object and return id assgined to it"""
        # see: OmeroWebGateway.saveAndReturnId
        # in: lib/python/omeroweb/webclient/webclient_gateway.py
        u_s = self.conn.getUpdateService()
        res = u_s.saveAndReturnObject(obj, self.conn.SERVICE_OPTS)
        res.unload()
        return res.id.val

    def im(self, im_id):
        """
        Return an Im object for the image id specified.
        """
        img = self.conn.getObject("Image", im_id)
        # build pixel np.ndarray
        nx, ny = img.getSizeX(), img.getSizeY()
        nz, nt, nc = img.getSizeZ(), img.getSizeT(), img.getSizeC()
        planes = [(z, c, t) for c in range(nc)
                  for t in range(nt) for z in range(nz)]
        pix_gen = img.getPrimaryPixels().getPlanes(planes)
        pix = np.array([i for i in pix_gen]).reshape((nc, nt, nz, ny, nx))
        # initialize Im using pix and extracted metadata
        meta = self._extract_meta(img, im_id)
        return Im(pix=pix, meta=meta)

    def _extract_meta(self, img, im_id):
        """Extract metadata attributes from OMERO Blitz gateway Image"""
        meta = {}
        meta['name'] = self._unique_name(img.getName(), im_id)
        meta['description'] = img.getDescription()

        def _extract_ch_info(ch):
            """extract core metadata for for channel, return as dict"""
            ch_info = {'label': ch.getLabel()}
            ch_info['ex_wave'] = ch.getExcitationWave()
            ch_info['em_wave'] = ch.getEmissionWave()
            ch_info['color'] = ch.getColor().getRGB()
            return ch_info

        meta['channels'] = [_extract_ch_info(ch)
                            for ch in img.getChannels()]
        meta['pixel_size'] = {'x': img.getPixelSizeX(),
                              'y': img.getPixelSizeY(),
                              'z': img.getPixelSizeZ(),
                              'units': "um"}
        tag_type = omero.model.TagAnnotationI
        tags = [ann for ann in img.listAnnotations()
                if ann.OMERO_TYPE == tag_type]
        meta['tags'] = {tag.getValue() + " (" + str(tag.getId()) + ")":
                        tag.getDescription() for tag in tags}
        fa_type = omero.model.FileAnnotationI
        attachments = [ann for ann in img.listAnnotations()
                       if ann.OMERO_TYPE == fa_type]
        meta['attachments'] = [att.getFileName() +
                               " (" + str(att.getId()) + ")"
                               for att in attachments]
        user_id = self.conn.getUser().getName() + " (" + \
            str(self.conn.getUser().getId()) + ") @" + self.conn.host
        meta_ext = {}
        meta_ext['user_id'] = user_id
        meta['meta_ext'] = meta_ext
        # TODO: ROIs, display settings?
        # objective: Image.loadOriginalMetadata()[1][find 'Lens ID Number'][1],
        return meta

    def imput(self, im, dataset_id=None):
        """
        Create a new OMERO Image using an Im object, returning new
        image id.
        """
        # see: omero/lib/python/omero/util/script_utils.py
        # see: omero/lib/python/omeroweb/webclient/webclient_gateway.py
        # see: https://gist.github.com/will-moore/4141708
        if not isinstance(im, Im):
            raise TypeError("first imput argument must be of type Im")
        nc, nt, nz, ny, nx = im.shape
        ch_nums = range(nc)
        q_s = self.conn.getQueryService()
        p_s = self.conn.getPixelsService()
        c_s = self.conn.getContainerService()
        u_s = self.conn.getUpdateService()
        pu_s = self.conn.c.sf.createRawPixelsStore()
        q_ptype = "from PixelsType as p where p.value='{0}'".format(
            str(im.dtype))
        pixelsType = q_s.findByQuery(q_ptype, None)
        im_id = p_s.createImage(nx, ny, nz, nt, ch_nums, pixelsType,
                                im.name, im.description)
        img_i = c_s.getImages("Image", [im_id.getValue()], None)[0]
        img = self.conn.getObject("Image", im_id.getValue())
        pix_id = img_i.getPrimaryPixels().getId().getValue()
        pu_s.setPixelsId(pix_id, True)
        # Upload planes one by one in (c, t, z) order.
        for c in range(nc):
            for t in range(nt):
                for z in range(nz):
                    plane = im.pix[c, t, z, :, :]
                    script_utils.uploadPlaneByRow(pu_s, plane, z, c, t)
        l_dset_im = omero.model.DatasetImageLinkI()
        dset = self.conn.getObject("Dataset", dataset_id)
        l_dset_im.setParent(dset._obj)
        l_dset_im.setChild(img._obj)
        self._update_meta(im, im_id)
        u_s.saveObject(l_dset_im, self.conn.SERVICE_OPTS)
        return im_id.getValue()

    def _update_meta(self, im, im_id):
        """Set OMERO Image metadata using Im metadata"""
        # NOTE(review): no implementation visible in this chunk -- the
        # method body may continue elsewhere in the file.
# NOTE(review): this print is the tail of a print_obj(obj, indent=0)
# helper whose 'def' line is not part of this chunk.
print """%s%s:%s Name:"%s" (owner=%s)""" % (
    " " * indent, obj.OMERO_CLASS,
    obj.getId(),
    obj.getName(),
    obj.getOwnerOmeName())


# List all Projects available to the user currently logged in
# ===========================================================
# The only_owned=True parameter limits the Projects which are returned.
# If the parameter is omitted or the value is False, then all Projects
# visible in the current group are returned.
print "\nList Projects:"
print "=" * 50
my_expId = conn.getUser().getId()
for project in conn.listProjects(my_expId):
    print_obj(project)
    for dataset in project.listChildren():
        print_obj(dataset, 2)
        for image in dataset.listChildren():
            print_obj(image, 4)

# Retrieve the datasets owned by the user currently logged in
# ===========================================================
# Here we create an omero.sys.ParametersI instance which we
# can use to filter the results that are returned. If we did
# not pass the params argument to getObjects, then all Datasets
# in the current group would be returned.
print "\nList Datasets:"
def run():
    """
    Launch (remote) Priism ER deconvolution job on a list of images.
    Results imported back into dataset of origin for each image.

    Workflow: export each image's original .dv file to shared temp
    storage, write a one-job-per-line '.jobs' definition file, then poll
    the filesystem for a results file until all jobs finish or TIMEOUT.
    """
    # Build GUI dialog for user to choose images & update parameters
    client = script.client(
        "ER_Deconvolution.py", "ER deconvolution",
        script.String(
            "Data_Type", optional=False, grouping="1",
            values=[rstring('Image')], default="Image"),
        script.List(
            "IDs", optional=False,
            description="image IDs (must have original .dv file!)",
            grouping='2').ofType(rlong(0)),
        script.Int(
            "alpha", optional=False,
            description='regularization parameter "alpha" - try 1000-10000',
            grouping='3', default=job['par.alpha'], min=0),
        script.Float(
            "lambda f", optional=False,
            description='smoothing parameter "lambda f" - try 0.1-1.0',
            grouping='4', default=job['par.lamf'], min=0.0, max=1.0),
        script.Int(
            "iterations", optional=False,
            description="number of iterations - try 10-100",
            grouping='5', default=job['par.niter'], min=0),
        version="0.99",
        authors=["Graeme Ball"],
        institutions=["Dundee Imaging Facility"],
        contact="*****@*****.**"
    )

    try:
        tempdir = None
        input_image_ids = [int(n) for n in
                           client.getInput("IDs", unwrap=True)]
        job['par.alpha'] = client.getInput("alpha", unwrap=True)
        job['par.lamf'] = client.getInput("lambda f", unwrap=True)
        job['par.niter'] = client.getInput("iterations", unwrap=True)
        conn = BlitzGateway(client_obj=client)
        user = str(conn.getUser().getName())
        group = str(conn.getGroupFromContext().getName())
        sid = client.getSessionId()

        # export images (must be .dv!) to shared / temp storage
        tempdir = mktempdir(user, TEMP)
        inputs = []
        for iid in input_image_ids:
            try:
                path = export_original_dvfile(conn, iid, tempdir)
                image = conn.getObject("Image", iid)
                fail(image is None, "No such image, ID=%d" % iid)
                did = image.getParent().getId()
                #did = image.getDataset().getId()
                inputs.append({'imageID': iid, 'path': path,
                               'datasetID': did})
            except RuntimeError as e:
                # Skip this image but keep processing the rest.
                print "Fail: " + str(e)

        jobs = []
        for inp in inputs:
            command = dict(job)  # copy
            command['inputs'] = [inp]  # only 1 input image for this job
            jobs.append(json.dumps([command]))  # only 1 command for this job
            # N.B. '.jobs' file format more flexible than needed here

        # write jobs definition file (1 line json string per job)
        jobs_filepath = os.path.join(tempdir, jobs_filename)
        with open(jobs_filepath, 'w') as f:
            f.writelines(["%s\n" % j for j in jobs])

        # poll filesystem, checking for results
        client.enableKeepAlive(KEEPALIVE_PULSE)
        results_filepath = os.path.join(tempdir, results_filename)
        result_count = 0  # results .json file grows as results appear
        import_count = 0  # ensure we only attempt to import each result once
        tstart = time.time()
        while result_count < len(inputs) and \
                (time.time() - tstart) < TIMEOUT:
            fail(not conn.isConnected(), "Connection lost!")
            # Touch the alive-check file so the remote side knows we are
            # still polling.
            alive_filepath = os.path.join(tempdir, alive_check_filename)
            with open(alive_filepath, 'w') as f:
                f.write("%f\n%d" % (time.time(), RESULTS_POLL_PULSE))
            time.sleep(RESULTS_POLL_PULSE)
            if os.path.exists(results_filepath):
                with open(results_filepath, 'r') as fr:
                    results = fr.readlines()  # 1 line json string per result
                new_results = results[import_count:]
                import_count += import_results(new_results, user, group,
                                               sid, conn)
                result_count = len(results)

        if result_count < len(inputs):
            print "Job timed out after %d seconds, %d results imported" % \
                (TIMEOUT, import_count)
    finally:
        if tempdir is not None and tempdir.startswith(TEMP):
            if os.path.exists(tempdir):
                # we checked 'tempdir' is sane first!
                shutil.rmtree(tempdir)
        client.closeSession()
# NOTE(review): this print is the tail of a print_obj(obj, indent=0)
# helper whose 'def' line is not part of this chunk.
print """%s%s:%s Name:"%s" (owner=%s)""" % (\
    " " * indent, obj.OMERO_CLASS,\
    obj.getId(),\
    obj.getName(),\
    obj.getOwnerOmeName())


# List all Projects available to me, and their Datasets and Images:
# =================================================================
# The only_owned=True parameter limits the Projects which are returned.
# If the parameter is omitted or the value is False, then all Projects
# visible in the current group are returned.
print "\nList Projects:"
print "=" * 50
my_expId = conn.getUser().getId()
for project in conn.listProjects(my_expId):
    print_obj(project)
    for dataset in project.listChildren():
        print_obj(dataset, 2)
        for image in dataset.listChildren():
            print_obj(image, 4)

# Retrieve the datasets owned by the user currently logged in:
# =================================================================
# Here we create an omero.sys.ParametersI instance which we
# can use to filter the results that are returned. If we did
# not pass the params argument to getObjects, then all Datasets
# in the current group would be returned.
print "\nList Datasets:"
print "=" * 50
class OMEROConnection(object):
    """Context-manager wrapper around a BlitzGateway connection.

    Connection coordinates are read from the 'configs' mapping using the
    CONNECT_WITH key to pick between configured servers.  Use as:
    ``with OMEROConnection(args, configs) as conn: ...``
    """

    def __init__(self, args, configs):
        self.args = args
        self.configs = configs
        # Which configured server block to use (e.g. LOCAL/REMOTE).
        self.connect_with = self.configs['CONNECT_WITH']
        self.host = self.configs['OMERO_{}_HOST'.format(self.connect_with)]
        self.user = self.configs['OMERO_{}_USER'.format(self.connect_with)]
        self.password = self.configs['OMERO_{}_PASSWORD'.format(
            self.connect_with)]
        self.port = self.configs['OMERO_{}_PORT'.format(self.connect_with)]

    def __enter__(self):
        from omero.gateway import BlitzGateway  # @UnresolvedImport
        self.conn = BlitzGateway(host=self.host, username=self.user,
                                 passwd=self.password, port=self.port)
        self.connected = self.conn.connect()
        # failed to connect
        if not self.connected:
            print_date(
                "Unable to connect to host '{}' on port {} using user '{}'".
                format(self.host, self.port, self.user))
            print_date("Check that server is up")
            sys.exit(1)
        # keepalive
        self.conn.c.enableKeepAlive(5)
        # Cache commonly used session objects/services.
        self.omero_user = self.conn.getUser()
        self.userId = self.omero_user.getId()
        self.updateService = self.conn.getUpdateService()
        self.roiService = self.conn.getRoiService()
        return self

    def __exit__(self, exc_type, exc_value, traceback):  # @UnusedVariable
        self.conn._closeSession()
        if self.args.verbose:
            print_date('Connection closed.')
        self.connected = False

    def list(self):
        """List images or ROIs

        :return int count: the number of images/ROIs
        """
        if self.args.images:
            print_date("Listing images...")
            images = self.images()
            if self.args.summary:
                pass
            else:
                print_date("Structuring output...")
                image_view = ImageView(images)
                print(image_view)
            return len(images)
        elif self.args.rois:
            print_date("Listing ROIs...")
            rois = self.rois()
            if self.args.summary:
                pass
            else:
                print_date("Structuring output...")
                roi_view = ROIView(rois)
                print(roi_view)
            return len(rois)

    @property
    def projects(self):
        # Either search by the name given on the command line or list all
        # projects owned by the current user.
        if self.args.project is not None:
            projects = self.conn.searchObjects(["Project"],
                                               self.args.project)
            return projects
        else:
            return self.conn.listProjects(self.userId)

    def datasets(self, project=None):
        """List the datasets associated with the current user

        :param project: a project
        :param bool in_project: are these datasets contained within a
            project?
        """
        datasets = list()
        if self.args.dataset is not None:
            datasets += self.conn.searchObjects(["Dataset"],
                                                self.args.dataset)
        else:
            if project is not None:
                projects = self.conn.searchObjects(["Project"], project)
                for p in projects:
                    datasets += p.listChildren()
            else:
                params = omero.sys.ParametersI()  # @UndefinedVariable
                params.exp(self.userId)
                datasets += self.conn.getObjects("Dataset", params=params)
        return datasets

    def images(self, project=None, dataset=None):
        """The list of images associated with the current user

        If the image ID is specified only the required image is returned.
        (The project and dataset are ignored.) Otherwise:

        - If a project object is provided all images in all datasets in
          the project are returned. (The dataset is ignored.)
        - If a project object and dataset object are provided then only
          those images in the project and dataset are return.
        - If no project object is provided but a dataset object is
          provided then only those images in the dataset are returned.

        :param str project: OMERO project name
        :param str dataset: OMERO dataset name
        :return list images: a list of OMERO ``Image`` objects
        """
        # assertions
        images = list()
        print_date("Retrieving images...")
        if self.args.image_id is not None:
            try:
                assert isinstance(self.args.image_id, int) or isinstance(
                    self.args.image_id, long)
            except AssertionError:
                print_date("Invalid type for image ID: {}".format(
                    type(self.args.image_id)))
                sys.exit(1)
            image = self.getImage(self.args.image_id)
            if image is not None:
                images.append(image)
        elif self.args.image_name is not None:
            images = self.conn.searchObjects(["Image"],
                                             self.args.image_name)
        else:
            if project is not None:  # project specified
                print_date(
                    "Searching for images in project '{}'".format(project))
                # get all projects matching
                projects = self.conn.searchObjects(["Project"], project)
                # get all datasets in projects matching
                datasets_in_projects = dict()
                for p in projects:
                    for d in p.listChildren():
                        datasets_in_projects[d.getName()] = d
                print_date("Found {} datasets in project '{}'".format(
                    len(datasets_in_projects), project))
                # dataset specified
                if dataset is not None:
                    print_date(
                        "Searching for images in dataset '{}'".format(
                            dataset))
                    if dataset in datasets_in_projects.keys():
                        images += \
                            datasets_in_projects[dataset].listChildren()
                else:  # dataset not specified
                    print_date(
                        "Searching for images in all {} datasets".format(
                            len(datasets_in_projects)))
                    for dataset in datasets_in_projects.keys():
                        images += \
                            datasets_in_projects[dataset].listChildren()
            else:  # project not specified
                # dataset specified
                if dataset is not None:
                    print_date(
                        "Searching for images in dataset '{}'".format(
                            dataset))
                    datasets = self.conn.searchObjects(["Dataset"],
                                                       dataset)
                    for dataset in datasets:
                        images += dataset.listChildren()
                else:
                    datasets = self.datasets()
                    print_date(
                        "Searching for images in all {} datasets".format(
                            len(datasets)))
                    for dataset in datasets:
                        images += dataset.listChildren()
        print_date("Found {} image(s).".format(len(images)))
        return images

    def getImage(self, image_id):
        """Get the image with the image ID specified on the command line

        :param int image_id: command line arguments
        :return image: an image
        :rtype image: ``OMEROImage``
        """
        return self.conn.getObject("Image", image_id)

    def rois(self, project=None, dataset=None):
        """Get an iterator over the ROIs associated with the specified
        image ID

        :param int image_id: image ID
        """
        rois = list()
        print_date("Retrieving ROIs...")
        if self.args.image_id is not None:
            return self.getROIs(self.args.image_id)
        else:
            for image in self.images(project, dataset):
                if image.getROICount() > 0:
                    rois.append((image, self.getROIs(image.getId())))
        roi_count = sum(map(lambda r: len(r[1]), rois))
        print_date("Found {:,} ROIs in {:,} images.".format(
            roi_count, len(rois)))
        return rois

    def getROIs(self, image_id):
        # Raw ROI lookup via the ROI service.
        result = self.roiService.findByImage(image_id, None)
        return result.rois

    def attachRois(self, omero_rois):
        """Attach the rois from the iterable"""
        non_rois = filter(lambda r: not isinstance(r, OMEROROI),
                          omero_rois)
        try:
            assert len(non_rois) == 0
        except AssertionError:
            print_date("Found {:,} non-ROI objects".format(len(non_rois)))
            return 1
        for roi in omero_rois:
            self.saveRoi(roi)
        return os.EX_OK

    # save
    def saveRoi(self, roi):
        """Save the given ROI

        :param roi: an ROI object
        :type roi: `omero.model.Roi`
        """
        import Ice
        try:
            self.updateService.saveObject(roi)
        except Ice.MemoryLimitException as e:  # @UndefinedVariable
            print_date(str(e))
            sys.exit(1)

    # delete
    def deleteRoi(self, roi_id):
        """
        Delete the given ROI

        :param roi: an ROI object
        :type roi: `omero.model.Roi`
        """
        from omero.callbacks import CmdCallbackI  # @UnresolvedImport
        handle = self.conn.deleteObjects("Roi", [roi_id], deleteAnns=True,
                                         deleteChildren=True)
        callback = CmdCallbackI(self.conn.c, handle)
        # Poll until the server-side delete completes.
        while not callback.block(500):
            if self.args.verbose:
                print_date(".", newline=False, incl_date=False)
            time.sleep(2)
        callback.close(True)
class Omero(Observable): """Run a process for a pipeline Run a process from data in an Experiment and save the results in new ProcessedDataset in the experiment Parameters ---------- host Adresse of the Omero server port Port used to communicate with the server username Username to authenticate to the database password User password """ def __init__(self, host: str, port: int, username: str, password: str): Observable.__init__(self) self.conn = None self._host = host self._connect(host, port, username, password) def close(self): self.conn.close() def _connect(self, host: str, port: int, username: str, password: str): self.conn = BlitzGateway(username, password, host=host, port=port, secure=True) rv = self.conn.connect() if not rv: print("Unable to connect to the Omero database") else: user = self.conn.getUser() print("Current user:"******" ID:", user.getId()) print(" Username:"******" Full Name:", user.getFullName()) def import_dataset(self, experiment: Experiment, omero_dataset_id: int): dataset = self.conn.getObject("Dataset", omero_dataset_id) image_count = dataset.countChildren() k = 0 for image in dataset.listChildren(): k += 1 self.notify_observers( 100 * k / image_count, 'import image ' + str(k) + '/' + str(image_count)) self._import_image(experiment, image) self.notify_observers(100, 'Done') def import_image(self, experiment: Experiment, omero_image_id: int): image = self.conn.getObject("Image", omero_image_id) self._import_image(experiment, image) def _import_image(self, experiment: Experiment, image): # read metadata from omero rawdatasetdir = os.path.dirname(experiment.md_uri) filename = os.path.join(rawdatasetdir, 'data', image.getName()) author = image.getAuthor() date = image.getDate().strftime('%Y-%m-%d %I:%M %S %p') extension = os.path.splitext(image.getName())[1][1:] tags = dict() for ann in image.listAnnotations(): if ann.OMERO_TYPE == omero.model.MapAnnotationI: for kv in ann.getValue(): tags[kv[0]] = kv[1] # print('readed tags:') # print('\t 
author:', author) # print('\t date:', date) # print('\t extension:', extension) # print('\t tags:', tags) # write metadata to the experiment experiment.import_data( filename, image.getName(), author=author, format=extension, date=date, tags=tags, copy=False, ) # register tag to experiment if not exists for tag in tags: experiment.set_tag(tag, False) # copy image channel = 0 imageData = _get_data(image, channel) tif = TIFF.open(filename, mode='w') for t in range(imageData.shape[0]): for z in range(imageData.shape[1]): tif.write_image(imageData[t, z, :, :]) tif.close()
""" print("""%s%s:%s Name:"%s" (owner=%s)""" % (" " * indent, obj.OMERO_CLASS, obj.getId(), obj.getName(), obj.getOwnerOmeName())) # List all Projects owned by the user currently logged in # ======================================================= # By default this returns Projects from all owners across # all groups. We can filter by group and owner using the # optional opts dict (new in 5.3.0) # We also order by name and use 'limit' and 'offset', # to load the first 5 Projects print("\nList Projects:") print("=" * 50) my_exp_id = conn.getUser().getId() default_group_id = conn.getEventContext().groupId for project in conn.getObjects("Project", opts={ 'owner': my_exp_id, 'group': default_group_id, 'order_by': 'lower(obj.name)', 'limit': 5, 'offset': 0 }): print_obj(project) assert project.getDetails().getOwner().id == my_exp_id # We can get Datasets with listChildren, since we have the Project already. # Or conn.getObjects("Dataset", opts={'project', id}) if we have Project ID for dataset in project.listChildren(): print_obj(dataset, 2)