def connect_to_omero(user, password, host, port=4064):
    conn = BlitzGateway(user, password, host=host, port=port)
    print conn.connect()
    user = conn.getUser()
    print "Current user:"******"   ID:", user.getId()
    print "   Username:"******"   Full Name:", user.getFullName()
    print "Member of:"
    for g in conn.getGroupsMemberOf():
        print "   ID:", g.getName(), " Name:", g.getId()
    group = conn.getGroupFromContext()
    print "Current group: ", group.getName()
    return conn
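
A minimal usage sketch for the helper above; the host name and credentials are
placeholders, not values from the original example:

conn = connect_to_omero("username", "password", "omero.example.org")
try:
    pass  # ... work with the connection here ...
finally:
    conn.close()  # a BlitzGateway connection should be closed when done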
Example #3
class TestTree(lib.ITest):
    """
    Tests to ensure that OMERO.web "tree" infrastructure is working
    correctly.

    These tests make __extensive__ use of pytest fixtures.  In particular,
    they rely on the scoping semantics that allow re-use of instances
    populated by the *request fixtures.  Before modifying these tests or
    attempting to fix failures, it is recommended to study the pytest
    fixture documentation in detail (see also the standalone sketch after
    this example):

     * https://pytest.org/latest/fixture.html
    """

    @classmethod
    def setup_class(cls):
        """Returns a logged in Django test client."""
        super(TestTree, cls).setup_class()
        cls.names = ('Apple', 'bat', 'atom', 'Butter')

    def setup_method(self, method):
        """Create a fresh client, gateway connection and update service."""
        self.client = self.new_client(perms='rwr---')
        self.conn = BlitzGateway(client_obj=self.client)
        self.update = self.client.getSession().getUpdateService()

    @pytest.fixture
    def projects(self):
        """
        Returns four new OMERO Projects with required fields set and with
        names that can be used to exercise sorting semantics.
        """
        to_save = [ProjectI(), ProjectI(), ProjectI(), ProjectI()]
        for index, project in enumerate(to_save):
            project.name = rstring(self.names[index])
        return self.update.saveAndReturnArray(to_save)

    @pytest.fixture
    def project_dataset(self):
        """
        Returns a new OMERO Project and linked Dataset with required fields
        set.
        """
        project = ProjectI()
        project.name = rstring(self.uuid())
        dataset = DatasetI()
        dataset.name = rstring(self.uuid())
        project.linkDataset(dataset)
        return self.update.saveAndReturnObject(project)

    @pytest.fixture
    def project_dataset_image(self):
        """
        Returns a new OMERO Project, linked Dataset and linked Image populated
        by an L{test.integration.library.ITest} instance with required fields
        set.
        """
        project = ProjectI()
        project.name = rstring(self.uuid())
        dataset = DatasetI()
        dataset.name = rstring(self.uuid())
        image = self.new_image(name=self.uuid())
        dataset.linkImage(image)
        project.linkDataset(dataset)
        return self.update.saveAndReturnObject(project)

    @pytest.fixture
    def projects_datasets(self):
        """
        Returns four new OMERO Projects and four linked Datasets with required
        fields set and with names that can be used to exercise sorting
        semantics.
        """
        projects = [ProjectI(), ProjectI(), ProjectI(), ProjectI()]
        for p_index, project in enumerate(projects):
            project.name = rstring(self.names[p_index])
            datasets = [DatasetI(), DatasetI(), DatasetI(), DatasetI()]
            for d_index, dataset in enumerate(datasets):
                dataset.name = rstring(self.names[d_index])
                project.linkDataset(dataset)
        return self.update.saveAndReturnArray(projects)

    @pytest.fixture
    def datasets(self):
        """
        Returns four new OMERO Datasets with required fields set and with
        names that can be used to exercise sorting semantics.
        """
        to_save = [DatasetI(), DatasetI(), DatasetI(), DatasetI()]
        for index, dataset in enumerate(to_save):
            dataset.name = rstring(self.names[index])
        # Non-orphaned Dataset to catch issues with queries where non-orphaned
        # datasets are included in the results.
        project = ProjectI()
        project.name = rstring(self.uuid())
        dataset = DatasetI()
        dataset.name = rstring(self.uuid())
        project.linkDataset(dataset)
        self.update.saveAndReturnObject(project)
        return self.update.saveAndReturnArray(to_save)

    @pytest.fixture
    def datasets_different_users(self):
        """
        Returns two new OMERO Datasets created by different users with
        required fields set.
        """
        client = self.conn.c
        group = self.conn.getGroupFromContext()._obj
        datasets = list()
        # User that has already been created by the "client" fixture
        user, name = self.user_and_name(client)
        self.add_experimenters(group, [user])
        for name in (rstring(self.uuid()), rstring(self.uuid())):
            client, user = self.new_client_and_user(group=group)
            try:
                dataset = DatasetI()
                dataset.name = name
                update_service = client.getSession().getUpdateService()
                datasets.append(update_service.saveAndReturnObject(dataset))
            finally:
                client.closeSession()
        return datasets

    @pytest.fixture
    def screens(self):
        """
        Returns four new OMERO Screens with required fields set and with names
        that can be used to exercise sorting semantics.
        """
        to_save = [ScreenI(), ScreenI(), ScreenI(), ScreenI()]
        for index, screen in enumerate(to_save):
            screen.name = rstring(self.names[index])
        return self.update.saveAndReturnArray(to_save)

    @pytest.fixture
    def screens_different_users(self):
        """
        Returns two new OMERO Screens created by different users with
        required fields set.
        """
        client = self.conn.c
        group = self.conn.getGroupFromContext()._obj
        screens = list()
        # User that has already been created by the "client" fixture
        user, name = self.user_and_name(client)
        self.add_experimenters(group, [user])
        for name in (rstring(self.uuid()), rstring(self.uuid())):
            client, user = self.new_client_and_user(group=group)
            try:
                screen = ScreenI()
                screen.name = name
                update_service = client.getSession().getUpdateService()
                screens.append(update_service.saveAndReturnObject(screen))
            finally:
                client.closeSession()
        return screens

    @pytest.fixture
    def screen_plate_run(self):
        """
        Returns a new OMERO Screen, linked Plate, and linked PlateAcquisition
        with all required fields set.
        """
        screen = ScreenI()
        screen.name = rstring(self.uuid())
        plate = PlateI()
        plate.name = rstring(self.uuid())
        plate_acquisition = PlateAcquisitionI()
        plate.addPlateAcquisition(plate_acquisition)
        screen.linkPlate(plate)
        return self.update.saveAndReturnObject(screen)

    @pytest.fixture
    def screens_plates_runs(self):
        """
        Returns two new OMERO Screens, each with two linked Plates that each
        have two linked PlateAcquisitions, with all required fields set.
        """
        screens = [ScreenI(), ScreenI()]
        for screen in screens:
            screen.name = rstring(self.uuid())
            plates = [PlateI(), PlateI()]
            for plate in plates:
                plate.name = rstring(self.uuid())
                plate_acquisitions = [
                    PlateAcquisitionI(), PlateAcquisitionI()]
                for plate_acquisition in plate_acquisitions:
                    plate.addPlateAcquisition(plate_acquisition)
                screen.linkPlate(plate)
        return self.update.saveAndReturnArray(screens)

    @pytest.fixture
    def screen_plate(self):
        """
        Returns a new OMERO Screen and linked Plate with required fields set.
        """
        screen = ScreenI()
        screen.name = rstring(self.uuid())
        plate = PlateI()
        plate.name = rstring(self.uuid())
        screen.linkPlate(plate)
        return self.update.saveAndReturnObject(screen)

    @pytest.fixture
    def screens_plates(self):
        """
        Returns four new OMERO Screens and four linked Plates with required
        fields set and with names that can be used to exercise sorting
        semantics.
        """
        screens = [ScreenI(), ScreenI(), ScreenI(), ScreenI()]
        for s_index, screen in enumerate(screens):
            screen.name = rstring(self.names[s_index])
            plates = [PlateI(), PlateI(), PlateI(), PlateI()]
            for p_index, plate in enumerate(plates):
                plate.name = rstring(self.names[p_index])
                screen.linkPlate(plate)
        return self.update.saveAndReturnArray(screens)

    @pytest.fixture
    def plates_runs(self):
        """
        Returns four new OMERO Plates, each with two linked PlateAcquisitions,
        with required fields set and with names that can be used to exercise
        sorting semantics.
        """
        plates = [PlateI(), PlateI(), PlateI(), PlateI()]
        for index, plate in enumerate(plates):
            plate.name = rstring(self.names[index])
            plate_acquisitions = [PlateAcquisitionI(), PlateAcquisitionI()]
            for plate_acquisition in plate_acquisitions:
                plate.addPlateAcquisition(plate_acquisition)
        # Non-orphaned Plate to catch issues with queries where non-orphaned
        # plates are included in the results.
        screen = ScreenI()
        screen.name = rstring(self.uuid())
        plate = PlateI()
        plate.name = rstring(self.uuid())
        screen.linkPlate(plate)
        self.update.saveAndReturnObject(screen)
        return self.update.saveAndReturnArray(plates)

    @pytest.fixture
    def plate_run(self):
        """
        Returns a new OMERO Plate and linked PlateAcquisition with all
        required fields set.
        """
        plate = PlateI()
        plate.name = rstring(self.uuid())
        plate_acquisition = PlateAcquisitionI()
        plate.addPlateAcquisition(plate_acquisition)
        return self.update.saveAndReturnObject(plate)

    @pytest.fixture
    def plate(self):
        """
        Returns a new OMERO Plate with all required fields set.
        """
        plate = PlateI()
        plate.name = rstring(self.uuid())
        return self.update.saveAndReturnObject(plate)

    @pytest.fixture
    def plates_different_users(self):
        """
        Returns two new OMERO Plates created by different users with
        required fields set.
        """
        client = self.conn.c
        group = self.conn.getGroupFromContext()._obj
        plates = list()
        # User that has already been created by the "client" fixture
        user, name = self.user_and_name(client)
        self.add_experimenters(group, [user])
        for name in (rstring(self.uuid()), rstring(self.uuid())):
            client, user = self.new_client_and_user(group=group)
            try:
                plate = PlateI()
                plate.name = name
                update_service = client.getSession().getUpdateService()
                plates.append(update_service.saveAndReturnObject(plate))
            finally:
                client.closeSession()
        return plates

    @pytest.fixture
    def projects_different_users(self):
        """
        Returns two new OMERO Projects created by different users with
        required fields set.
        """
        client = self.conn.c
        group = self.conn.getGroupFromContext()._obj
        projects = list()
        # User that has already been created by the "client" fixture
        user, name = self.user_and_name(client)
        self.add_experimenters(group, [user])
        for name in (rstring(self.uuid()), rstring(self.uuid())):
            client, user = self.new_client_and_user(group=group)
            try:
                project = ProjectI()
                project.name = name
                update_service = client.getSession().getUpdateService()
                projects.append(update_service.saveAndReturnObject(project))
            finally:
                client.closeSession()
        return projects

    def test_marshal_project_dataset(self, project_dataset):
        project_id = project_dataset.id.val
        dataset, = project_dataset.linkedDatasetList()
        perms_css = 'canEdit canAnnotate canLink canDelete isOwned canChgrp'
        expected = [{
            'id': project_id,
            'childCount': 1L,
            'name': project_dataset.name.val,
            'datasets': [{
                'childCount': 0L,
                'id': dataset.id.val,
                'name': dataset.name.val,
                'permsCss': perms_css
            }],
            'permsCss': perms_css
        }]
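
A standalone sketch of the pytest fixture-injection pattern referred to in the
class docstring above; the fixture and assertion are illustrative only and are
not part of the OMERO test suite:

import pytest

NAMES = ('Apple', 'bat', 'atom', 'Butter')


@pytest.fixture
def names():
    """Return names chosen to exercise case-insensitive sorting."""
    return list(NAMES)


def test_case_insensitive_sort(names):
    # pytest injects the 'names' fixture by matching the argument name.
    assert sorted(names, key=lambda n: n.lower()) == \
        ['Apple', 'atom', 'bat', 'Butter']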
Example #4
    # Current session details
    # =============================================================
    # By default, you will have logged into your 'current' group in OMERO. This
    # can be changed by switching group in the OMERO.insight or OMERO.web clients.

    user = conn.getUser()
    print "Current user:"******"   ID:", user.getId()
    print "   Username:"******"   Full Name:", user.getFullName()

    print "Member of:"
    for g in conn.getGroupsMemberOf():
        print "   ID:", g.getName(), " Name:", g.getId()
    group = conn.getGroupFromContext()
    print "Current group: ", group.getName()

    print "Other Members of current group:"
    for exp in conn.listColleagues():
        print "   ID:", exp.getId(), exp.getOmeName(), " Name:", exp.getFullName()

    print "Owner of:"
    for g in conn.listOwnedGroups():
        print "   ID:", g.getName(), " Name:", g.getId()

    # New in OMERO 5
    print "Admins:"
    for exp in conn.getAdministrators():
        print "   ID:", exp.getId(), exp.getOmeName(), " Name:", exp.getFullName()
Example #5
    # Current session details
    # =============================================================
    # By default, you will have logged into your 'current' group in OMERO. This
    # can be changed by switching group in the OMERO insight or web clients.

    user = conn.getUser()
    print "Current user:"******"   ID:", user.getId()
    print "   Username:"******"   Full Name:", user.getFullName()

    print "Member of:"
    for g in conn.getGroupsMemberOf():
        print "   ID:", g.getName(), " Name:", g.getId()
    group = conn.getGroupFromContext()
    print "Current group: ", group.getName()

    print "Other Members of current group:"
    for exp in conn.listColleagues():
        print "   ID:", exp.getId(), exp.getOmeName(
        ), " Name:", exp.getFullName()

    print "Owner of:"
    for g in conn.listOwnedGroups():
        print "   ID:", g.getName(), " Name:", g.getId()

    # The 'context' of our current session
    ctx = conn.getEventContext()
    # print ctx     # for more info
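
    # A hedged sketch of inspecting the event context: the attribute names
    # below are the usual omero.sys.EventContext fields and should be checked
    # against the server version in use.
    print "Session UUID:", ctx.sessionUuid
    print "User:", ctx.userName, "(ID:", ctx.userId, ")"
    print "Group:", ctx.groupName, "(ID:", ctx.groupId, ")"
    print "Is admin:", ctx.isAdmin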
Example #6
    print "   Username:"******"   Full Name:", user.getFullName()

    if args.group is not None:
        print("change group")
        new_group = args.group
        groups = [g.getName() for g in conn.listGroups()]
        print(groups)
        if new_group not in groups:
            print("{} not found in groups:".format(new_group))
            for gn in groups:
                print("    {}".format(gn))
            sys.exit(1)
        else:
            conn.setGroupNameForSession(new_group)
    print conn.getGroupFromContext().getName()
    path = os.getcwd()

    mdict = setup_dict()

    def package_file(mdict, conn, group, Id, path, name):
        print "processing {} {} as: {}/{}".format(group, Id, path, name)
        groups = [g.getName() for g in conn.listGroups()]
        if group not in groups:
            print "    no group found"
            return
        if group != conn.getGroupFromContext().getName():
            conn.setGroupNameForSession(group)

        download_file(conn, Id, path, name)
        file_dict, thumb_dict = json_metadata(conn, Id, path, name)
Example #7
class Omg(object):
    """
    OMERO gateway that wraps Blitz gateway and CLI, intended for
    scripting and interactive work.

    Attributes
    ----------
    conn : Blitz gateway connection

    """

    def __init__(self, conn=None, user=None, passwd=None,
                 server=SERVER, port=PORT, skey=None):
        """
        Requires active Blitz connection OR username plus password or sesskey
        """
        if conn is None and (user is None or (passwd is None and skey is None)):
            raise ValueError("Bad parameters," + self.__init__.__doc__)
        if conn is not None:
            if conn.isConnected():
                self.conn = conn
            else:
                raise ValueError("Cannot initialize with closed connection!")
        else:
            if passwd is not None:
                self.conn = BlitzGateway(user, passwd, host=server, port=port)
                self.conn.connect()
            else:
                self.conn = BlitzGateway(user, host=server, port=port)
                self.conn.connect(skey)
        if self.conn.isConnected():
            self._server = self.conn.host
            self._port = self.conn.port
            self._user = self.conn.getUser().getName()
            self._key = self.conn.getSession().getUuid().getValue()
            print("Connected to {0} (port {1}) as {2}, session key={3}".format(
                  self._server, self._port, self._user, self._key))
        else:
            print("Failed to open connection :-(")

    def ls(self):
        """
        Print groups, then projects/datasets/images for current group.
        """
        print("Groups for {0}:-".format(self.conn.getUser().getName()))
        for gid, gname in self._ls_groups():
            print("  {0} ({1})".format(gname, str(gid)))
        curr_grp = self.conn.getGroupFromContext()
        gid, gname = curr_grp.getId(), curr_grp.getName()
        print("\nData for current group, {0} ({1}):-".format(gname, gid))
        for pid, pname in self._ls_projects():
            print("  Project: {0} ({1})".format(pname, str(pid)))
            for did, dname in self._ls_datasets(pid):
                print("    Dataset: {0} ({1})".format(dname, str(did)))
                for iid, iname in self._ls_images(did):
                    print("      Image: {0} ({1})".format(iname, str(iid)))
        # TODO, list orphaned Datasets and Images

    def _ls_groups(self):
        """list groups (id, name) this session is a member of"""
        groups = self.conn.getGroupsMemberOf()
        return [(group.getId(), group.getName()) for group in groups]

    def _ls_projects(self):
        """list projects (id, name) in the current session group"""
        projs = self.conn.listProjects(self.conn.getUserId())
        return [(proj.getId(), proj.getName()) for proj in projs]

    def _ls_datasets(self, proj_id):
        """list datasets (id, name) within the project id given"""
        dsets = self.conn.getObject("Project", proj_id).listChildren()
        return [(dset.getId(), dset.getName()) for dset in dsets]

    def _ls_images(self, dset_id):
        """list images (id, name) within the dataset id given"""
        imgs = self.conn.getObject("Dataset", dset_id).listChildren()
        return [(img.getId(), img.getName()) for img in imgs]

    def chgrp(self, group_id):
        """
        Change group for this session to the group_id given.
        """
        self.conn.setGroupForSession(group_id)

    def get(self, im_id, get_att=True):
        """
        Download the specified image as an OME-TIFF to current directory,
        with attachments also downloaded to folder: img_path + '_attachments'
        Return : path to downloaded image
        """
        img = self.conn.getObject("Image", oid=im_id)
        img_name = self._unique_name(img.getName(), im_id)
        img_path = os.path.join(os.getcwd(), img_name)
        img_file = open(str(img_path + ".ome.tiff"), "wb")
        fsize, blockgen = img.exportOmeTiff(bufsize=65536)
        for block in blockgen:
            img_file.write(block)
        img_file.close()
        fa_type = omero.model.FileAnnotationI
        attachments = [ann for ann in img.listAnnotations()
                       if ann.OMERO_TYPE == fa_type]
        if get_att and len(attachments) > 0:
            att_dir = img_path + "_attachments"
            os.mkdir(att_dir)

            def download_attachment(att, att_dir):
                """download OMERO file annotation to att_dir"""
                att_file = open(os.path.join(att_dir, att.getFileName()), "wb")
                for att_chunk in att.getFileInChunks():
                    att_file.write(att_chunk)
                att_file.close()

            for att in attachments:
                download_attachment(att, att_dir)
        return img_path

    def _unique_name(self, img_name, im_id):
        """Make unique name combining a file basename & OMERO Image id"""
        path_and_base, ext = os.path.splitext(img_name)
        base = os.path.basename(path_and_base)  # name in OMERO can contain a path
        return "{0}_{1}".format(base, str(im_id))

    def dget(self, dataset_id):
        """
        Download an entire OMERO Dataset to the current directory.
        """
        downloads = []
        wdir = os.getcwd()
        dset_name = self.conn.getObject("Dataset", dataset_id).getName()
        dset_path = os.path.join(wdir, dset_name + "_D" + str(dataset_id))
        os.mkdir(dset_path)
        os.chdir(dset_path)
        for img_id, img_name in self._ls_images(dataset_id):
            downloads.append(self.get(img_id))
        os.chdir(wdir)
        return downloads

    def pget(self, project_id):
        """
        Download an entire OMERO Project to the current directory.
        """
        downloads = []
        wdir = os.getcwd()
        proj_name = self.conn.getObject("Project", project_id).getName()
        proj_path = os.path.join(wdir, proj_name + "_P" + str(project_id))
        os.mkdir(proj_path)
        os.chdir(proj_path)
        for dset_id, dset_name in self._ls_datasets(project_id):
            downloads.extend(self.dget(dset_id))
        os.chdir(wdir)
        return downloads

    def put(self, filename, name=None, dataset=None):
        """
        Import filename using OMERO CLI, optionally with a specified name
        to a specified dataset (dataset_id).
        Return : OMERO image Id
        """
        cli = omero.cli.CLI()
        cli.loadplugins()
        import_args = ["import"]
        import_args.extend(["-s", str(self._server)])
        import_args.extend(["-k", str(self._key)])
        if dataset is not None:
            import_args.extend(["-d", str(dataset)])
        if name is not None:
            import_args.extend(["-n", str(name)])
        clio = "cli.out"
        clie = "cli.err"
        import_args.extend(["---errs=" + clie, "---file=" + clio, "--"])
        import_args.append(filename)
        cli.invoke(import_args, strict=True)
        pix_id = int(open(clio, 'r').read().rstrip())
        im_id = self.conn.getQueryService().get("Pixels", pix_id).image.id.val
        os.remove(clio)
        os.remove(clie)
        return im_id

    def describe(self, im_id, description):
        """
        Append to image description.
        """
        img = self.conn.getObject("Image", oid=im_id)
        old_description = img.getDescription() or ""
        img.setDescription(old_description + "\n" + description)
        img.save()

    def attach(self, im_id, attachments):
        """
        Attach a list of files to an image.
        """
        img = self.conn.getObject("Image", oid=im_id)
        for attachment in attachments.split():
            fann = self.conn.createFileAnnfromLocalFile(attachment)
            img.linkAnnotation(fann)
        img.save()

    # TODO: ls_tags() and tag() methods?

    def mkp(self, project_name, description=None):
        """
        Make new OMERO project in current group, returning the new project Id.
        """
        # see: omero/lib/python/omeroweb/webclient/controller/container.py
        proj = omero.model.ProjectI()
        proj.name = omero.rtypes.rstring(str(project_name))
        if description is not None and description != "":
            proj.description = omero.rtypes.rstring(str(description))
        return self._save_and_return_id(proj)

    def mkd(self, dataset_name, project_id=None, description=None):
        """
        Make new OMERO dataset, returning the new dataset Id.
        """
        dset = omero.model.DatasetI()
        dset.name = omero.rtypes.rstring(str(dataset_name))
        if description is not None and description != "":
            dset.description = omero.rtypes.rstring(str(description))
        if project_id is not None:
            l_proj_dset = omero.model.ProjectDatasetLinkI()
            proj = self.conn.getObject("Project", project_id)
            l_proj_dset.setParent(proj._obj)
            l_proj_dset.setChild(dset)
            dset.addProjectDatasetLink(l_proj_dset)
        return self._save_and_return_id(dset)

    def _save_and_return_id(self, obj):
        """Save new omero object and return id assgined to it"""
        # see: OmeroWebGateway.saveAndReturnId
        # in: lib/python/omeroweb/webclient/webclient_gateway.py
        u_s = self.conn.getUpdateService()
        res = u_s.saveAndReturnObject(obj, self.conn.SERVICE_OPTS)
        res.unload()
        return res.id.val

    def im(self, im_id):
        """
        Return an Im object for the image id specified.
        """
        img = self.conn.getObject("Image", im_id)
        # build pixel np.ndarray
        nx, ny = img.getSizeX(), img.getSizeY()
        nz, nt, nc = img.getSizeZ(), img.getSizeT(), img.getSizeC()
        planes = [(z, c, t) for c in range(nc)
                  for t in range(nt)
                  for z in range(nz)]
        pix_gen = img.getPrimaryPixels().getPlanes(planes)
        pix = np.array([i for i in pix_gen]).reshape((nc, nt, nz, ny, nx))
        # initialize Im using pix and extracted metadata
        meta = self._extract_meta(img, im_id)
        return Im(pix=pix, meta=meta)

    def _extract_meta(self, img, im_id):
        """Extract metadata attributes from OMERO Blitz gateway Image"""
        meta = {}
        meta['name'] = self._unique_name(img.getName(), im_id)
        meta['description'] = img.getDescription()

        def _extract_ch_info(ch):
            """extract core metadata for for channel, return as dict"""
            ch_info = {'label': ch.getLabel()}
            ch_info['ex_wave'] = ch.getExcitationWave()
            ch_info['em_wave'] = ch.getEmissionWave()
            ch_info['color'] = ch.getColor().getRGB()
            return ch_info

        meta['channels'] = [_extract_ch_info(ch) for ch in img.getChannels()]
        meta['pixel_size'] = {'x': img.getPixelSizeX(),
                              'y': img.getPixelSizeY(),
                              'z': img.getPixelSizeZ(),
                              'units': "um"}
        tag_type = omero.model.TagAnnotationI
        tags = [ann for ann in img.listAnnotations()
                if ann.OMERO_TYPE == tag_type]
        meta['tags'] = {tag.getValue() + " (" + str(tag.getId()) + ")":
                        tag.getDescription() for tag in tags}
        fa_type = omero.model.FileAnnotationI
        attachments = [ann for ann in img.listAnnotations()
                       if ann.OMERO_TYPE == fa_type]
        meta['attachments'] = [att.getFileName() + " (" + str(att.getId()) +
                               ")" for att in attachments]
        user_id = self.conn.getUser().getName() + " (" + \
            str(self.conn.getUser().getId()) + ") @" + self.conn.host
        meta_ext = {}
        meta_ext['user_id'] = user_id
        meta['meta_ext'] = meta_ext
        # TODO: ROIs, display settings?
        # objective: Image.loadOriginalMetadata()[1][find 'Lens ID Number'][1],
        return meta

    def imput(self, im, dataset_id=None):
        """
        Create a new OMERO Image using an Im object, returning new image id.
        """
        # see: omero/lib/python/omero/util/script_utils.py
        # see: omero/lib/python/omeroweb/webclient/webclient_gateway.py
        # see: https://gist.github.com/will-moore/4141708
        if not isinstance(im, Im):
            raise TypeError("first imput argument must be of type Im")
        nc, nt, nz, ny, nx = im.shape
        ch_nums = range(nc)
        q_s = self.conn.getQueryService()
        p_s = self.conn.getPixelsService()
        c_s = self.conn.getContainerService()
        u_s = self.conn.getUpdateService()
        pu_s = self.conn.c.sf.createRawPixelsStore()
        q_ptype = "from PixelsType as p where p.value='{0}'".format(
                  str(im.dtype))
        pixelsType = q_s.findByQuery(q_ptype, None)
        im_id = p_s.createImage(nx, ny, nz, nt, ch_nums, pixelsType,
                                im.name, im.description)
        img_i = c_s.getImages("Image", [im_id.getValue()], None)[0]
        img = self.conn.getObject("Image", im_id.getValue())
        pix_id = img_i.getPrimaryPixels().getId().getValue()
        pu_s.setPixelsId(pix_id, True)
        for c in range(nc):
            for t in range(nt):
                for z in range(nz):
                    plane = im.pix[c, t, z, :, :]
                    script_utils.uploadPlaneByRow(pu_s, plane, z, c, t)
        l_dset_im = omero.model.DatasetImageLinkI()
        dset = self.conn.getObject("Dataset", dataset_id)
        l_dset_im.setParent(dset._obj)
        l_dset_im.setChild(img._obj)
        self._update_meta(im, im_id)
        u_s.saveObject(l_dset_im, self.conn.SERVICE_OPTS)
        return im_id.getValue()

    def _update_meta(self, im, im_id):
        """Set OMERO Image metadata using Im metadata"""
def run():
    """
    Launch a (remote) Priism ER deconvolution job on a list of images.
    Results are imported back into the dataset of origin for each image.
    """

    # Build GUI dialog for user to choose images & update parameters
    client = script.client(
        "ER_Deconvolution.py", "ER deconvolution",

        script.String(
            "Data_Type", optional=False,
            grouping="1", values=[rstring('Image')], default="Image"),

        script.List(
            "IDs", optional=False,
            description="image IDs (must have original .dv file!)",
            grouping='2').ofType(rlong(0)),

        script.Int(
            "alpha", optional=False,
            description='regularization parameter "alpha" - try 1000-10000',
            grouping='3', default=job['par.alpha'], min=0),

        script.Float(
            "lambda f", optional=False,
            description='smoothing parameter "lambda f" - try 0.1-1.0',
            grouping='4', default=job['par.lamf'], min=0.0, max=1.0),

        script.Int(
            "iterations", optional=False,
            description="number of iterations - try 10-100",
            grouping='5', default=job['par.niter'], min=0),

        version="0.99",
        authors=["Graeme Ball"],
        institutions=["Dundee Imaging Facility"],
        contact="*****@*****.**"
    )

    try:
        tempdir = None
        input_image_ids = [int(n) for n in client.getInput("IDs", unwrap=True)]
        job['par.alpha'] = client.getInput("alpha", unwrap=True)
        job['par.lamf'] = client.getInput("lambda f", unwrap=True)
        job['par.niter'] = client.getInput("iterations", unwrap=True)

        conn = BlitzGateway(client_obj=client)
        user = str(conn.getUser().getName())
        group = str(conn.getGroupFromContext().getName())
        sid = client.getSessionId()

        # export images (must be .dv!) to shared / temp storage
        tempdir = mktempdir(user, TEMP)
        inputs = []
        for iid in input_image_ids:
            try:
                path = export_original_dvfile(conn, iid, tempdir)
                image = conn.getObject("Image", iid)
                fail(image is None, "No such image, ID=%d" % iid)
                did = image.getParent().getId()
                #did = image.getDataset().getId()
                inputs.append({'imageID': iid, 'path': path, 'datasetID': did})
            except RuntimeError as e:
                print "Fail: " + str(e)

        jobs = []
        for inp in inputs:
            command = dict(job)  # copy
            command['inputs'] = [inp]  # only 1 input image for this job
            jobs.append(json.dumps([command]))  # only 1 command for this job
        # N.B. '.jobs' file format more flexible than needed here
        # write jobs definition file (1 line json string per job)
        jobs_filepath = os.path.join(tempdir, jobs_filename)
        with open(jobs_filepath, 'w') as f:
            f.writelines(["%s\n" % j for j in jobs])

        # poll filesystem, checking for results
        client.enableKeepAlive(KEEPALIVE_PULSE)
        results_filepath = os.path.join(tempdir, results_filename)
        result_count = 0  # results .json file grows as results appear
        import_count = 0  # ensure we only attempt to import each result once
        tstart = time.time()
        while result_count < len(inputs) and (time.time() - tstart) < TIMEOUT:
            fail(not conn.isConnected(), "Connection lost!")
            alive_filepath = os.path.join(tempdir, alive_check_filename)
            with open(alive_filepath, 'w') as f:
                f.write("%f\n%d" % (time.time(), RESULTS_POLL_PULSE))
            time.sleep(RESULTS_POLL_PULSE)
            if os.path.exists(results_filepath):
                with open(results_filepath, 'r') as fr:
                    results = fr.readlines()  # 1 line json string per result
                    new_results = results[import_count:]
                    import_count += import_results(new_results, user, group,
                                                   sid, conn)
                    result_count = len(results)
        if result_count < len(inputs):
            print "Job timed out after %d seconds, %d results imported" % \
                (TIMEOUT, import_count)

    finally:
        if tempdir is not None and tempdir.startswith(TEMP):
            if os.path.exists(tempdir):
                shutil.rmtree(tempdir)  # we checked 'tempdir' is sane first!
        client.closeSession()
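
OMERO server-side scripts such as this one are normally executed through a
module-level guard; a minimal sketch, assuming no extra setup is required:

if __name__ == "__main__":
    run()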