Example #1
def datasets(request, project, conn=None, **kwargs):
    """Return {dataset: {name: Dataset, id:1}, image: {id: 2}}."""

    queryService = conn.getQueryService()

    params = ParametersI()
    params.addId(project)
    query = """select d from Dataset as d
               join fetch d.imageLinks imageLinks
               join fetch imageLinks.child
               join fetch d.projectLinks projectLinks
               where projectLinks.parent.id=:id 
            """
    result = queryService.findAllByQuery(query, params, conn.SERVICE_OPTS)
    data = []
    for d in result:
        for link in d.copyImageLinks():
            data.append({
                'dataset': {
                    'id': d.id.val,
                    'name': d.name.val
                },
                'image': {
                    'id': link.child.id.val
                }
            })
    return JsonResponse({'data': data})
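For comparison, the same Project-to-Image listing can be written without raw HQL, using the BlitzGateway helpers that other examples on this page rely on (conn.getObjects with an opts filter). A minimal sketch, assuming conn is a connected BlitzGateway and that the gateway supports the 'project' and 'dataset' opts filters:

def datasets_via_gateway(conn, project):
    # Sketch only; mirrors the JSON shape returned above.
    data = []
    for dataset in conn.getObjects('Dataset', opts={'project': project}):
        for image in conn.getObjects('Image', opts={'dataset': dataset.id}):
            data.append({
                'dataset': {'id': dataset.id, 'name': dataset.name},
                'image': {'id': image.id},
            })
    return data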
Example #2
    def find_images(self):
        session = self.client.getSession()
        query_service = session.getQueryService()
        dataset_ids = [v.id.val for v in self.project.linkedDatasetList()]

        image_counts_per_dataset = self.get_image_counts_per_dataset(
            query_service)

        for dataset_id in dataset_ids:
            if image_counts_per_dataset.get(dataset_id, 0) < 1:
                log.info(
                    'Skipping Dataset:%d Project:%d, contains no Images!' %
                    (dataset_id, self.project.id.val))
                continue

            offset = 0
            count = limit = 100
            params = ParametersI().addId(dataset_id).page(offset, limit)
            while count == limit:
                t0 = time.time()
                images = query_service.findAllByQuery(QUERY_IMAGES, params,
                                                      {'omero.group': '-1'})
                log.info('Found %d Images in Dataset:%d Project:%d (%dms)' %
                         (len(images), dataset_id, self.project.id.val,
                          (time.time() - t0) * 1000))
                count = len(images)
                offset += count
                params.page(offset, limit)
                for image in images:
                    yield image
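QUERY_IMAGES is a module-level constant that is not part of this excerpt. Since it is parameterised with a Dataset ID and paged with page(offset, limit), a plausible shape (an assumption, not the original constant) is:

# Hypothetical stand-in for the QUERY_IMAGES constant used above.
QUERY_IMAGES = (
    "select i from Image as i "
    "join i.datasetLinks as dil "
    "where dil.parent.id = :id "
    "order by i.id"
)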
Example #3
def get_script(request, script_name, conn):
    """Return a JS function to filter images by various params."""
    dataset_id = request.GET.get('dataset')
    plate_id = request.GET.get('plate')
    field_id = request.GET.get('field')
    image_ids = request.GET.getlist('image')
    if plate_id and field_id:
        img_ids = get_image_ids(conn, plate_id, field_id)
    elif dataset_id:
        objects = conn.getObjects('Image', opts={'dataset': dataset_id})
        img_ids = [i.id for i in objects]
    else:
        img_ids = [long(i) for i in image_ids]
    query_service = conn.getQueryService()

    if script_name == "ROI_count":
        # Want to get ROI count for images in plate

        # Get ROI counts
        params = ParametersI()
        params.addIds(img_ids)
        query = "select roi.image.id, count(roi.id) from Roi roi "\
                "where roi.image.id in (:ids) group by roi.image"
        p = query_service.projection(query, params, conn.SERVICE_OPTS)
        roi_counts = {}
        for i in p:
            roi_counts[i[0].val] = i[1].val
        min_count = min(roi_counts.values())
        max_count = max(roi_counts.values())

        # Return a JS function that will be passed an object
        # e.g. {'type': 'Image', 'id': 1}
        # and should return true or false
        f = """(function filter(data, params) {
            var roi_counts = %s;
            if (isNaN(params.count) || params.count == '') return true;
            if (params.operator === '=')
                return roi_counts[data.id] == params.count;
            if (params.operator === '<')
                return roi_counts[data.id] < params.count;
            if (params.operator === '>')
                return roi_counts[data.id] > params.count;
        })
        """ % json.dumps(roi_counts)

        filter_params = [
            {'name': 'operator',
             'type': 'text',
             'values': ['>', '=', '<'],
             'default': '>'},
            {'name': 'count',
             'type': 'number',
             'default': '',
             'title': '%s-%s' % (min_count, max_count)}
        ]
        return JsonResponse(
            {
                'f': f,
                'params': filter_params,
            })
Example #4
def get_perms(conn, object_type, object_id, object_owner_id, object_group_id,
              cache):

    # Attempt to get permissions which have previously been recorded for this
    # group, depending on whether or not the object is owned
    perms = cache.get((object_group_id.val, object_owner_id.val))

    # If no cache, query an object to get the permissions for this group and
    # object ownership
    if perms is None:
        params = ParametersI()
        params.addId(object_id)
        q = '''
            select obj from %s obj where obj.id = :id
            ''' % object_type
        qs = conn.getQueryService()
        obj = qs.projection(q, params, conn.SERVICE_OPTS)[0][0].val

        perms_obj = obj.details.permissions

        # To be compatible with parse_permissions_css, convert the required
        # fields to a dictionary
        restrictions = ('canEdit', 'canAnnotate', 'canLink', 'canDelete')
        perms = {}
        for r in restrictions:
            if getattr(perms_obj, r)():
                perms[r] = True

        # Cache the result
        cache[(object_group_id.val, object_owner_id.val)] = perms

    return perms
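A minimal calling sketch (assumed wiring): the owner and group IDs are rtypes, which is why get_perms unwraps them with .val, and cache is a plain dict shared across calls:

cache = {}
qs = conn.getQueryService()
image = qs.get('Image', image_id, conn.SERVICE_OPTS)  # image_id assumed to exist
perms = get_perms(conn, 'Image', image_id,
                  image.details.owner.id,   # rlong, unwrapped inside get_perms
                  image.details.group.id,   # rlong, unwrapped inside get_perms
                  cache)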
Example #5
    def import_pyramid_pre_fs(self, tmpdir):
        name = "test&sizeX=4000&sizeY=4000.fake"
        fakefile = tmpdir.join(name)
        fakefile.write('')
        pixels = self.import_image(filename=str(fakefile), skip="checksum")[0]
        id = long(float(pixels))
        # wait for the pyramid to be generated
        self.wait_for_pyramid(id)
        query_service = self.client.sf.getQueryService()
        pixels_service = self.client.sf.getPixelsService()
        orig_pix = query_service.findByQuery(
            "select p from Pixels p where p.id = :id",
            ParametersI().addId(id))
        orig_fs = query_service.findByQuery(
            "select f from Image i join i.fileset f where i.id = :id",
            ParametersI().addId(orig_pix.image.id.val))

        try:
            new_img = pixels_service.copyAndResizeImage(
                orig_pix.image.id.val, rint(4000), rint(4000), rint(1),
                rint(1), [0], None, True).val
            pix_id = unwrap(
                query_service.projection(
                    "select p.id from Image i join i.pixels p where i.id = :id",
                    ParametersI().addId(new_img)))[0][0]
            # This won't work, but we then have a pyramid without a fileset
            self.copyPixels(orig_pix, PixelsI(pix_id, False))
        except omero.InternalException:
            print "Cannot copy pixels for image %s" % orig_pix.image.id.val
        finally:
            self.delete([orig_fs])
        return pix_id
Example #6
    def createSynthetic(self):
        """ Create a image with archived files (i.e. pre-FS) """

        from omero.sys import ParametersI

        # Produce an FS image as our template
        orig_img = self.importMIF(name="reimport",
                                  sizeX=16,
                                  sizeY=16,
                                  with_companion=True)
        orig_img = orig_img[0]
        orig_pix = self.query.findByQuery(
            "select p from Pixels p where p.image.id = :id",
            ParametersI().addId(orig_img.id.val))
        orig_fs = self.query.findByQuery(
            "select f from Image i join i.fileset f where i.id = :id",
            ParametersI().addId(orig_img.id.val))

        try:
            new_img, new_pix = self.duplicateMIF(orig_img)
            self.copyPixels(orig_pix, new_pix)
            self.copyFiles(orig_img, new_img, new_pix)
            return new_img
        finally:
            self.delete("/Fileset", orig_fs)
Example #7
def get_perms(conn, object_type, object_id, object_owner_id, object_group_id, cache):

    # Attempt to get permissions which have previously been recorded for this
    # group, depending on whether or not the object is owned
    perms = cache.get((object_group_id.val, object_owner_id.val))

    # If no cache, query an object to get the permissions for this group and
    # object ownership
    if perms is None:
        params = ParametersI()
        params.addId(object_id)
        q = (
            """
            select obj from %s obj where obj.id = :id
            """
            % object_type
        )
        qs = conn.getQueryService()
        obj = qs.projection(q, params, conn.SERVICE_OPTS)[0][0].val

        perms_obj = obj.details.permissions

        # To be compatible with parse_permissions_css, convert the required
        # fields to a dictionary
        restrictions = ("canEdit", "canAnnotate", "canLink", "canDelete")
        perms = {}
        for r in restrictions:
            if getattr(perms_obj, r)():
                perms[r] = True

        # Cache the result
        cache[(object_group_id.val, object_owner_id.val)] = perms

    return perms
Example #8
def well_annotations(request, screen, conn=None, **kwargs):
    """
    Return Annotations on child Wells.
    JSON format same as for webclient/api/annotations/?type=map
    """

    ann_type = request.GET.get('type', None)

    # get wells in Screen
    queryService = conn.getQueryService()
    params = ParametersI()
    params.addId(screen)
    query = """select well.id from Well as well
               join well.plate plate
               join plate.screenLinks screenLinks
               where screenLinks.parent.id=:id
            """
    result = queryService.projection(query, params, conn.SERVICE_OPTS)
    iids = [r[0].val for r in result]
    anns, exps = marshal_annotations(conn,
                                     well_ids=iids,
                                     ann_type=ann_type,
                                     limit=100000)

    return JsonResponse({'annotations': anns, 'experimenters': exps})
Example #9
def index_projects(es, index, client, project_ids):
    session = client.getSession()
    query_service = session.getQueryService()

    for project_id in project_ids:
        log.info('Processing Project:%d' % project_id)

        params = ParametersI()
        params.addId(project_id)
        t0 = time.time()
        project = query_service.findByQuery(
            QUERY_PROJECT, params, {'omero.group': '-1'}
        )
        log.info(
            'Loaded Project:%d (%dms)' % (
                project_id, (time.time() - t0) * 1000
            )
        )

        if project is None:
            log.warn('Project:%d has no Datasets or Images!' % project_id)
            continue

        t0 = time.time()
        document = ProjectDocument(client, project)
        log.info(
            'Created document from Project:%d (%dms)' % (
                project_id, (time.time() - t0) * 1000
            )
        )
        if es is None:
            print document
            for image_document in document.image_documents:
                print image_document
            continue

        logging.info('Using Elasticsearch index: %s' % index)

        t0 = time.time()
        result = es.index(
            index=index,
            doc_type='project',
            id=project_id,
            body=json.dumps(document.document)
        )
        log.info(
            'Index complete: %r (%dms)' % (
                result, (time.time() - t0) * 1000
            )
        )

        t0 = time.time()
        result = helpers.bulk(
            es, image_document_index_actions(document, index)
        )
        log.info(
            'Index complete: %r (%dms)' % (
                result, (time.time() - t0) * 1000
            )
        )
Example #10
def image_annotations(request, project, conn=None, **kwargs):
    """
    Return Annotations on child Images.
    JSON format same as for webclient/api/annotations/?type=map
    """

    ann_type = request.GET.get('type', None)

    # get images in Project
    queryService = conn.getQueryService()
    params = ParametersI()
    params.addId(project)
    query = """select image.id from Image as image
               join image.datasetLinks datasetLinks
               join datasetLinks.parent as dataset
               join dataset.projectLinks projectLinks
               where projectLinks.parent.id=:id
            """
    result = queryService.projection(query, params, conn.SERVICE_OPTS)
    iids = [r[0].val for r in result]
    anns, exps = marshal_annotations(conn,
                                     image_ids=iids,
                                     ann_type=ann_type,
                                     limit=100000)

    return JsonResponse({'annotations': anns, 'experimenters': exps})
Example #11
def index_screens(es, index, client, screen_ids):
    session = client.getSession()
    query_service = session.getQueryService()

    for screen_id in screen_ids:
        log.info('Processing Screen:%d' % screen_id)

        params = ParametersI()
        params.addId(screen_id)
        t0 = time.time()
        plates = query_service.findAllByQuery(
            QUERY_PLATES, params, {'omero.group': '-1'}
        )
        log.info(
            'Loaded %d Plates from Screen:%d (%dms)' % (
                len(plates), screen_id, (time.time() - t0) * 1000
            )
        )
        for plate in plates:
            plate_id = plate.id.val
            t0 = time.time()
            document = PlateDocument(client, plate)
            log.info(
                'Created document from Plate:%d (%dms)' % (
                    plate_id, (time.time() - t0) * 1000
                )
            )
            if es is None:
                print document
                for well_document in document.well_documents:
                    print well_document
                continue

            logging.info('Using Elasticsearch index: %s' % index)

            t0 = time.time()
            result = es.index(
                index=index,
                doc_type='plate',
                id=plate_id,
                body=json.dumps(document.document)
            )
            log.info(
                'Index complete: %r (%dms)' % (
                    result, (time.time() - t0) * 1000
                )
            )

            t0 = time.time()
            result = helpers.bulk(
                es, well_document_index_actions(document, index)
            )
            log.info(
                'Index complete: %r (%dms)' % (
                    result, (time.time() - t0) * 1000
                )
            )
Example #12
def list_datasets(conn):
    print "\nList Datasets:"
    print "=" * 50
    params = ParametersI()
    params.exp(conn.getUser().getId())
    datasets = conn.getObjects("Dataset", params=params)
    for dataset in datasets:
        print "+Dataset: %s(%s)" % (dataset.getName(), dataset.getId())
        for image in dataset.listChildren():
            print "|-Image: %s(%s)" % (image.getName(), image.getId())
Example #13
 def get_image_counts_per_dataset(self, query_service):
     params = ParametersI()
     params.addId(self.project.id.val)
     t0 = time.time()
     image_counts = dict([
         (r[0].val, r[1].val) for r in query_service.projection(
             QUERY_IMAGE_COUNTS, params, {'omero.group': '-1'})
     ])
     log.info('Image counts: %s (%dms)' % (pformat(image_counts, indent=2),
                                           (time.time() - t0) * 1000))
     return image_counts
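QUERY_IMAGE_COUNTS is defined elsewhere in the module. Given that the projection above unpacks rows of (dataset ID, image count) for a single Project, a plausible definition (an assumption) is:

# Hypothetical stand-in for the QUERY_IMAGE_COUNTS constant used above.
QUERY_IMAGE_COUNTS = (
    "select d.id, count(distinct i.id) from Dataset as d "
    "join d.projectLinks as pdl "
    "left outer join d.imageLinks as dil "
    "left outer join dil.child as i "
    "where pdl.parent.id = :id "
    "group by d.id"
)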
Example #14
def get_well_ids(conn, plate_id):
    """Get well IDs for Plate"""
    conn.SERVICE_OPTS.setOmeroGroup('-1')
    query_service = conn.getQueryService()
    params = ParametersI()
    params.addId(plate_id)
    query = "select well.id "\
            "from Well well "\
            "where well.plate.id = :id"
    p = query_service.projection(query, params, conn.SERVICE_OPTS)
    return [i[0].val for i in p]
Example #15
def run():
    """
    """
    data_types = [rstring("Plate")]

    client = scripts.client(
        "Unlink_Images.py",
        "Unlink Images from a given Plate",

        scripts.String("Data_Type", optional=False, grouping="1",
                       description="The data type you want to work with.",
                       values=data_types,
                       default="Plate"),

        scripts.List("IDs", optional=False, grouping="2",
                     description="List of Plate IDs").ofType(rlong(0)),

        version="0.1",
        authors=["Chris Allan"],
        institutions=["Glencoe Software Inc."],
        contact="*****@*****.**",
    )

    try:
        script_params = {}
        for key in client.getInputKeys():
            if client.getInput(key):
                script_params[key] = client.getInput(key, unwrap=True)

        session = client.getSession()
        update_service = session.getUpdateService()
        query_service = session.getQueryService()

        count = 0
        for plate_id in script_params["IDs"]:
            params = ParametersI()
            params.addId(plate_id)
            plate = query_service.findByQuery(
                "SELECT p from Plate AS p "
                "LEFT JOIN FETCH p.wells as w "
                "LEFT JOIN FETCH w.wellSamples as ws "
                "WHERE p.id = :id", params)
            for well in plate.copyWells():
                count += well.sizeOfWellSamples()
                well.clearWellSamples()
            update_service.saveObject(plate)

        client.setOutput("Message", rstring(
            "Unlinking of %d Image(s) successful." % count))
    finally:
        client.closeSession()
Example #17
def stat_screens(query):

    tb = TableBuilder("Screen")
    tb.cols(["ID", "Plates", "Wells", "Images", "Planes", "Bytes"])

    plate_count = 0
    well_count = 0
    image_count = 0
    plane_count = 0
    byte_count = 0

    for study, screens in sorted(studies().items()):
        for screen, plates_expected in screens.items():
            params = ParametersI()
            params.addString("screen", screen)
            rv = unwrap(query.projection((
                "select s.id, count(distinct p.id), "
                "       count(distinct w.id), count(distinct i.id),"
                "       sum(cast(pix.sizeZ as long) * pix.sizeT * pix.sizeC), "
                "       sum(cast(pix.sizeZ as long) * pix.sizeT * pix.sizeC * "
                "           pix.sizeX * pix.sizeY * 2) "
                "from Screen s "
                "left outer join s.plateLinks spl "
                "left outer join spl.child as p "
                "left outer join p.wells as w "
                "left outer join w.wellSamples as ws "
                "left outer join ws.image as i "
                "left outer join i.pixels as pix "
                "where s.name = :screen "
                "group by s.id"), params))
            if not rv:
                tb.row(screen, "MISSING", "", "", "", "", "")
            else:
                for x in rv:
                    plate_id, plates, wells, images, planes, bytes = x
                    plate_count += plates
                    well_count += wells
                    image_count += images
                    if planes:
                        plane_count += planes
                    if bytes:
                        byte_count += bytes
                    else:
                        bytes = 0
                    if plates != len(plates_expected):
                        plates = "%s of %s" % (plates, len(plates_expected))
                    tb.row(screen, plate_id, plates, wells, images, planes,
                           filesizeformat(bytes))
    tb.row("Total", "", plate_count, well_count, image_count, plane_count,
           filesizeformat(byte_count))
    print str(tb.build())
Example #18
def get_dataset_image_ids(conn, dataset_id):
    """Get image IDs for images in Dataset"""

    conn.SERVICE_OPTS.setOmeroGroup('-1')
    query_service = conn.getQueryService()
    params = ParametersI()
    params.addId(dataset_id)
    query = "select img.id "\
            "from DatasetImageLink link "\
            "join link.child as img "\
            "where link.parent.id = :id"
    p = query_service.projection(query, params, conn.SERVICE_OPTS)
    img_ids = [i[0].val for i in p]
    return img_ids
Example #19
def get_project_image_ids(conn, project_id):
    """Get image IDs for images in Project"""
    conn.SERVICE_OPTS.setOmeroGroup('-1')
    query_service = conn.getQueryService()
    params = ParametersI()
    params.addId(project_id)
    query = "select link "\
            "from DatasetImageLink link "\
            "join fetch link.parent dataset "\
            "join fetch dataset.projectLinks plink "\
            "where plink.parent.id = :id "
    p = query_service.projection(query, params, conn.SERVICE_OPTS)
    img_ids = [i[0].val.child.id.val for i in p]
    return img_ids
Example #20
def list_microscopes(request, conn=None, **kwargs):

    params = ParametersI()
    params.addString('ns', wrap(JSON_FILEANN_NS))
    # q = """select new map(obj.id as id,
    #             obj.description as desc,
    #             o.firstName as firstName,
    #             o.lastName as lastName,
    #             e.time as time,
    #             f.name as name,
    #             obj as obj_details_permissions)
    #         from FileAnnotation obj
    #         join obj.details.owner as o
    #         join obj.details.creationEvent as e
    #         join obj.file.details as p
    #         join obj.file as f where obj.ns=:ns"""
    q = """select obj from FileAnnotation obj
            join obj.details.owner as o
            join obj.details.creationEvent as e
            join obj.file.details as p
            join obj.file as f where obj.ns=:ns"""

    qs = conn.getQueryService()
    # file_anns = qs.projection(q, params, conn.SERVICE_OPTS)
    file_annotations = qs.findAllByQuery(q, params, conn.SERVICE_OPTS)
    rsp = []
    for file_ann in file_annotations:
        print('fa', file_ann)
        fa_wrapper = FileAnnotationWrapper(conn, file_ann)
        file_wrapper = fa_wrapper.getFile()
        print('file_wrapper', file_wrapper)
        file_data = b"".join(list(file_wrapper.getFileInChunks()))
        print('file_data', file_data)
        json_data = json.loads(file_data.decode("utf-8"))
        # date = datetime.fromtimestamp(unwrap(fa['time'])/1000)
        # first_name = unwrap(file_ann)
        # last_name = unwrap(fa['lastName'])
        fig_file = {
            'id': file_ann.id.val,
            # 'name': unwrap(fa['name']),
            # 'description': unwrap(fa['desc']),
            # 'ownerFullName': "%s %s" % (first_name, last_name),
            # 'creationDate': time.mktime(date.timetuple()),
            # 'canEdit': fa['obj_details_permissions'].get('canEdit'),
            'microscope': json_data
        }
        rsp.append(fig_file)

    return JsonResponse({'data': rsp})
Example #21
 def test_remove_pyramids_check_thumbnails(self, tmpdir, capsys):
     """Test check that the thumbnail is correctly created"""
     name = "big&sizeX=3500&sizeY=3500&little=false.fake"
     img_id = self.import_pyramid(tmpdir, name)
     query_service = self.client.sf.getQueryService()
     pix = query_service.findByQuery(
         "select p from Pixels p where p.image.id = :id",
         ParametersI().addId(img_id))
     tb = self.client.sf.createThumbnailStore()
     id = pix.id.val
     thumb_hash = None
     try:
         thumbs = tb.getThumbnailByLongestSideSet(rint(64), [id],
                                                  {'omero.group': '-1'})
         assert len(thumbs) == 1
         thumb_hash = self.calculate_sha1(thumbs[id])
         # remove the pyramid and the thumbnail
         self.args += ["--endian=big"]
         self.cli.invoke(self.args, strict=True)
         out, err = capsys.readouterr()
         thumbs = tb.getThumbnailByLongestSideSet(rint(64), [id],
                                                  {'omero.group': '-1'})
         assert len(thumbs) == 1
         # A placeholder 'clock' thumbnail should be returned while the
         # pyramid is being generated
         digest = self.calculate_sha1(thumbs[id])
         assert digest != thumb_hash
         # The pyramid generation has now been triggered.
         self.wait_for_pyramid(id)
         thumbs = tb.getThumbnailByLongestSideSet(rint(64), [id],
                                                  {'omero.group': '-1'})
         digest = self.calculate_sha1(thumbs[id])
         # The thumbnail should now be back
         assert digest == thumb_hash
     finally:
         tb.close()
Example #22
    def testTiles(self):
        from omero.model import PixelsI
        from omero.sys import ParametersI
        from omero.util.tiles import RPSTileLoop
        from omero.util.tiles import TileLoopIteration
        from numpy import fromfunction

        sizeX = 4096
        sizeY = 4096
        sizeZ = 1
        sizeC = 1
        sizeT = 1
        tileWidth = 1024
        tileHeight = 1024
        imageName = "testStitchBig4K-1Ktiles"
        description = None
        tile_max = float(255)

        pixelsService = self.client.sf.getPixelsService()
        queryService = self.client.sf.getQueryService()

        query = "from PixelsType as p where p.value='int8'"
        pixelsType = queryService.findByQuery(query, None)
        channelList = range(sizeC)
        iId = pixelsService.createImage(sizeX, sizeY, sizeZ, sizeT,
                                        channelList, pixelsType, imageName,
                                        description)

        image = queryService.findByQuery(
            "select i from Image i join fetch i.pixels where i.id = :id",
            ParametersI().addId(iId))
        pid = image.getPrimaryPixels().getId().getValue()

        def f(x, y):
            """
            create some fake pixel data tile (2D numpy array)
            """
            return (x * y) / (1 + x + y)

        def mktile(w, h):
            tile = fromfunction(f, (w, h))
            tile = tile.astype(int)
            tile[tile > tile_max] = tile_max
            return list(tile.flatten())

        tile = fromfunction(f, (tileWidth, tileHeight)).astype(int)
        tile_min = float(tile.min())
        tile_max = min(tile_max, float(tile.max()))

        class Iteration(TileLoopIteration):
            def run(self, data, z, c, t, x, y, tileWidth, tileHeight,
                    tileCount):
                tile2d = mktile(tileWidth, tileHeight)
                data.setTile(tile2d, z, c, t, x, y, tileWidth, tileHeight)

        loop = RPSTileLoop(self.client.sf, PixelsI(pid, False))
        loop.forEachTile(256, 256, Iteration())

        c = 0
        pixelsService.setChannelGlobalMinMax(pid, c, tile_min, tile_max)
Example #23
    def hql_query(self, query, params=None):
        ''' Execute the given HQL query and return the results. Optionally
            accepts a parameters object.
            For convenience, the OMERO types are unwrapped. '''

        # Connect if not already connected
        if self.conn is None:
            self.connect()

        if params is None:
            params = ParametersI()

        # Set OMERO Group to -1 to query across all available data
        self.conn.SERVICE_OPTS.setOmeroGroup(-1)

        # Get the Query Service
        qs = self.conn.getQueryService()

        # Execute the query
        rows = qs.projection(query, params, self.conn.SERVICE_OPTS)

        # Unwrap the query results
        unwrapped_rows = []
        for row in rows:
            unwrapped_row = []
            for column in row:
                if column is None:
                    unwrapped_row.append(None)
                else:
                    unwrapped_row.append(column.val)
            unwrapped_rows.append(unwrapped_row)

        return unwrapped_rows
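A short usage sketch for the helper above; the OMEROConnectionManager class name is taken from Example #39, where the same helper is called, and the query itself is only illustrative:

manager = OMEROConnectionManager()
params = ParametersI()
params.addId(101)  # illustrative Dataset ID
rows = manager.hql_query(
    "select i.id, i.name from DatasetImageLink l "
    "join l.child as i where l.parent.id = :id",
    params)
for image_id, image_name in rows:
    print("%s %s" % (image_id, image_name))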
Example #24
 def archivedFiles(self, img_obj):
     return \
         self.client.sf.getQueryService().findAllByQuery((
             "select o from Image i join i.pixels p "
             "join p.pixelsFileMaps m join m.parent o "
             "where i.id = :id"),
             ParametersI().addId(img_obj.id.val))
Example #25
def get_data(request, data_name, conn):
    """Return table data for images in a Plate."""
    plate_id = request.GET.get('plate')
    field_id = request.GET.get('field')

    # dict of well_id: img_id
    img_ids = get_well_image_ids(conn, plate_id, field_id)
    print 'img_ids', img_ids
    query_service = conn.getQueryService()

    if data_name.startswith("Table_"):
        column_name = data_name.replace("Table_", "")

        # Load table and get data for named column
        params = ParametersI()
        params.addId(plate_id)
        query = """select oal from PlateAnnotationLink as oal
            left outer join fetch oal.child as ch
            left outer join oal.parent as pa
            where pa.id=:id
            and ch.ns='%s'""" % NSBULKANNOTATIONS
        links = query_service.findAllByQuery(query, params, conn.SERVICE_OPTS)
        shared_resources = conn.getSharedResources()
        # Just use the first Table we find
        # TODO: handle multiple tables!?
        file_id = links[0].child.file.id.val

        table = shared_resources.openTable(OriginalFileI(file_id),
                                           conn.SERVICE_OPTS)
        headers = table.getHeaders()
        column_names = [col.name for col in headers]
        col_index = column_names.index(column_name)
        rows = table.getNumberOfRows()

        # Load first column 'Well' & named column
        col_data = table.read([0, col_index], 0, rows).columns

        table_data = {}
        well_ids = col_data[0].values
        values = col_data[1].values
        for well_id, value in zip(well_ids, values):
            print 'well_id', well_id, value
            img_id = img_ids[well_id]
            table_data[img_id] = value

        return table_data
Example #26
def get_logfile(query, fid):
    from omero.sys import ParametersI
    q = ("select o from FilesetJobLink l "
         "join l.parent as fs join l.child as j "
         "join j.originalFileLinks l2 join l2.child as o "
         "where fs.id = :id and "
         "o.mimetype = 'application/omero-log-file'")
    return query.findByQuery(q, ParametersI().addId(fid))
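A minimal usage sketch (client and fileset_id are assumed): fetch the import log attached to a Fileset and print its location in the managed repository:

query_service = client.sf.getQueryService()
log_file = get_logfile(query_service, fileset_id)
if log_file is not None:
    print("%s/%s" % (log_file.path.val, log_file.name.val))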
Example #28
def stat_screens(query):

    tb = TableBuilder("Container")
    tb.cols(["ID", "Set", "Wells", "Images", "Planes", "Bytes"])

    plate_count = 0
    well_count = 0
    image_count = 0
    plane_count = 0
    byte_count = 0

    for study, containers in sorted(studies().items()):
        for container, set_expected in sorted(containers.items()):
            params = ParametersI()
            params.addString("container", container)
            if "Plate" in set_expected:
                expected = set_expected["Plate"]
                rv = unwrap(query.projection(SPW_QUERY, params))
            elif "Dataset" in set_expected:
                expected = set_expected["Dataset"]
                rv = unwrap(query.projection(PDI_QUERY, params))
            else:
                raise Exception("unknown: %s" % set_expected.keys())

            if not rv:
                tb.row(container, "MISSING", "", "", "", "", "")
            else:
                for x in rv:
                    plate_id, plates, wells, images, planes, bytes = x
                    plate_count += plates
                    well_count += wells
                    image_count += images
                    if planes:
                        plane_count += planes
                    if bytes:
                        byte_count += bytes
                    else:
                        bytes = 0
                    if plates != len(expected):
                        plates = "%s of %s" % (plates, len(expected))
                    tb.row(container, plate_id, plates, wells, images, planes,
                           filesizeformat(bytes))
    tb.row("Total", "", plate_count, well_count, image_count, plane_count,
           filesizeformat(byte_count))
    print str(tb.build())
Example #29
 def test_pagination(self, gateway, dtype):
     """Query should paginate."""
     offset = 1
     limit = 100
     p = ParametersI()
     p.page(offset, limit)
     # Test using 'params' argument
     with_params = gateway.buildQuery(dtype, params=p)
     # Test using 'opts' dictionary
     opts = {'offset': offset, 'limit': limit}
     with_opts = gateway.buildQuery(dtype, opts=opts)
     for result in [with_params, with_opts]:
         query, params, wrapper = result
         assert isinstance(query, str)
         assert isinstance(params, Parameters)
         assert isinstance(wrapper(), BlitzObjectWrapper)
         assert params.theFilter.offset.val == offset
         assert params.theFilter.limit.val == limit
Example #30
 def test_filter_by_owner(self, gateway, dtype):
     """Query should filter by owner."""
     p = ParametersI()
     p.theFilter = Filter()
     p.theFilter.ownerId = wrap(2)
     # Test using 'params' argument
     with_params = gateway.buildQuery(dtype, params=p)
     # Test using 'opts' dictionary
     with_opts = gateway.buildQuery(dtype, opts={'owner': 1})
     for result in [with_params, with_opts]:
         query, params, wrapper = result
         assert isinstance(query, str)
         assert isinstance(params, Parameters)
         assert isinstance(wrapper(), BlitzObjectWrapper)
         if dtype not in ('experimenter', 'experimentergroup'):
             assert "where owner" in query
         else:
             assert "where owner" not in query
Example #31
 def find_wells(self):
     session = self.client.getSession()
     query_service = session.getQueryService()
     offset = 0
     count = limit = 100
     params = ParametersI().addId(self.plate.id.val).page(offset, limit)
     while count == limit:
         t0 = time.time()
         wells = query_service.findAllByQuery(QUERY_WELLS, params,
                                              {'omero.group': '-1'})
         log.info('Found %d Wells in Plate:%d (%dms)' %
                  (len(wells), self.plate.id.val,
                   (time.time() - t0) * 1000))
         count = len(wells)
         offset += count
         params.page(offset, limit)
         for well in wells:
             yield well
Example #32
def get_data(request, data_name, conn):
    """Return data for images in a Project, Dataset or Plate."""
    project_id = request.GET.get('project')
    dataset_id = request.GET.get('dataset')
    plate_id = request.GET.get('plate')
    field_id = request.GET.get('field')
    if project_id:
        img_ids = get_project_image_ids(conn, project_id)
    elif dataset_id:
        objects = conn.getObjects('Image', opts={'dataset': dataset_id})
        img_ids = [i.id for i in objects]
    elif plate_id and field_id:
        img_ids = get_image_ids(conn, plate_id, field_id)
    else:
        img_ids = request.GET.getlist('image')
    query_service = conn.getQueryService()

    if data_name == "ROI_stats_max_size":
        if plate_id:
            ns = "roi.pixel.intensities.summary"
            return get_image_map_annotations(conn, plate_id, 0, ns,
                                             "Max Points")

    if data_name == "ROI_count":
        # Want to get ROI count for images
        params = ParametersI()
        # Include "-1" so that if we have no Image IDs that the query does
        # not fail.  It will not match anything.
        params.addIds([-1] + img_ids)
        query = "select roi.image.id, count(roi.id) from Roi roi "\
                "where roi.image.id in (:ids) group by roi.image"
        p = query_service.projection(query, params, conn.SERVICE_OPTS)
        roi_counts = {}
        for i in img_ids:
            # Add placeholder 0 for all images
            roi_counts[i] = 0
        for i in p:
            roi_counts[i[0].val] = i[1].val
        return roi_counts

    if data_name == "sizeT":
        # Want to get sizeT for images
        params = ParametersI()
        # Include "-1" so that if we have no Image IDs that the query does
        # not fail.  It will not match anything.
        params.addIds([-1] + img_ids)
        query = "select pixels.image.id, pixels.sizeT from Pixels pixels "\
                "where pixels.image.id in (:ids)"
        p = query_service.projection(query, params, conn.SERVICE_OPTS)
        size_t = {}
        for i in p:
            size_t[i[0].val] = i[1].val
        return size_t
Example #33
 def testClassType(self):
     uuid = self.uuid()
     created = []
     for Ann in (CommentAnnotationI, TagAnnotationI):
         ann = Ann()
         ann.setNs(rstring(uuid))
         ann = self.update.saveAndReturnObject(ann)
         created.append(ann)
     query_string = """
     select type(a.class) from Annotation a
     where a.ns = :uuid
     """
     params = ParametersI()
     params.addString("uuid", uuid)
     rv = self.query.projection(query_string, params)
     rv = [x[0] for x in unwrap(rv)]
     assert len(rv) == 2
     assert "ome.model.annotations.CommentAnnotation" in rv
     assert "ome.model.annotations.TagAnnotation" in rv
Example #34
def get_timestamps(conn, image):
    """Return a list of times (secs), one for each T-index in the image."""
    params = ParametersI()
    params.addLong('pid', image.getPixelsId())
    query = "from PlaneInfo as Info where"\
        " Info.theZ=0 and Info.theC=0 and pixels.id=:pid"
    info_list = conn.getQueryService().findAllByQuery(query, params,
                                                      conn.SERVICE_OPTS)
    timemap = {}
    for info in info_list:
        t_index = info.theT.getValue()
        if info.deltaT is not None:
            delta_t = info.deltaT.getValue()
            timemap[t_index] = round(delta_t, 2)
    time_list = []
    for t in range(image.getSizeT()):
        if t in timemap:
            time_list.append(timemap[t])
    return time_list
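A minimal usage sketch, assuming a connected BlitzGateway and an existing image_id:

image = conn.getObject("Image", image_id)
times = get_timestamps(conn, image)
print("deltaT per timepoint (s): %s" % times)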
Example #35
    def test_groups_experimenters(self, user1):
        """
        Test listing groups.

        We simply list existing Groups since we have no way to filter
        and show only those created in the test.
        """
        conn = get_connection(user1)
        user_name = conn.getUser().getName()
        django_client = self.new_django_client(user_name, user_name)
        request_url = reverse(
            'api_experimentergroups',
            kwargs={'api_version': api_settings.API_VERSIONS[-1]})
        data = {'limit': 10}
        rsp = get_json(django_client, request_url, data)
        groups_json = rsp['data']

        query = """select obj from ExperimenterGroup as obj order by
                   lower(obj.name), obj.id"""
        params = ParametersI()
        params.page(0, 10)
        groups = conn.getQueryService().findAllByQuery(query, params)

        assert_objects(conn, groups_json, groups, dtype="ExperimenterGroup")

        # Check experimenters_url for all groups above
        for group_json in groups_json:
            # Check we can follow the link to Experimenters for each Group
            experimenters_url = group_json["url:experimenters"]
            rsp = get_json(django_client, experimenters_url)
            exps_json = rsp['data']
            exp_ids = [e['@id'] for e in exps_json]

            # Check if eids are same for group (won't be ordered)
            grp = conn.getObject("ExperimenterGroup", group_json['@id'])
            eids = [
                link.child.id.val for link in grp.copyGroupExperimenterMap()
            ]
            assert set(eids) == set(exp_ids)

            if len(exp_ids) > 0:
                assert_objects(conn, exps_json, exp_ids, dtype="Experimenter")
Example #36
 def copyFiles(self, orig_img, new_img, new_pix):
     # Then attach a copy of each of the used files in the fileset
     # to the synthetic image
     params = ParametersI()
     params.addId(orig_img.id.val)
     rows = unwrap(self.query.projection((
         "select f.id, f.name from Image i "
         "join i.fileset fs join fs.usedFiles uf "
         "join uf.originalFile f where i.id = :id"), params))
     for row in rows:
         file_id = row[0]
         file_name = row[1]
         target = create_path()
         src = OriginalFileI(file_id, False)
         self.client.download(ofile=src, filename=str(target))
         copy = self.client.upload(filename=str(target),
                                   name=file_name)
         link = PixelsOriginalFileMapI()
         link.parent = copy.proxy()
         link.child = new_pix
         self.update.saveObject(link)
Example #37
def annotation_ids_by_field(conn,
                            value="CMPO_0000077",
                            key="Phenotype Term Accession",
                            ns="openmicroscopy.org/mapr/phenotype"):
    """
    Return a list of IDs for map annotations with the given namespace
    that have a key=value pair matching the given parameters.
    """
    from omero.rtypes import unwrap
    from omero.sys import ParametersI
    params = ParametersI()
    params.addString("value", value)
    params.addString("key", key)
    params.addString("ns", ns)
    q = (
        "select a.id from MapAnnotation a join a.mapValue as mv "
        "where a.ns = :ns and mv.name = :key and mv.value = :value"
    )
    return unwrap(conn.getQueryService().projection(q, params))[0]
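A usage sketch against an existing BlitzGateway connection; the default arguments target the IDR 'mapr' phenotype namespace shown above:

ann_ids = annotation_ids_by_field(conn,
                                  value="CMPO_0000077",
                                  key="Phenotype Term Accession",
                                  ns="openmicroscopy.org/mapr/phenotype")
print("%d matching MapAnnotation(s)" % len(ann_ids))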
Example #38
    os.environ.get('IDR_PASSWORD', 'omero'),
    host=os.environ.get('IDR_HOST', 'localhost'))
conn.connect()
conn.setGroupForSession(3)  # Public

query_service = conn.getQueryService()

# Find the plates of idr0004.

query = """
SELECT child.id
  FROM ScreenPlateLink
  WHERE parent.name LIKE :name
"""

params = ParametersI()
params.add('name', wrap('idr0004-%'))

rows = query_service.projection(query, params)

plate_ids = [row[0].val for row in rows]

assert plate_ids

# Loop through each field of those plates.

query = """
SELECT id, image.name, image.fileset.id, well.row, well.column, well.plate.name
  FROM WellSample
  WHERE well.plate.id IN (:ids)
"""
Example #39
def main(argv=sys.argv):

    # Configure argument parsing
    parser = ArgumentParser(description='''Report number of images imported in
                                           a date range''')
    parser.add_argument('-q', '--quiet', action='store_const', const=True,
                        default=False, help='Do not print output')
    parser.add_argument('-f', '--file', metavar='file',
                        help='Destination CSV file')
    parser.add_argument('-s', '--start', metavar='start',
                        help='Start timestamp')
    parser.add_argument('-e', '--end', metavar='end',
                        help='End timestamp')
    parser.add_argument('-a', '--all', action='store_const', const=True,
                        default=False,
                        help='Complete report. Ignores start/end')
    parser.add_argument('-p', '--period', choices=['year', 'month', 'day'],
                        default='month',
                        help='Period for use in conjunction with -a')
    args = parser.parse_args()

    # Create an OMERO Connection with our basic connection manager
    conn_manager = OMEROConnectionManager()

    if args.all:

        q = '''
            SELECT grp.name,
                   experimenter.omeName,
                   TO_CHAR(event.time, '{period}') AS cal_period,
                   count(event.time)
            FROM Image image
            JOIN image.details.creationEvent event
            JOIN image.details.owner experimenter
            JOIN image.details.group grp
            GROUP BY grp.name,
                     experimenter.omeName,
                     TO_CHAR(event.time, '{period}')
            ORDER BY grp.name,
                     experimenter.omeName,
                     TO_CHAR(event.time, '{period}')
            DESC
            '''

        q = q.format(period=periods[args.period])

        # Run the query
        rows = conn_manager.hql_query(q)
        header = ['Group', 'Username', 'Period', 'Count']

    else:

        params = ParametersI()
        params.map = {}

        start_date = None
        end_date = None

        try:
            if args.start:
                start_date = dateutil.parser.parse(args.start)
            if args.end:
                end_date = dateutil.parser.parse(args.end)
        except ValueError:
            sys.stderr.write('Start and/or end dates have to be parseable!')
            sys.exit(1)

        q = '''
            SELECT grp.name,
                   experimenter.omeName,
                   count(event.time)
            FROM Image image
            JOIN image.details.creationEvent event
            JOIN image.details.owner experimenter
            JOIN image.details.group grp
            '''

        if start_date or end_date:
            q += ' WHERE '

        if start_date:
            q += ' event.time >= :dstart '
            params.map['dstart'] = rtime(unix_time_millis(start_date))

        if start_date and end_date:
            q += ' AND '

        if end_date:
            q += ' event.time <= :dend'
            params.map['dend'] = rtime(unix_time_millis(end_date))

        q += '''
            GROUP BY grp.name,
                     experimenter.omeName
            '''

        # Run the query
        rows = conn_manager.hql_query(q, params)
        header = ['Group', 'Username', 'Count']

    # Print results (if not quieted)
    if args.quiet is False:
        print ', '.join(header)
        for row in rows:
            print ', '.join([str(item) for item in row])

    # Output CSV file (if specified)
    if args.file is not None:
        write_csv(rows,
                  args.file,
                  header)
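Two names referenced above, periods and unix_time_millis, are defined elsewhere in the script. Plausible definitions (assumptions, not the original code):

import datetime

# Hypothetical mapping from the --period choice to a TO_CHAR format string.
periods = {
    'year': 'YYYY',
    'month': 'YYYY-MM',
    'day': 'YYYY-MM-DD',
}


def unix_time_millis(dt):
    # Hypothetical helper: milliseconds since the Unix epoch, for rtime().
    epoch = datetime.datetime.utcfromtimestamp(0)
    return int((dt - epoch).total_seconds() * 1000)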
Example #40
def read_data(conn):
	imageId = 9
	datasetId = 2
	plateId = -1

	print "\nList Projects:"
	print "=" * 50
	my_expId = conn.getUser().getId()
	for project in conn.listProjects(my_expId):
		print_obj(project)
		for dataset in project.listChildren():
			print_obj(dataset, 2)
			for image in dataset.listChildren():
				print_obj(image, 4)

	print "\nList Datasets:"
	print "=" * 50
	params = ParametersI()
	params.exp(conn.getUser().getId())
	datasets = conn.getObjects("Dataset", params=params)
	for dataset in datasets:
		print_obj(dataset)

	print "\nDataset:%s" % datasetId
	print "=" * 50
	dataset = conn.getObject("Dataset", datasetId)
	print "\nImages in Dataset:", dataset.getName()
	for image in dataset.listChildren():
		print_obj(image)

	image = conn.getObject("Image", imageId)
	print "\nImage:%s" % imageId
	print "=" * 50
	print image.getName(), image.getDescription()
	#Retrieve information about an image
	print "X:", image.getSizeX()
	print "Y:", image.getSizeY()
	print "Z:", image.getSizeZ()
	print "C:", image.getSizeC()
	print "T:", image.getSizeT()
	#render the first timepoint, mid Z section
	z = image.getSizeZ() / 2
	t = 0
	renderedImage = image.renderImage(z, t)
	#renderedImage.show() #popup (use for debug only)
	#renderedImage.save("test.jpg")  # save in the current folder

	print "\nList Screens:"
	print "=" * 50
	for screen in conn.getObjects("Screen"):
		print_obj(screen)
		for plate in screen.listChildren():
			print_obj(plate, 2)
			plateId = plate.getId()

			if plateId >= 0:
				print "\nPlate:%s" % plateId
				print "=" * 50
				plate = conn.getObject("Plate", plateid)
				print "\nNumber ob fields:", plate.getNumberOfFields()
				print "\nGrid size:", plate.getGridSize()
				print "\n Wells in Plate:", plate.getName()
				for well in plate.listChildren():
					index = well.countWellSample()
					print " Well: ", well.row, well.column, " Fields:", index
					for index in xrange(0, index):
						print "  Image: ", \
								well.getImage(index).getName(), \
								well.getImage(index).getId()
Example #41
    def sets(self, args):
        """List filesets by various criteria

Filesets are bundles of original data imported into OMERO 5 and above
which represent 1 *or more* images.

Examples:

    bin/omero fs sets --order=newest        # Default
    bin/omero fs sets --order=oldest
    bin/omero fs sets --order=largest
    bin/omero fs sets --without-images      # Corrupt filesets
    bin/omero fs sets --with-transfer=ln_s  # Symlinked filesets
    bin/omero fs sets --check               # Proof the checksums
        """

        from omero.constants.namespaces import NSFILETRANSFER
        from omero_sys_ParametersI import ParametersI
        from omero.rtypes import unwrap

        client = self.ctx.conn(args)
        service = client.sf.getQueryService()
        admin = client.sf.getAdminService()

        if args.check and not admin.getEventContext().isAdmin:
            self.error_admin_only(fatal=True)

        annselect = (
            "(select ann.textValue from Fileset f4 "
            "join f4.annotationLinks fal join fal.child ann "
            "where f4.id = fs.id and ann.ns =:ns) "
        )
        select = (
            "select fs.id, fs.templatePrefix, "
            "(select size(f2.images) from Fileset f2 "
            "where f2.id = fs.id),"
            "(select size(f3.usedFiles) from Fileset f3 "
            "where f3.id = fs.id),"
        ) + annselect
        query1 = "from Fileset fs " "where 1 = 1 "
        query2 = "group by fs.id, fs.templatePrefix "

        if args.order:
            if args.order == "newest":
                query2 += "order by fs.id desc"
            elif args.order == "oldest":
                query2 += "order by fs.id asc"
            elif args.order == "prefix":
                query2 += "order by fs.templatePrefix"

        if args.without_images:
            query = "%s and fs.images is empty %s" % (query1, query2)
        else:
            query = "%s %s" % (query1, query2)

        params = ParametersI()
        params.addString("ns", NSFILETRANSFER)
        count = service.projection("select count(fs) " + query1, params, {"omero.group": "-1"})

        params.page(args.offset, args.limit)
        objs = service.projection(select + query, params, {"omero.group": "-1"})
        objs = unwrap(objs)
        count = unwrap(count)[0][0]

        cols = ["Id", "Prefix", "Images", "Files", "Transfer"]
        if args.check:
            cols.append("Check")
        tb = self._table(args)
        tb.cols(cols)
        tb.page(args.offset, args.limit, count)

        # Map any requested transfers as well
        if args.with_transfer:
            restricted = [TRANSFERS.get(x, x) for x in args.with_transfer[0]]
        else:
            restricted = None

        for idx, obj in enumerate(objs):

            # Map the transfer name to the CLI symbols
            ns = obj[-1]
            if ns is None:
                ns = ""
            elif ns in TRANSFERS:
                ns = TRANSFERS[ns]
            obj[-1] = ns

            # Filter based on the ns symbols
            if restricted and ns not in restricted:
                continue

            # Now perform check if required
            if args.check:
                from omero.grid import RawAccessRequest

                desc, prx = self.get_managed_repo(client)
                ctx = client.getContext(group=-1)
                check_params = ParametersI()
                check_params.addId(obj[0])
                rows = service.projection(
                    (
                        "select h.value, f.hash, "
                        "f.path || '/' || f.name "
                        "from Fileset fs join fs.usedFiles uf "
                        "join uf.originalFile f join f.hasher h "
                        "where fs.id = :id"
                    ),
                    check_params,
                    ctx,
                )

                if not rows:
                    obj.append("Empty")

                err = None
                for row in rows:
                    row = unwrap(row)
                    raw = RawAccessRequest()
                    raw.repoUuid = desc.hash.val
                    raw.command = "checksum"
                    raw.args = map(str, row)
                    try:
                        cb = client.submit(raw)
                        cb.close(True)
                    except CmdError, ce:
                        err = ce.err
                        self.ctx.dbg(err)

                if err:
                    obj.append("ERROR!")
                elif rows:
                    obj.append("OK")

            tb.row(idx, *tuple(obj))
Example #42
def processImages(client, conn, scriptParams):
    """
    Process the script params to get the Images, then print the stage
    coordinates (in micrometers) of every Point ROI, converted from pixel
    coordinates via the 'Conversion matrix' in each image's original metadata.
    """

    message = ""

    # Get the images
    objects, logMessage = script_utils.getObjects(conn, scriptParams)
    message += logMessage
    if not objects:
        return None, None, message

    # Concatenate images from datasets
    if scriptParams["Data_Type"] == "Image":
        images = objects
    else:
        images = []
        for ds in objects:
            images += ds.listChildren()

    queryService = conn.getQueryService()
    roiService = conn.getRoiService()

    print "Showing X ; Y coordinates in micrometer"

    for image in images:

        print "---------- {0} ---------- ".format(image.getName())

        metadata = dict(image.loadOriginalMetadata()[1])

        params = ParametersI()
        params.addId(image.getId())

        roiResult = roiService.findByImage(image.getId(), None)

        for roi in roiResult.rois:
            for shape in roi.copyShapes():
                if type(shape) != omero.model.PointI:
                    continue

                # From tem-hole-finder:XYpic2XYstage.m
                # RotM=1e-9*[tt(1),tt(2);tt(3),tt(4)];
                # Offset=1e-6*[tt(5),tt(6)];
                # XYstageHoles=RotM*XYpixHolesOverview'+repmat(Offset',[1,l1]);

                # TODO: Eval is of course not really safe...
                tt = eval(metadata['Conversion matrix'])

                RotM = [x * 1e-9 for x in [tt[0], tt[1], tt[2], tt[3]]]
                Offset = [x * 1e-6 for x in [tt[4], tt[5]]]

                xRoi = int(shape.getCx().getValue())
                yRoi = int(shape.getCy().getValue())

                stageX = RotM[0] * xRoi + RotM[1] * yRoi + Offset[0]
                stageY = RotM[2] * xRoi + RotM[3] * yRoi + Offset[1]
                name = roi.getName().getValue() \
                    if roi.getName() is not None else "Unnamed"

                print "{0} [ {1} ; {2} ] ".format(name, stageX * 1e6, stageY * 1e6)

        print "--------------------------------------"

    return "Finished calculating"
Example #43
0
    def sets(self, args):
        """List filesets by various criteria

Filesets are bundles of original data imported into OMERO 5 and above
which represent 1 *or more* images.

Examples:

    bin/omero fs sets --order=newest        # Default
    bin/omero fs sets --order=oldest
    bin/omero fs sets --order=largest
    bin/omero fs sets --without-images      # Corrupt filesets
    bin/omero fs sets --with-transfer=ln_s  # Symlinked filesets
    bin/omero fs sets --check               # Proof the checksums
        """

        from omero.constants.namespaces import NSFILETRANSFER
        from omero_sys_ParametersI import ParametersI
        from omero.rtypes import unwrap
        from omero.cmd import OK

        client = self.ctx.conn(args)
        service = client.sf.getQueryService()

        select = (
            "select fs.id, fs.templatePrefix, "
            "(select size(f2.images) from Fileset f2 where f2.id = fs.id),"
            "(select size(f3.usedFiles) from Fileset f3 where f3.id = fs.id),"
            "ann.textValue ")
        query1 = (
            "from Fileset fs "
            "left outer join fs.annotationLinks fal "
            "left outer join fal.child ann "
            "where (ann is null or ann.ns = :ns) ")
        query2 = (
            "group by fs.id, fs.templatePrefix, ann.textValue ")

        if args.order:
            if args.order == "newest":
                query2 += "order by fs.id desc"
            elif args.order == "oldest":
                query2 += "order by fs.id asc"
            elif args.order == "prefix":
                query2 += "order by fs.templatePrefix"

        if args.without_images:
            query = "%s and fs.images is empty %s" % (query1, query2)
        else:
            query = "%s %s" % (query1, query2)

        params = ParametersI()
        params.addString("ns", NSFILETRANSFER)
        count = service.projection("select count(fs) " + query1,
                                   params, {"omero.group": "-1"})

        params.page(args.offset, args.limit)
        objs = service.projection(select + query,
                                  params, {"omero.group": "-1"})
        objs = unwrap(objs)
        count = unwrap(count)[0][0]

        cols = ["Id", "Prefix", "Images", "Files", "Transfer"]
        if args.check:
            cols.append("Check")
        tb = self._table(args)
        tb.cols(cols)
        tb.page(args.offset, args.limit, count)
        for idx, obj in enumerate(objs):

            # Map the transfer name to the CLI symbols
            ns = obj[-1]
            if ns is None:
                ns = ""
            elif ns in TRANSFERS:
                ns = TRANSFERS[ns]
            obj[-1] = ns

            # Map any requested transfers as well (use a separate loop
            # variable so the row index `idx` above is not clobbered)
            allowed = args.with_transfer or []
            for aidx, x in enumerate(allowed):
                x = x[0]  # Strip argparse wrapper
                x = TRANSFERS.get(x, x)  # map
                allowed[aidx] = x

            # Filter based on the ns symbols
            if allowed:
                if ns not in allowed:
                    continue

            # Now perform check if required
            if args.check:
                from omero.grid import RawAccessRequest
                desc, prx = self.get_managed_repo(client)
                ctx = client.getContext(group=-1)
                check_params = ParametersI()
                check_params.addId(obj[0])
                rows = service.projection((
                    "select h.value, f.hash, "
                    "f.path || '/' || f.name "
                    "from Fileset fs join fs.usedFiles uf "
                    "join uf.originalFile f join f.hasher h "
                    "where fs.id = :id"
                    ), check_params, ctx)

                if not rows:
                    obj.append("Empty")

                err = None
                for row in rows:
                    row = unwrap(row)
                    raw = RawAccessRequest()
                    raw.repoUuid = desc.hash.val
                    raw.command = "checksum"
                    raw.args = map(str, row)
                    cb = client.submit(raw)
                    try:
                        rsp = cb.getResponse()
                        if not isinstance(rsp, OK):
                            err = rsp
                            break
                    finally:
                        cb.close(True)

                if err:
                    obj.append("ERROR!")
                elif rows:
                    obj.append("OK")

            tb.row(idx, *tuple(obj))
        self.ctx.out(str(tb.build()))
Example #44
0
    def testQueryTaggedUnique(self):

        # get group we're working on...
        ctx = self.client.sf.getAdminService().getEventContext()
        groupId = ctx.groupId
        print 'groupId', groupId

        # Admin sets permissions to read-ann
        admin = self.root.sf.getAdminService()
        rootUpdate = self.root.sf.getUpdateService()
        gr = admin.getGroup(groupId)
        p = PermissionsI()
        p.setUserRead(True)
        p.setUserWrite(True)
        p.setGroupRead(True)
        p.setGroupAnnotate(True)
        p.setGroupWrite(False)
        p.setWorldRead(False)
        p.setWorldAnnotate(False)
        p.setWorldWrite(False)
        gr.details.permissions = p
        admin.updateGroup(gr)

        # Update context for user
        ctx = self.client.sf.getAdminService().getEventContext()
        update = self.client.sf.getUpdateService()
        queryService = self.client.sf.getQueryService()
        tagCount = 5
        # User creates tag linked to images
        tag = TagAnnotationI()
        tag.textValue = wrap("test_iQuerySpeed")
        links = []

        for i in range(tagCount):
            iid = createImageWithPixels(self.client, self.uuid())
            link = ImageAnnotationLinkI()
            link.parent = ImageI(iid, False)
            link.child = tag
            links.append(link)
        links = update.saveAndReturnArray(links)
        tag = links[0].child
        # check permissions
        p = tag.getDetails().getPermissions()
        assert p.isGroupRead()
        assert p.isGroupAnnotate()

        # Root also links user's tag to images
        rootLinks = []
        for l in links:
            link = ImageAnnotationLinkI()
            link.parent = ImageI(l.parent.id, False)
            link.child = TagAnnotationI(l.child.id, False)
            rootLinks.append(link)
        rootUpdate.saveAndReturnArray(rootLinks, {'omero.group': str(groupId)})

        q = """select distinct new map(obj.id as id,
               obj.name as name,
               obj.details.owner.id as ownerId,
               obj as image_details_permissions,
               obj.fileset.id as filesetId,
               lower(obj.name) as n
             ,
             pix.sizeX as sizeX,
             pix.sizeY as sizeY,
             pix.sizeZ as sizeZ
             )
            from Image obj  left outer join obj.pixels pix
            join obj.annotationLinks alink
            where %s
            order by lower(obj.name), obj.id """

        params = ParametersI()
        params.add('tid', tag.id)

        # We can get all the tagged images like this.
        # We use an additional select statement to give 'unique' results
        uniqueClause = """alink.id = (select max(alink.id)
                from ImageAnnotationLink alink
                where alink.child.id=:tid and alink.parent.id=obj.id)"""
        query = q % uniqueClause
        result1 = queryService.projection(query, params,
                                          {'omero.group': str(groupId)})
        assert len(result1) == tagCount

        # Without the select statement, we get the same image returned
        # multiple times if there is no 'distinct'
        clause = "alink.child.id=:tid"
        query = q % clause
        result2 = queryService.projection(query, params,
                                          {'omero.group': str(groupId)})
        assert len(result2) == tagCount
        for idx in range(len(result1)-1):
            # Omit final since == isn't defined for Ice objects.
            assert result1[idx] == result2[idx]
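
    # --- Illustration (not part of the original test) ---
    # A minimal sketch, assuming a connected BlitzGateway `conn` and a tag id,
    # of the "unique link" clause exercised above: each tagged image is
    # returned once even if several ImageAnnotationLinks point at the tag.
    def list_images_tagged_once(self, conn, tag_id):
        from omero.rtypes import rlong, unwrap
        from omero.sys import ParametersI

        params = ParametersI()
        params.add('tid', rlong(tag_id))
        query = (
            "select obj.id, obj.name from Image obj "
            "join obj.annotationLinks alink "
            "where alink.id = (select max(l.id) from ImageAnnotationLink l "
            "where l.child.id = :tid and l.parent.id = obj.id)")
        return unwrap(conn.getQueryService().projection(
            query, params, conn.SERVICE_OPTS))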
Example #45
0
def attributes_by_attributes(conn,
                             name="Gene Symbol",
                             value="ASH2L",
                             ns="openmicroscopy.org/mapr/gene",
                             ns2="openmicroscopy.org/mapr/phenotype",
                             name2=None,
                             sId=None
                             ):

    """
    Return a list of neighbours attributes
    for given case insensitive attribute value. (Uses the python blitz gateway)
    """
    from omero.rtypes import rstring, rlist, unwrap
    from omero.sys import ParametersI

    params = ParametersI()
    params.addString("value", value.lower())
    q = (
        "select distinct new map( mv.value as value) "
        "from Annotation as a "
        "join a.mapValue as mv "
        "where lower(mv.value) = :value {where_clause}"
    )
    where_clause = []
    if name:
        params.addString("name", name)
        where_clause.append("and mv.name = :name")
    q = q.format(**{'where_clause': " ".join(where_clause)})

    values = [v[0]['value'] for v in unwrap(
        conn.getQueryService().projection(q, params))]

    params = ParametersI()
    valuelist = [rstring(unicode(v)) for v in values]
    params.add('values', rlist(valuelist))
    params.addString("ns", ns)
    params.addString("ns2", ns2)

    q = (
        "select distinct new map("
            "mv.name as name, "
            "mv.value as value, "
            "mv2.name as name2, "
            "mv2.value as value2) "
        "from Image as i "
        "join i.annotationLinks as ial "
        "join i.annotationLinks as ial2 "
        "join ial.child as a "
        "join a.mapValue as mv "
        "join ial2.child as a2 "
        "join a2.mapValue as mv2 "
        "where a.ns = :ns and a2.ns = :ns2 "
        "and mv.value in (:values) {where_claus} "
    )
    
    where_claus = []
    if name:
        params.addString("name", name)
        where_claus.append("and mv.name = :name")
    if name2:
        params.addString("name2", name2)
        where_claus.append("and mv2.name = :name2")

    q = q.format(**{'where_claus':" ".join(where_claus)})
    
    if sId != None:
        q = q + ("and i in (select image from WellSample "
            "where well.plate in "
            "(select child from ScreenPlateLink where parent.id = {sId}))")

        screenidList = []
        screenidList.append(str(sId))
        q = q.format(**{'sId':" ".join(screenidList)})
    
    res = {}
    for r in unwrap(conn.getQueryService().projection(q, params)):
        r = r[0]
        try:
            res[(r['name'], r['value'])].append((r['name2'], r['value2']))
        except KeyError:
            res[(r['name'], r['value'])] = [(r['name2'], r['value2'])]
    return res
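

# --- Usage sketch (not part of the original example) ---
# Assuming a connected BlitzGateway `conn`, collect the phenotype map
# annotations that share images with the gene symbol "ASH2L" and print them
# grouped by the matched gene attribute.
neighbours = attributes_by_attributes(conn, name="Gene Symbol", value="ASH2L")
for (name, value), others in sorted(neighbours.items()):
    print "%s=%s:" % (name, value)
    for name2, value2 in others:
        print "    %s=%s" % (name2, value2)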
Example #46
0
def stat_plates(query, screen, images=False):

    params = ParametersI()
    params.addString("screen", screen)

    obj = query.findByQuery((
        "select s from Screen s "
        "where s.name = :screen"), params)

    if not obj:
        raise Exception("unknown screen: %s" % screen)

    if images:
        q = ("select %s from Image i "
             "join i.wellSamples ws join ws.well w "
             "join w.plate p join p.screenLinks sl "
             "join sl.parent s where s.name = :screen")

        limit = 1000
        found = 0
        count = unwrap(query.projection(
            q % "count(distinct i.id)", params
        ))[0][0]
        print >>stderr, count
        params.page(0, limit)

        q = q % "distinct i.id"
        q = "%s order by i.id" % q
        while count > 0:
            rv = unwrap(query.projection(q, params))
            count -= len(rv)
            found += len(rv)
            params.page(found, limit)
            for x in rv:
                yield x[0]
        return

    plates = glob(join(screen, "plates", "*"))
    plates = map(basename, plates)

    tb = TableBuilder("Plate")
    tb.cols(["PID", "Wells", "Images"])

    well_count = 0
    image_count = 0
    for plate in plates:
        params.addString("plate", plate)
        rv = unwrap(query.projection((
            "select p.id, count(distinct w.id), count(distinct i.id)"
            "  from Screen s "
            "left outer join s.plateLinks spl join spl.child as p "
            "left outer join p.wells as w "
            "left outer join w.wellSamples as ws "
            "left outer join ws.image as i "
            "where s.name = :screen and p.name = :plate "
            "group by p.id"), params))
        if not rv:
            tb.row(plate, "MISSING", "", "")
        else:
            for x in rv:
                plate_id, wells, images = x
                well_count += wells
                image_count += images
                tb.row(plate, plate_id, wells, images)
    tb.row("Total", "", well_count, image_count)
    print str(tb.build())
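

# --- Usage sketch (not part of the original example) ---
# `client` and the screen name "idr0001-screenA" are placeholders. With
# images=True the function is a generator over distinct Image ids; without
# it, it prints a per-plate table of well and image counts.
query = client.sf.getQueryService()
for image_id in stat_plates(query, "idr0001-screenA", images=True):
    print image_id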
def get_images(conn):
    params = ParametersI()
    params.exp(conn.getUser().getId())
    images = conn.getObjects('Image', params=params)
    return images
def get_datasets(conn):
    params = ParametersI()
    params.exp(conn.getUser().getId())
    datasets = conn.getObjects('Dataset', params=params)
    return datasets
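

# --- Usage sketch (not part of the original helpers) ---
# Assuming a connected BlitzGateway `conn`, list the current user's datasets
# and images.
for dataset in get_datasets(conn):
    print dataset.getId(), dataset.getName()
for image in get_images(conn):
    print image.getId(), image.getName()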
def run():
    """
    """
    dataTypes = [rstring("Plate")]

    client = scripts.client(
        "Manage_Plate_Acquisitions.py",
        "Add or remove PlateAcquisition(s) in a given Plate",

        scripts.String("Data_Type", optional=False, grouping="1",
                       description="The data type you want to work with.",
                       values=dataTypes,
                       default="Plate"),

        scripts.List("IDs", optional=False, grouping="2",
                     description="List of Plate IDs").ofType(rlong(0)),

        scripts.String("Mode", optional=False, grouping="3",
                       description="Select if you want to add or "
                                   "remove PlateAcquisitions",
                       values=[rstring("Add"), rstring("Remove")],
                       default="Add"),

        version="0.2",
        authors=["Niko Klaric"],
        institutions=["Glencoe Software Inc."],
        contact="*****@*****.**",
    )

    try:
        scriptParams = {}
        for key in client.getInputKeys():
            if client.getInput(key):
                scriptParams[key] = client.getInput(key, unwrap=True)

        connection = BlitzGateway(client_obj=client)
        updateService = connection.getUpdateService()
        queryService = connection.getQueryService()

        processedMessages = []

        for plateId in scriptParams["IDs"]:
            plateObj = connection.getObject("Plate", plateId)
            if plateObj is None:
                client.setOutput(
                    "Message",
                    rstring("ERROR: No Plate with ID %s" % plateId))
                return

            if scriptParams["Mode"] == "Add":
                plateAcquisitionObj = PlateAcquisitionI()
                plateAcquisitionObj.setPlate(PlateI(plateObj.getId(), False))

                wellGrid = plateObj.getWellGrid()
                for axis in wellGrid:
                    for wellObj in axis:
                        wellSampleList = wellObj.copyWellSamples()
                        plateAcquisitionObj.addAllWellSampleSet(wellSampleList)

                plateAcquisitionObj = updateService.saveAndReturnObject(
                    plateAcquisitionObj)
                plateAcquisitionId = plateAcquisitionObj.getId()._val

                processedMessages.append(
                    "Linked new PlateAcquisition with ID %d"
                    " to Plate with ID %d." % (plateAcquisitionId, plateId))
            else:
                params = ParametersI()
                params.addId(plateId)

                queryString = """
                    FROM PlateAcquisition AS pa
                    LEFT JOIN FETCH pa.wellSample
                    LEFT OUTER JOIN FETCH pa.annotationLinks
                        WHERE pa.plate.id = :id
                    """
                plateAcquisitionList = queryService.findAllByQuery(
                    queryString, params, connection.SERVICE_OPTS)
                if plateAcquisitionList:
                    updateList = []

                    for plate_acquisition in plateAcquisitionList:
                        for well_sample in plate_acquisition.copyWellSample():
                            well_sample.setPlateAcquisition(None)
                            updateList.append(well_sample)

                        updateService.saveArray(updateList)

                        plate_acquisition.clearWellSample()
                        plate_acquisition.clearAnnotationLinks()

                        plate_acquisition = updateService.saveAndReturnObject(
                            plate_acquisition)
                        updateService.deleteObject(plate_acquisition)

                processedMessages.append(
                    "%d PlateAcquisition(s) removed from Plate with ID %d." %
                    (len(plateAcquisitionList), plateId))

        client.setOutput("Message", rstring("No errors. %s" %
                         " ".join(processedMessages)))
    finally:
        client.closeSession()
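

# Standard OMERO.scripts entry point; not shown in the original snippet but
# typically present so the script executes when run by the script service.
if __name__ == "__main__":
    run()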