Example #1
    def setData(self, dataseries, timesteps, labels, timeMin=0, timeMax=0):
        #assert all(len(ds)==len(timesteps) for ds in dataseries),'%s != %s'%(dataseries[0],timesteps)
        if not isIterable(first(timesteps)):
            timesteps = [list(timesteps) for t in range(len(dataseries))]

        lens = [(len(ds), len(ts)) for ds, ts in zip(dataseries, timesteps)]

        if any(ds != ts for ds, ts in lens):
            raise ValueError(
                'Dataseries lengths do not match associated timesteps: %r != %r'
                % tuple(zip(*lens)))

        self.updatedData = True
        self.updatedTime = True
        self.dataseries = dataseries
        self.timesteps = timesteps

        self.labels = labels
        self.dataRange = minmax(matIter(self.dataseries)) if len(self.dataseries) > 0 else (0, 0)
        if timeMax == timeMin and isIterable(first(timesteps)) and len(self.timesteps[0]) > 0:
            self.timeMin, self.timeMax = minmax(matIter(self.timesteps))
        else:
            self.timeMax = timeMax
            self.timeMin = timeMin
Example #2
        def yieldConnectedOffsets(conoffsetpair, byteorder, compressor):
            # yield each cell's node index list by slicing the flat connectivity
            # array at the end positions given by the offsets array
            connect = first(c for c in conoffsetpair if _get(c, 'Name') == 'connectivity')
            offsets = first(c for c in conoffsetpair if _get(c, 'Name') == 'offsets')
            start = 0
            if connect is not None and len(connect.text.strip()) > 0:
                connect = readArray(connect, byteorder, compressor).tolist()
                offsets = readArray(offsets, byteorder, compressor).tolist()

                for off in offsets:
                    yield connect[start:off]
                    start = off
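The generator above slices a flat VTK connectivity array at the end positions stored in the
accompanying offsets array. A minimal standalone illustration of that slicing, using made-up
connectivity and offset values and no XML parsing or decompression:

connectivity = [0, 1, 2, 2, 3, 4, 4, 5, 6]  # three triangles, flattened
offsets = [3, 6, 9]                         # end position of each cell in the flat list

cells = []
start = 0
for off in offsets:
    cells.append(connectivity[start:off])
    start = off

print(cells)  # [[0, 1, 2], [2, 3, 4], [4, 5, 6]]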
Example #3
def calculateImageVolume(measure, objs):
    # volume is approximated as the measured in-plane area times the voxel depth (z size)
    area = calculateArea(measure, objs)
    img = first(o for o in objs if isinstance(o, eidolon.ImageSceneObject))
    if img is not None:
        area *= img.getVoxelSize().z()

    return area
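This mirrors the usual slice-stack approximation: volume is the in-plane area multiplied by the
slice (voxel z) thickness. A quick worked example with hypothetical numbers:

area_mm2 = 120.0       # measured in-plane area
voxel_depth_mm = 1.5   # voxel z size, i.e. slice thickness
volume_mm3 = area_mm2 * voxel_depth_mm
print(volume_mm3)      # 180.0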
Example #4
 def setDataMatrix(self, matrix, timesteps):
     assert len(matrix) == len(timesteps)
     assert all(len(d) == self.numRegions for d in matrix), \
         '%i != %i' % (len(first(matrix)), self.numRegions)
     self.matrix = matrix
     self.timesteps = timesteps
     self._updateTime()
Example #5
    def setActiveObject(self, name):
        for i, v in self.handleNames.items():
            self.handles[i].setActive(v.name == name)

        listitem = first(
            self.uiobj.objectList.findItems(name + ' @ ', Qt.MatchStartsWith))
        if listitem:
            with eidolon.signalBlocker(self.uiobj.objectList):
                self.uiobj.objectList.setCurrentItem(listitem)

        self._repaintDelay()
Example #6
    def testSaveLoadVolume(self):
        '''Test saving and loading a volume image.'''
        f = self.plugin.saveObject(self.vol, self.tempdir)
        eidolon.getSceneMgr().checkFutureResult(f)

        filename = eidolon.first(glob.glob(self.tempdir + '/*'))

        self.assertIsNotNone(filename)

        obj1 = self.plugin.loadObject(filename)[0]
        trans = obj1.getVolumeTransform()

        self.assertEqual(self.vpos, trans.getTranslation())
        self.assertTrue(self.rotatorsEqual(self.vrot, trans.getRotation()))
        self.assertEqual(self.vol.getArrayDims(), obj1.getArrayDims())

        with eidolon.processImageNp(obj1) as arr1:
            self.assertEqual(self.volarr.shape, arr1.shape)

            diff = np.sum(np.abs(self.volarr - arr1))
            self.assertAlmostEqual(diff, 0, 4, '%r is too large' % (diff, ))
Example #7
    def getMeasurementDock(self, obj, w=400, h=400):
        @self.mgr.proxyThreadSafe
        def createWidget():
            widg = self.mgr.create2DView(obj.get(DatafileParams.name),
                                         MeasureSplitView)  # or MeasurementView
            self.dockmap[obj.getName()] = id(widg)
            widg.setSceneObject(obj)
            return widg

        if self.win:
            self.win.sync()
            widg = first(d for d in self.win.dockWidgets
                         if id(d) == self.dockmap.get(obj.getName(), -1))
            return widg or createWidget()
Example #8
    def createRepr(self, obj, reprtype, refine=0, task=None, **kwargs):
        sobj = self.mgr.findObject(obj.get(DatafileParams.srcimage))

        # make a representation of the source object visible if one doesn't already exist
        if isinstance(sobj, eidolon.ImageSceneObject) and not len(sobj.reprs):
            isEmpty = first(self.mgr.enumSceneObjectReprs()) is None
            r = sobj.createRepr(eidolon.ReprType._imgtimestack if sobj.isTimeDependent
                                else eidolon.ReprType._imgstack)
            self.mgr.addSceneObjectRepr(r)
            if isEmpty:
                self.mgr.setCameraSeeAll()
                self.mgr.repaint()

        return self.getMeasurementDock(obj)
Example #9
        def _loadFile(filename, name, task):
            basename = name or os.path.basename(filename).split('.')[0]
            name = uniqueStr(
                basename, [o.getName() for o in self.mgr.enumSceneObjects()])
            ds = None

            tree = ET.parse(filename)
            root = tree.getroot()
            unstruc = root.find('UnstructuredGrid')
            poly = root.find('PolyData')
            appended = root.find('AppendedData')
            compressor = _get(root, 'compressor')
            byteorder = '<' if root.get('byte_order') == 'LittleEndian' else '>'

            #if appended and _get(appended,'encoding').lower()=='base64':
            #    appended=base64.decodestring(root.find('AppendedData').text)

            if unstruc is not None:
                pieces = list(unstruc)

                points = pieces[0].find('Points')
                cells = pieces[0].find('Cells')
                celldata = pieces[0].find('CellData')
                pointdata = pieces[0].find('PointData')
                nodearray = points.find('DataArray')

                if celldata is None:
                    celldata = []
                if pointdata is None:
                    pointdata = []

                nodes = readNodes(nodearray, byteorder, compressor)

                connectivity = first(i for i in cells if i.get('Name').lower() == 'connectivity')
                types = first(i for i in cells if i.get('Name').lower() == 'types')
                offsets = first(i for i in cells if i.get('Name').lower() == 'offsets')

                indlist = readArray(connectivity, byteorder, compressor).tolist()  # faster as a Python list?
                fields = readFields(celldata, pointdata, byteorder, compressor)

                celltypes = readArray(types, byteorder, compressor)
                offlist = readArray(offsets, byteorder, compressor)
                # pair each cell type entry with its end offset entry
                cellofflist = np.vstack((celltypes, offlist)).T.tolist()

                assert len(celltypes) == len(offlist)

                # map cell type IDs to IndexMatrix objects for containing the indices of that type and node ordering list
                indmats = {
                    i: (IndexMatrix(n + 'Inds', e, 0, len(s)), s)
                    for n, i, e, s in CellTypes
                }

                for celltype, off in cellofflist:
                    indmat, _ = indmats.get(celltype, (None, []))
                    if indmat is not None:  # only found for those cell types we understand (i.e. not polygon)
                        indmat.append(*indlist[off - indmat.m():off])

                inds = []
                # collect and reorder all non-empty index matrices
                for ind, order in indmats.values():
                    if ind.n() > 0:
                        # reorder columns to match CHeart node ordering
                        ind[:, :] = np.asarray(ind)[:, order]
                        inds.append(ind)

                ds = PyDataSet('vtk', nodes, inds, fields)

            elif poly is not None:
                pieces = list(poly)

                #numPoints=int(_get(pieces[0],'NumberOfPoints')

                points = pieces[0].find('Points')
                celldata = pieces[0].find('CellData')
                pointdata = pieces[0].find('PointData')
                nodearray = points.find('DataArray')
                nodes = readNodes(nodearray, byteorder, compressor)
                inds = []

                lines = IndexMatrix('lines', ElemType._Line1NL, 0, 2)
                tris = IndexMatrix('tris', ElemType._Tri1NL, 0, 3)
                quads = IndexMatrix('quads', ElemType._Quad1NL, 0, 4)

                for a, b in yieldConnectedOffsets(pieces[0].find('Lines'),
                                                  byteorder, compressor):
                    lines.append(a, b)

                for strip in yieldConnectedOffsets(pieces[0].find('Strips'),
                                                   byteorder, compressor):
                    for a, b, c in eidolon.successive(strip, 3):
                        tris.append(a, b, c)

                for poly in yieldConnectedOffsets(pieces[0].find('Polys'),
                                                  byteorder, compressor):
                    if len(poly) == 2:
                        lines.append(*poly)
                    elif len(poly) == 3:
                        tris.append(*poly)
                    elif len(poly) == 4:
                        quads.append(*poly)

                    # TODO: read in arbitrary polygon and triangulate?

                if len(lines) > 0:
                    inds.append(lines)

                if len(tris) > 0:
                    inds.append(tris)

                if len(quads) > 0:
                    quads[:, :] = np.asarray(quads)[:, CellTypes.Quad[-1]]
                    inds.append(quads)

                fields = readFields(celldata, pointdata, byteorder, compressor)

                ds = PyDataSet('vtk', nodes, inds, fields)
            else:
                raise NotImplementedError('Dataset not understood yet')

            f.setObject(
                MeshSceneObject(name,
                                ds,
                                self,
                                filename=filename,
                                isXML=True,
                                descdata=''))
Example #10
        def _saveFile(filename, ds, datasettype, desc, version, task):
            with f:
                nodes = ds.getNodes()
                with open(filename, 'w') as o:
                    o.write(
                        '# vtk DataFile Version %.1f\n%s\nASCII\nDATASET %s' %
                        (version, desc, datasettype))

                    if datasettype == DatasetTypes._STRUCTURED_GRID:
                        griddims = eval(ds.meta(VTKProps._griddims))
                        o.write(' %i %i %i' % griddims)
                    o.write('\n')

                    # write out points
                    o.write('POINTS %i double\n' % nodes.n())
                    for n in range(nodes.n()):
                        o.write('%f %f %f\n' % vecfunc(nodes.getAt(n)))

                    # write out the extra components for unstructured grids
                    if datasettype == DatasetTypes._UNSTRUCTURED_GRID:
                        cells = []
                        celltypes = []
                        for inds in ds.indices.values():
                            tname, cid, sortinds = first(
                                (n, i, s) for n, i, e, s in CellTypes
                                if e == inds.getType()) or (None, None, None)

                            if tname == CellTypes._Poly:
                                polyinds = ds.getIndexSet(
                                    inds.meta(VTKProps._polyinds))
                                celltypes += [cid] * polyinds.n()

                                for p in range(polyinds.n()):
                                    start, end = polyinds.getRow(p)
                                    poly = tuple(
                                        inds.getAt(i)
                                        for i in range(start, end))
                                    cells.append(poly)
                            elif tname is not None:
                                #unsortinds=list(reversed(indexList(sortinds,list(reversed(range(len(sortinds)))))))
                                unsortinds = eidolon.indexList(
                                    sortinds, list(range(len(sortinds))))

                                celltypes += [cid] * inds.n()
                                for ind in range(inds.n()):
                                    cells.append(
                                        eidolon.indexList(
                                            unsortinds, inds.getRow(ind)))

                        if len(cells) > 0:
                            o.write('CELLS %i %i\n' % (len(cells), sum(len(c) + 1 for c in cells)))
                            for c in cells:
                                o.write(' '.join(map(str, [len(c)] + list(c))) + '\n')

                            o.write('CELL_TYPES %i\n' % len(celltypes))
                            o.write('\n'.join(map(str, celltypes)) + '\n')

                    # write out fields as POINT_DATA, CELL_DATA is not supported
                    fields = list(ds.fields.values()) if writeFields else []
                    if len(fields) > 0:
                        o.write('POINT_DATA %i\n' % nodes.n())

                    for dat in fields:
                        atype = dat.meta(VTKProps._attrtype) or AttrTypes._SCALARS
                        name = dat.getName()

                        if atype in (AttrTypes._SCALARS, AttrTypes._VECTORS,
                                     AttrTypes._NORMALS, AttrTypes._TENSORS):
                            # SCALARS does not preserve any lookup table components
                            o.write('%s %s float\n' % (atype, name))
                        elif atype in (AttrTypes._TEXTURE_COORDINATES,
                                       AttrTypes._COLOR_SCALARS):
                            dtype = ' float' if atype == AttrTypes._TEXTURE_COORDINATES else ''
                            o.write('%s %s %s\n' % (atype, dat.m(), dtype))
                        elif atype == AttrTypes._LOOKUP_TABLE:
                            o.write('%s %i\n' % (atype, dat.n()))
                        else:
                            continue  # skips field matrices if these get stored

                        for n in range(dat.n()):
                            o.write(' '.join(map(str, dat.getRow(n))) + '\n')

                f.setObject(filename)
Example #11
        def _loadFile(filename, name, strdata, task):
            result = self.parseString(strdata or open(filename).read())

            basename = name or os.path.basename(filename).split('.')[0]
            name = uniqueStr(
                basename, [o.getName() for o in self.mgr.enumSceneObjects()])

            version, desc, data = result[:3]
            pointattrs = [a for a in result[3:] if a[0] == 'POINT_DATA']
            cellattrs = [a for a in result[3:] if a[0] == 'CELL_DATA']

            ds = None
            indmats = []
            metamap = {
                VTKProps.desc: desc,
                VTKProps.version: str(version),
                VTKProps.datasettype: data[0]
            }

            # interpret dataset blocks
            if data[0] == DatasetTypes._UNSTRUCTURED_GRID:
                nodes, cells, celltypes = data[1:]

                # map cell types to the indices of members of `cells' of that type
                typeindices = {}
                for i in range(celltypes.n()):
                    typeindices.setdefault(celltypes.getAt(i), []).append(i)

                for ctype, inds in typeindices.items():
                    tname, elemtypename, sortinds = first(
                        (n, e, s) for n, i, e, s in CellTypes
                        if i == ctype) or (None, None, None)
                    matname = '' if tname is None else uniqueStr(tname, [i.getName() for i in indmats], '')
                    if tname == CellTypes._Poly:
                        mat = IndexMatrix(matname, elemtypename, 0)
                        polyinds = IndexMatrix(matname + 'Inds',
                                               VTKProps._polyinds, 0, 2)
                        mat.meta(VTKProps._polyinds, polyinds.getName())
                        indmats.append(mat)
                        indmats.append(polyinds)

                        for ind in inds:
                            row = cells.getRow(ind)
                            length = row[0]
                            polyinds.append(mat.n(), mat.n() + length)
                            for r in row[1:length + 1]:
                                mat.append(r)

                    elif tname is not None:
                        elemtype = ElemType[elemtypename]
                        mat = IndexMatrix(matname, elemtypename, 0,
                                          elemtype.numNodes())
                        indmats.append(mat)
                        for ind in inds:
                            sortedinds = eidolon.indexList(
                                sortinds,
                                cells.getRow(ind)[1:])
                            mat.append(*sortedinds)

            elif data[0] == DatasetTypes._STRUCTURED_GRID:
                dims, nodes = data[1:]
                dimx, dimy, dimz = map(int, dims)

                assert dimx > 1
                assert dimy > 1
                assert dimz > 1

                _, inds = eidolon.generateHexBox(dimx - 2, dimy - 2, dimz - 2)

                inds = eidolon.listToMatrix(inds, 'hexes')
                inds.setType(ElemType._Hex1NL)

                indmats = [inds]
                metamap[VTKProps._griddims] = repr((dimx, dimy, dimz))

            elif data[0] == DatasetTypes._POLYDATA:
                nodes = data[1]
                polyelems = data[2:]

                lines = IndexMatrix('lines', ElemType._Line1NL, 0, 2)
                tris = IndexMatrix('tris', ElemType._Tri1NL, 0, 3)
                quads = IndexMatrix('quads', ElemType._Quad1NL, 0, 4)

                for pname, numelems, numvals, ind in polyelems:
                    n = 0
                    if pname == 'POLYGONS':
                        while n < ind.n():
                            polylen = ind.getAt(n)
                            if polylen == 2:
                                lines.append(ind.getAt(n + 1),
                                             ind.getAt(n + 2))
                            elif polylen == 3:
                                tris.append(ind.getAt(n + 1), ind.getAt(n + 2),
                                            ind.getAt(n + 3))
                            elif polylen == 4:
                                quads.append(ind.getAt(n + 1),
                                             ind.getAt(n + 2),
                                             ind.getAt(n + 4),
                                             ind.getAt(n + 3))

                            n += polylen + 1

                if len(tris) > 0:
                    indmats.append(tris)
                if len(quads) > 0:
                    indmats.append(quads)
                if len(lines) > 0:
                    indmats.append(lines)
            else:
                raise NotImplementedError(
                    'Dataset type %s not understood yet' % str(data[0]))

            ds = PyDataSet('vtk', nodes, indmats)
            for k, v in metamap.items():
                ds.meta(k, v)

            # read attributes into fields
            for attr in list(pointattrs) + list(cellattrs):
                for attrtype in attr[2:]:
                    atype = str(attrtype[0])

                    spatialname = first(ds.indices.keys())  # TODO: choose a better topology

                    if atype == AttrTypes._FIELD:
                        for fname, width, length, dtype, dat in attrtype[3:]:
                            assert (width * length) == dat.n()
                            assert length == nodes.n() or length == ds.indices[spatialname].n()

                            dat.setName(fname)
                            dat.setM(width)
                            dat.meta(StdProps._topology, spatialname)
                            dat.meta(StdProps._spatial, spatialname)
                            dat.meta(VTKProps._attrtype, atype)
                            ds.setDataField(dat)
                    else:
                        dat = attrtype[-1]
                        dat.setName(str(attrtype[1]))
                        dat.meta(StdProps._topology, spatialname)
                        dat.meta(StdProps._spatial, spatialname)
                        dat.meta(VTKProps._attrtype, atype)
                        ds.setDataField(dat)

                        if atype in (AttrTypes._NORMALS, AttrTypes._VECTORS):
                            dat.setM(3)
                        elif atype == AttrTypes._LOOKUP_TABLE:
                            dat.setM(4)
                        elif atype == AttrTypes._TENSORS:
                            dat.setM(9)
                        elif atype in (AttrTypes._TEXTURE_COORDINATES,
                                       AttrTypes._COLOR_SCALARS):
                            dat.setM(attrtype[2])
                        elif atype == AttrTypes._SCALARS:
                            if isinstance(attrtype[3], int):
                                dat.setM(attrtype[3])
                            if attrtype[3] == AttrTypes._LOOKUP_TABLE:
                                dat.meta(AttrTypes._LOOKUP_TABLE,
                                         str(attrtype[4]))
                            elif attrtype[4] == AttrTypes._LOOKUP_TABLE:
                                dat.meta(AttrTypes._LOOKUP_TABLE,
                                         str(attrtype[5]))

            # if desc is a Python literal (eg. a timestep number), attempt to evaluate it
            try:
                descdata = eval(desc)
            except:
                descdata = desc  # otherwise keep it as a plain string

            f.setObject(
                MeshSceneObject(name,
                                ds,
                                self,
                                filename=filename,
                                descdata=descdata,
                                result=result))
Example #12
 def getPlotObjectWidget(self, obj):
     return first(w for w in self.win.dockWidgets
                  if id(w) == self.dockmap.get(obj.getName(), -1))
Example #13
 def getActiveIndex(self):
     return first(i for i in self.handleNames
                  if i < len(self.handles) and self.handles[i].isActive())
Example #14
def importMeshes(x4):
    '''Import meshes from the X4DF object `x4', returning a list of MeshSceneObject instances.'''
    arrs = {a.name: a for a in x4.arrays}
    results = []

    for m in x4.meshes:
        name, ts, mnodes, topos, dfs, _ = m
        topomats = []
        dss = []
        timesteps = [0]
        filenames = []
        dfmap = dict()

        for df in dfs:
            dfmap.setdefault(df.name, []).append(df)

        # sort fields by timestep
        for dfn in dfmap:
            dfmap[dfn] = sorted(dfmap[dfn], key=lambda i: (i.timestep or 0))

        # determine timestep from timescheme value, node timestep values, or field timestep values
        if len(mnodes) > 1 or ts or len(dfs) > 1:
            if ts:  # convert timescheme to timestep list
                timesteps = frange(ts[0], ts[1] * len(mnodes), ts[1])
            elif len(mnodes) > 1:
                timesteps = [n.timestep or i for i, n in enumerate(mnodes)]
            else:
                timedf = first(dfs for dfs in dfmap.values() if len(dfs) > 1)
                if timedf:
                    timesteps = [df.timestep or i for i, df in enumerate(timedf)]

        assert len(timesteps) == len(mnodes) or len(timesteps) == len(first(dfmap.values()))

        # read topologies in first, these get copied between timesteps
        for t in topos:
            tname, tsrc, et, spatial, _ = t
            arr = array2MatrixForm(arrs[tsrc].data, np.uint32)
            tmat = eidolon.IndexMatrix(tname, et or '', *arr.shape)
            filenames.append(arrs[tsrc].filename)

            if spatial:
                tmat.meta(StdProps._spatial, spatial)
                tmat.meta(StdProps._isspatial, 'False')
            else:
                tmat.meta(StdProps._isspatial, 'True')

            np.asarray(tmat)[:, :] = arr
            topomats.append(tmat)

        # read each timestep, first copying the nodes then the field for the timestep or cloning static fields
        for i in range(len(timesteps)):
            fields = []
            arr = arrs[mnodes[i].src].data
            initnodes = arrs.get(mnodes[i].initialnodes, 0)  # get initial nodes or default 0
            filenames.append(arrs[mnodes[i].src].filename)

            nmat = eidolon.Vec3Matrix('nodes%i' % i, arr.shape[0])
            np.asarray(nmat)[:, :] = array2MatrixForm(arr + initnodes, np.double)

            # read in each field, there will be a separate entry for this timestep or a single entry that is copied for each timestep
            for dfs in dfmap.values():
                # choose the first field value if this field is static; every timestep gets a copy
                findex = 0 if len(dfs) == 1 else i
                fname, src, _, ftopo, fspatial, fieldtype, _ = dfs[findex]
                arr = array2MatrixForm(arrs[src].data, np.double)
                filenames.append(arrs[src].filename)

                fmat = eidolon.RealMatrix(fname, *arr.shape)
                fmat.meta(StdProps._topology, ftopo)
                fmat.meta(StdProps._spatial, fspatial)
                fmat.meta(StdProps._elemdata,
                          str(fieldtype == validFieldTypes[0]))
                fmat.meta(StdProps._timecopy, str(len(dfs) > 1))

                np.asarray(fmat)[:, :] = arr
                fields.append(fmat)

            dss.append(
                eidolon.PyDataSet('%s%i' % (name, i), nmat, topomats, fields))

        obj = MeshSceneObject(name,
                              dss,
                              filenames=list(filter(bool, filenames)))

        # set timestep list if needed
        if len(timesteps) > 1:
            obj.setTimestepList(list(map(ast.literal_eval, timesteps)))

        results.append(obj)

    return results
Example #15
from eidolon import ImageSceneObject, processImageNp, trange, first, rescaleArray, getSceneMgr

import numpy as np

from netclient import InferenceClient

# local variables set using --var command line options
localhost = locals().get('host', '0.0.0.0')
localport = locals().get('port', '5000')
localcont = locals().get('container', 'echo')

client = InferenceClient(localhost, int(localport))

if __name__ == 'builtins':  # scripts executed inside the Eidolon environment see this module name
    o = mgr.win.getSelectedObject() or first(mgr.objs)

    if o is None:
        mgr.showMsg(
            'Load and select an image object before executing this script')
    elif not isinstance(o, ImageSceneObject):
        mgr.showMsg('Selected object %r is not an image' % o.getName())
    else:
        oo = o.plugin.clone(o, o.getName() + '_Seg')

        with processImageNp(oo, True) as m:
            data = rescaleArray(m, 0, np.iinfo(np.uint16).max).astype(np.uint16)
            m[...] = client.inferImageVolume(localcont, data)

        mgr.addSceneObject(oo)
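Every example above uses eidolon's first helper to take the leading element of an iterable
without raising when it is empty. A minimal sketch of the presumed behaviour (the actual
eidolon implementation may differ in detail, e.g. in how a default value is supplied):

def first(iterable, default=None):
    '''Return the first item of iterable, or default if it is empty.'''
    for item in iterable:
        return item
    return default

print(first(x for x in [3, 7, 9] if x > 5))  # 7
print(first([]))                             # None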