def testColumnSpaceNames(self):
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/testColumnSpaceNames")
    assert table

    cols = []
    lc = columns.LongColumnI("Long column", None, [1])
    sc = columns.StringColumnI("String column", None, 4)
    cols.append(lc)
    cols.append(sc)

    table.initialize(cols)
    lc.values = [1, 2, 3, 4]
    sc.values = ["foo", "bar", "foo", "spam"]
    table.addData(cols)

    assert [1, 2, 3] == table.getWhereList(
        "x>1", {'x': rstring("Long column")}, 0, 4, 0)
    assert [1] == table.getWhereList(
        "x=='bar'", {'x': rstring("String column")}, 0, 4, 0)
    assert [0, 2] == table.getWhereList(
        "x=='foo'", {'x': rstring("String column")}, 0, 4, 0)
    assert [1, 2] == table.getWhereList(
        "(x>1)&(y!='spam')",
        {'x': rstring("Long column"), 'y': rstring("String column")},
        0, 4, 0)

    table.delete()
    table.close()

def test2098(self):
    """
    Creates and downloads an HDF file and checks that its size
    and hash match what's in the db
    """
    grid = self.client.sf.sharedResources()
    table = grid.newTable(1, "/test")
    assert table

    lc = columns.LongColumnI('lc', 'desc', [1])

    file = None
    try:
        file = table.getOriginalFile()
        assert file
        table.initialize([lc])
        table.addData([lc])
    finally:
        # Not deleting since queried
        table.close()

    # Reload the file
    file = self.client.sf.getQueryService().get(
        "OriginalFile", file.id.val)

    # Check values
    p = path.path(self.tmpfile())
    self.client.download(file, str(p))
    assert p.size == file.size.val

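# The docstring above mentions a hash check, but only the size is compared. Below is a
# minimal, hypothetical sketch of how the checksum comparison could look; it assumes the
# server's default checksum algorithm is SHA-1 and that the hex digest is available on
# the OriginalFile as ``ofile.hash`` -- both are assumptions, not verified here.
def _sketch_verify_download_hash(ofile, local_path):
    import hashlib
    digest = hashlib.sha1()
    with open(local_path, "rb") as f:
        # Hash the downloaded file in chunks to avoid loading it all into memory
        for chunk in iter(lambda: f.read(65536), b""):
            digest.update(chunk)
    # unwrap() turns the RString stored on the model object into a plain str
    return digest.hexdigest() == unwrap(ofile.hash)
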
def testReadOnlyFile(self):
    """
    Create an HDF5 file on the server, and then mark it read-only.
    The server should still allow you to load & read that file.
    """
    self.testBlankTable()

    # ofile
    filename = self.unique_dir + "/file.txt"
    mrepo = self.get_managed_repo()
    assert not mrepo.fileExists(filename)
    self.create_file(mrepo, filename)
    assert mrepo.fileExists(filename)
    assert "file.txt" in mrepo.list(self.unique_dir)[0]

    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/test")
    assert table
    lcol = columns.LongColumnI('longcol', 'long col')
    table.initialize([lcol])
    table.setMetadata('test', wrap('test'))
    tid = unwrap(table.getOriginalFile().getId())
    table.close()

    # Mark the file as read only
    # wip: read_only = self.raw("read-only", [file_path])

    table = grid.openTable(omero.model.OriginalFileI(tid))
    assert table
    table.delete()
    table.close()

def testCanReadInternalMetadata(self):
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/testInternalMetadata.h5")
    table.initialize([columns.LongColumnI('lc')])
    assert table.getMetadata("__version")
    table.delete()
    table.close()

def testGetHeaders(self):
    """
    Check all required fields are included in the headers
    """
    grid = self.client.sf.sharedResources()
    table = grid.newTable(1, "/test")
    assert table

    cols = [
        columns.LongColumnI('no desc'),
        columns.LongColumnI('scalar', 'scalar desc'),
        columns.LongArrayColumnI('array', 'array desc', 3)
    ]
    table.initialize(cols)
    h = table.getHeaders()
    assert len(h) == 3
    assert (h[0].name, h[0].description) == ('no desc', '')
    assert (h[1].name, h[1].description) == ('scalar', 'scalar desc')
    assert (h[2].name, h[2].description, h[2].size) == (
        'array', 'array desc', 3)
    table.delete()
    table.close()

def test4000TableRead(self):
    """
    Tests that empty or zero (ice default) values for stop
    are translated appropriately.
    """
    grid = self.client.sf.sharedResources()
    table = grid.newTable(1, "/test")
    assert table

    lc = columns.LongColumnI('lc', 'desc', [123])
    table.initialize([lc])
    table.addData([lc])
    assert [123] == table.read([0], 0, 0).columns[0].values

def testCanWriteAlmostInternalMetadata(self, data):
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/testInternalMetadata.h5")
    table.initialize([columns.LongColumnI('lc')])
    if isinstance(data, dict):
        table.setAllMetadata(data)
    else:
        table.setMetadata(*data)
    assert "4" == table.getMetadata("version").val
    table.delete()
    table.close()

def testCantWriteInternalMetadata(self, data):
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/testInternalMetadata.h5")
    table.initialize([columns.LongColumnI('lc')])
    with pytest.raises(omero.ApiUsageException):
        if isinstance(data, dict):
            table.setAllMetadata(data)
        else:
            table.setMetadata(*data)
    table.delete()
    table.close()

def test3714GetWhereListVars(self):
    """
    Tests that variables are correctly unwrapped after transport
    """
    grid = self.client.sf.sharedResources()
    table = grid.newTable(1, "/test")
    assert table

    lc = columns.LongColumnI('lc', 'desc', [1])
    table.initialize([lc])
    table.addData([lc])
    assert [0] == table.getWhereList(
        '(lc==var)', {"var": rlong(1)}, 0, 0, 0)

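# For context on the test above: query variables are passed to getWhereList() as RTypes
# (rlong, rstring, ...) and the server is expected to unwrap them to plain Python values
# before evaluating the condition. A minimal sketch of that round trip, using only the
# omero.rtypes helpers already used in this module (illustration only, not a test):
def _sketch_rtype_roundtrip():
    assert unwrap(rlong(1)) == 1          # RLong -> int
    assert unwrap(rstring("lc")) == "lc"  # RString -> str
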
def test12606fileSizeCheck(self):
    """
    Close may write additional data to a table after a flush;
    this is most likely to occur for very small writes
    such as attribute changes
    """
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/test")
    assert table
    lcol = columns.LongColumnI('longcol', 'long col')
    table.initialize([lcol])
    table.setMetadata('test', wrap('test'))
    tid = unwrap(table.getOriginalFile().getId())
    table.close()

    table = grid.openTable(omero.model.OriginalFileI(tid))
    assert table
    table.close()

def test10431uninitialisedTableReadWrite(self):
    """
    Return an error when attempting to read/write an uninitialised table
    """
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/test")
    assert table
    lcol = columns.LongColumnI('longcol', 'long col')

    with pytest.raises(omero.ApiUsageException):
        table.addData([lcol])
    with pytest.raises(omero.ApiUsageException):
        table.read([0], 0, 0)
    with pytest.raises(omero.ApiUsageException):
        table.slice([], [])
    with pytest.raises(omero.ApiUsageException):
        table.getWhereList('', None, 0, 0, 0)

def test2855MetadataMethods(self):
    """
    Tests the various metadata methods for a table
    """
    grid = self.client.sf.sharedResources()
    table = grid.newTable(1, "/test")
    assert table

    def clean(m):
        """
        Unwraps the RTypes for easier processing and gets rid
        of auto-generated values for easier testing.
        """
        m = unwrap(m)
        del m["initialized"]
        del m["version"]
        return m

    try:
        print(table.getOriginalFile().id.val)
        lc = columns.LongColumnI('lc', 'desc', [1])
        table.initialize([lc])
        assert len(clean(table.getAllMetadata())) == 0

        # Set a string
        table.setMetadata("s", rstring("b"))
        assert "b" == unwrap(table.getMetadata("s"))
        assert {"s": "b"} == clean(table.getAllMetadata())

        # Set an int
        table.setMetadata("i", rint(1))
        assert 1 == unwrap(table.getMetadata("i"))
        assert {"s": "b", "i": 1} == clean(table.getAllMetadata())

        # Set a float
        table.setMetadata("f", rfloat(1))
        assert 1 == unwrap(table.getMetadata("f"))
        assert {"s": "b", "i": 1, "f": 1} == clean(table.getAllMetadata())

    finally:
        table.close()

def testCreateAllColumnsAndMetadata_5_3_4(self):
    """
    Call this method to create the reference HDF5 table under a
    5.3.4 Python 2.7 server.

    The OriginalFile ID of the table will be printed, and can be used
    to find the file under ${omero.data.dir}/Files/.
    Alternatively download it using
    ``omero download OriginalFile:FileID output.h5``

    To run manually go to ``components/tools/OmeroPy``, and run:

    ``pytest test/integration/tablestest/test_backwards_compatibility.py\
    -s -k testCreateAllColumnsAndMetadata_5_3_4``
    """
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/test")
    assert table

    # Supported metadata types
    # https://github.com/ome/omero-py/blob/v5.5.1/src/omero/hdfstorageV2.py#L466
    metadata = {
        'string': rstring('a'),
        'int': rint(1),
        'long': rlong(1),
        'double': rfloat(0.1),
    }

    fcol = columns.FileColumnI('filecol', 'file col')
    fcol.values = [10, 20]
    icol = columns.ImageColumnI('imagecol', 'image col')
    icol.values = [30, 40]
    rcol = columns.RoiColumnI('roicol', 'roi col')
    rcol.values = [50, 60]
    wcol = columns.WellColumnI('wellcol', 'well col')
    wcol.values = [70, 80]
    pcol = columns.PlateColumnI('platecol', 'plate col')
    pcol.values = [90, 100]

    bcol = columns.BoolColumnI('boolcol', 'bool col')
    bcol.values = [True, False]
    dcol = columns.DoubleColumnI('doublecol', 'double col')
    dcol.values = [0.25, 0.5]
    lcol = columns.LongColumnI('longcol', 'long col')
    lcol.values = [-1, -2]

    scol = columns.StringColumnI('stringcol', 'string col', 3)
    scol.values = ["abc", "de"]
    larr = columns.LongArrayColumnI('longarr', 'longarr col', 2)
    larr.values = [[-2, -1], [1, 2]]
    farr = columns.FloatArrayColumnI('floatarr', 'floatarr col', 2)
    farr.values = [[-0.25, -0.5], [0.125, 0.0625]]
    darr = columns.DoubleArrayColumnI('doublearr', 'doublearr col', 2)
    darr.values = [[-0.25, -0.5], [0.125, 0.0625]]

    # DatasetColumn is broken!

    mask = self.createMaskCol()

    cols = [fcol, icol, rcol, wcol, pcol, bcol, dcol, lcol, scol, mask,
            larr, farr, darr]

    table.initialize(cols)
    table.setAllMetadata(metadata)
    table.addData(cols)
    data = table.readCoordinates([0, 1])

    testf = data.columns[0].values
    assert 10 == testf[0]
    assert 20 == testf[1]
    testi = data.columns[1].values
    assert 30 == testi[0]
    assert 40 == testi[1]
    testr = data.columns[2].values
    assert 50 == testr[0]
    assert 60 == testr[1]
    testw = data.columns[3].values
    assert 70 == testw[0]
    assert 80 == testw[1]
    testp = data.columns[4].values
    assert 90 == testp[0]
    assert 100 == testp[1]

    testb = data.columns[5].values
    assert testb[0]
    assert not testb[1]
    testd = data.columns[6].values
    assert 0.25 == testd[0]
    assert 0.5 == testd[1]
    testl = data.columns[7].values
    assert -1 == testl[0]
    assert -2 == testl[1]

    tests = data.columns[8].values
    assert "abc" == tests[0]
    assert "de" == tests[1]

    testm = data.columns[9]
    self.checkMaskCol(testm)

    testla = data.columns[10].values
    assert [-2, -1] == testla[0]
    assert [1, 2] == testla[1]
    testfa = data.columns[11].values
    assert [-0.25, -0.5] == testfa[0]
    assert [0.125, 0.0625] == testfa[1]
    testda = data.columns[12].values
    assert [-0.25, -0.5] == testda[0]
    assert [0.125, 0.0625] == testda[1]

    ofile = table.getOriginalFile()
    print("Created OriginalFile:", ofile.getId().val)
    table.close()

def test2855MetadataMethods(self):
    """
    Tests the various metadata methods for a table
    """
    grid = self.client.sf.sharedResources()
    table = grid.newTable(1, "/test")
    assert table

    def clean(m):
        """
        Unwraps the RTypes for easier processing and gets rid
        of auto-generated values for easier testing.
        """
        m = unwrap(m)
        assert "__initialized" in m
        assert "__version" in m
        del m["__initialized"]
        del m["__version"]
        return m

    try:
        print(table.getOriginalFile().id.val)
        lc = columns.LongColumnI('lc', 'desc', [1])
        table.initialize([lc])
        assert len(clean(table.getAllMetadata())) == 0

        # Set a string
        table.setMetadata("s", rstring("b"))
        assert "b" == unwrap(table.getMetadata("s"))
        assert {"s": "b"} == clean(table.getAllMetadata())

        # Set an int
        table.setMetadata("i", rint(1))
        assert 1 == unwrap(table.getMetadata("i"))
        assert {"s": "b", "i": 1} == clean(table.getAllMetadata())

        # Set a float
        table.setMetadata("f", rfloat(1))
        assert 1 == unwrap(table.getMetadata("f"))
        assert {"s": "b", "i": 1, "f": 1} == clean(table.getAllMetadata())

        # Replace all user-metadata
        table.setAllMetadata({"s2": rstring("b2"), "l2": rlong(3)})
        assert {"s2": "b2", "l2": 3} == clean(table.getAllMetadata())
        assert table.getMetadata("s") is None

        table.setAllMetadata({})
        assert {} == clean(table.getAllMetadata())

        table.setMetadata("z", rint(1))
        with pytest.raises(omero.ApiUsageException):
            table.setMetadata("__z", rint(2))
        assert {"z": 1} == clean(table.getAllMetadata())

        with pytest.raises(omero.ValidationException):
            table.setMetadata("z", rint(None))

    finally:
        table.delete()
        table.close()

def testCreateAllColumns_4_4_5(self):
    """
    Call this method to create the reference HDF5 table under a
    4.4.5 or older server.

    The OriginalFile ID of the table will be printed, and can be used
    to find the file under ${omero.data.dir}/Files/.

    To run manually go to ``components/tools/OmeroPy``, and run:

    ``py.test test/integration/tablestest/test_backwards_compatibility.py\
    -s -k testCreateAllColumns_4_4_5``
    """
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/test")
    assert table

    fcol = columns.FileColumnI('filecol', 'file col')
    fcol.values = [10, 20]
    icol = columns.ImageColumnI('imagecol', 'image col')
    icol.values = [30, 40]
    rcol = columns.RoiColumnI('roicol', 'roi col')
    rcol.values = [50, 60]
    wcol = columns.WellColumnI('wellcol', 'well col')
    wcol.values = [70, 80]
    pcol = columns.PlateColumnI('platecol', 'plate col')
    pcol.values = [90, 100]

    bcol = columns.BoolColumnI('boolcol', 'bool col')
    bcol.values = [True, False]
    dcol = columns.DoubleColumnI('doublecol', 'double col')
    dcol.values = [0.25, 0.5]
    lcol = columns.LongColumnI('longcol', 'long col')
    lcol.values = [-1, -2]

    scol = columns.StringColumnI('stringcol', 'string col', 3)
    scol.values = ["abc", "de"]
    # larr = columns.LongArrayColumnI('longarr', 'longarr col', 2)
    # larr.values = [[-2, -1], [1, 2]]
    # farr = columns.FloatArrayColumnI('floatarr', 'floatarr col', 2)
    # farr.values = [[-0.25, -0.5], [0.125, 0.0625]]
    # darr = columns.DoubleArrayColumnI('doublearr', 'doublearr col', 2)
    # darr.values = [[-0.25, -0.5], [0.125, 0.0625]]

    mask = self.createMaskCol()

    cols = [fcol, icol, rcol, wcol, pcol, bcol, dcol, lcol, scol, mask]
    # larr, farr, darr]

    table.initialize(cols)
    table.addData(cols)
    data = table.readCoordinates([0, 1])

    testf = data.columns[0].values
    assert 10 == testf[0]
    assert 20 == testf[1]
    testi = data.columns[1].values
    assert 30 == testi[0]
    assert 40 == testi[1]
    testr = data.columns[2].values
    assert 50 == testr[0]
    assert 60 == testr[1]
    testw = data.columns[3].values
    assert 70 == testw[0]
    assert 80 == testw[1]
    testp = data.columns[4].values
    assert 90 == testp[0]
    assert 100 == testp[1]

    testb = data.columns[5].values
    assert testb[0]
    assert not testb[1]
    testd = data.columns[6].values
    assert 0.25 == testd[0]
    assert 0.5 == testd[1]
    testl = data.columns[7].values
    assert -1 == testl[0]
    assert -2 == testl[1]

    tests = data.columns[8].values
    assert "abc" == tests[0]
    assert "de" == tests[1]

    testm = data.columns[9]
    self.checkMaskCol(testm)

    # testla = data.columns[10].values
    # assert [-2, -1] == testla[0]
    # assert [1, 2] == testla[1]
    # testda = data.columns[11].values
    # assert [-0.25, -0.5] == testda[0]
    # assert [0.125, 0.0625] == testda[1]

    ofile = table.getOriginalFile()
    print("Created OriginalFile:", ofile.getId().val)

def _testCreateAllColumnsAndMetadata(self):
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/test")
    assert table

    # Supported metadata types
    # https://github.com/ome/omero-py/blob/v5.5.1/src/omero/hdfstorageV2.py#L466
    metadata = {
        'string': rstring('a'),
        'int': rint(1),
        'long': rlong(1),
        'double': rfloat(0.1),
    }

    fcol = columns.FileColumnI('filecol', 'file col')
    fcol.values = [10, 20]
    icol = columns.ImageColumnI('imagecol', 'image col')
    icol.values = [30, 40]
    rcol = columns.RoiColumnI('roicol', 'roi col')
    rcol.values = [50, 60]
    wcol = columns.WellColumnI('wellcol', 'well col')
    wcol.values = [70, 80]
    pcol = columns.PlateColumnI('platecol', 'plate col')
    pcol.values = [90, 100]

    bcol = columns.BoolColumnI('boolcol', 'bool col')
    bcol.values = [True, False]
    dcol = columns.DoubleColumnI('doublecol', 'double col')
    dcol.values = [0.25, 0.5]
    lcol = columns.LongColumnI('longcol', 'long col')
    lcol.values = [-1, -2]

    scol = columns.StringColumnI('stringcol', 'string col', 3)
    scol.values = ["abc", "de"]
    larr = columns.LongArrayColumnI('longarr', 'longarr col', 2)
    larr.values = [[-2, -1], [1, 2]]
    farr = columns.FloatArrayColumnI('floatarr', 'floatarr col', 2)
    farr.values = [[-0.25, -0.5], [0.125, 0.0625]]
    darr = columns.DoubleArrayColumnI('doublearr', 'doublearr col', 2)
    darr.values = [[-0.25, -0.5], [0.125, 0.0625]]
    dscol = columns.DatasetColumnI('datasetcol', 'dataset col')
    dscol.values = [110, 120]

    mask = self.createMaskCol()

    cols = [fcol, icol, rcol, wcol, pcol, bcol, dcol, lcol, scol, mask,
            larr, farr, darr, dscol]

    table.initialize(cols)
    table.setAllMetadata(metadata)
    table.addData(cols)
    data = table.readCoordinates([0, 1])

    testf = data.columns[0].values
    assert 10 == testf[0]
    assert 20 == testf[1]
    testi = data.columns[1].values
    assert 30 == testi[0]
    assert 40 == testi[1]
    testr = data.columns[2].values
    assert 50 == testr[0]
    assert 60 == testr[1]
    testw = data.columns[3].values
    assert 70 == testw[0]
    assert 80 == testw[1]
    testp = data.columns[4].values
    assert 90 == testp[0]
    assert 100 == testp[1]

    testb = data.columns[5].values
    assert testb[0]
    assert not testb[1]
    testd = data.columns[6].values
    assert 0.25 == testd[0]
    assert 0.5 == testd[1]
    testl = data.columns[7].values
    assert -1 == testl[0]
    assert -2 == testl[1]

    tests = data.columns[8].values
    assert "abc" == tests[0]
    assert "de" == tests[1]

    testm = data.columns[9]
    self.checkMaskCol(testm)

    testla = data.columns[10].values
    assert [-2, -1] == testla[0]
    assert [1, 2] == testla[1]
    testfa = data.columns[11].values
    assert [-0.25, -0.5] == testfa[0]
    assert [0.125, 0.0625] == testfa[1]
    testda = data.columns[12].values
    assert [-0.25, -0.5] == testda[0]
    assert [0.125, 0.0625] == testda[1]
    testds = data.columns[13].values
    assert 110 == testds[0]
    assert 120 == testds[1]

    ofile = table.getOriginalFile()
    print("Created OriginalFile:", ofile.getId().val)
    return table

def testAllColumnsSameTable(self):
    """
    Check all column types can coexist in the same table
    """
    grid = self.client.sf.sharedResources()
    repoMap = grid.repositories()
    repoObj = repoMap.descriptions[0]
    table = grid.newTable(repoObj.id.val, "/test")
    assert table

    fcol = columns.FileColumnI('filecol', 'file col')
    fcol.values = [10, 20]
    icol = columns.ImageColumnI('imagecol', 'image col')
    icol.values = [30, 40]
    rcol = columns.RoiColumnI('roicol', 'roi col')
    rcol.values = [50, 60]
    wcol = columns.WellColumnI('wellcol', 'well col')
    wcol.values = [70, 80]
    pcol = columns.PlateColumnI('platecol', 'plate col')
    pcol.values = [90, 100]

    bcol = columns.BoolColumnI('boolcol', 'bool col')
    bcol.values = [True, False]
    dcol = columns.DoubleColumnI('doublecol', 'double col')
    dcol.values = [0.25, 0.5]
    lcol = columns.LongColumnI('longcol', 'long col')
    lcol.values = [-1, -2]

    scol = columns.StringColumnI('stringcol', 'string col', 46)
    scol.values = ["მიკროსკოპის პონი", "de"]

    mask = self.createMaskCol()

    larr = columns.LongArrayColumnI('longarr', 'longarr col', 2)
    larr.values = [[-2, -1], [1, 2]]
    farr = columns.FloatArrayColumnI('floatarr', 'floatarr col', 2)
    farr.values = [[-8.0, -4.0], [16.0, 32.0]]
    darr = columns.DoubleArrayColumnI('doublearr', 'doublearr col', 2)
    darr.values = [[-0.25, -0.5], [0.125, 0.0625]]

    cols = [fcol, icol, rcol, wcol, pcol, bcol, dcol, lcol, scol, mask,
            larr, farr, darr]

    table.initialize(cols)
    table.addData(cols)
    data = table.readCoordinates([0, 1])

    testf = data.columns[0].values
    assert 10 == testf[0]
    assert 20 == testf[1]
    testi = data.columns[1].values
    assert 30 == testi[0]
    assert 40 == testi[1]
    testr = data.columns[2].values
    assert 50 == testr[0]
    assert 60 == testr[1]
    testw = data.columns[3].values
    assert 70 == testw[0]
    assert 80 == testw[1]
    testp = data.columns[4].values
    assert 90 == testp[0]
    assert 100 == testp[1]

    testb = data.columns[5].values
    assert testb[0]
    assert not testb[1]
    testd = data.columns[6].values
    assert 0.25 == testd[0]
    assert 0.5 == testd[1]
    testl = data.columns[7].values
    assert -1 == testl[0]
    assert -2 == testl[1]

    tests = data.columns[8].values
    assert "მიკროსკოპის პონი" == tests[0]
    assert "de" == tests[1]

    testm = data.columns[9]
    self.checkMaskCol(testm)

    testla = data.columns[10].values
    assert [-2, -1] == testla[0]
    assert [1, 2] == testla[1]
    testfa = data.columns[11].values
    assert [-8.0, -4.0] == testfa[0]
    assert [16.0, 32.0] == testfa[1]
    testda = data.columns[12].values
    assert [-0.25, -0.5] == testda[0]
    assert [0.125, 0.0625] == testda[1]

    ofile = table.getOriginalFile()
    print(("testAllColumnsSameTable", "OriginalFile:", ofile.getId().val))

    # Now try an update
    updatel = omero.grid.LongColumn('longcol', '', [12345])
    updatela = omero.grid.LongArrayColumn('longarr', '', 2, [[654, 321]])
    updateData = omero.grid.Data(
        rowNumbers=[1], columns=[updatel, updatela])
    table.update(updateData)

    assert table.getNumberOfRows() == 2

    data2 = table.readCoordinates([0, 1])
    for n in [0, 1, 2, 3, 4, 5, 6, 8, 11, 12]:
        assert data.columns[n].values == data2.columns[n].values
    self.checkMaskCol(data2.columns[9])

    testl2 = data2.columns[7].values
    assert -1 == testl2[0]
    assert 12345 == testl2[1]
    testla2 = data2.columns[10].values
    assert [-2, -1] == testla2[0]
    assert [654, 321] == testla2[1]

    table.delete()
    table.close()

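# A minimal sketch of the row-update pattern exercised at the end of the test above:
# build replacement columns, wrap them in omero.grid.Data together with the target row
# numbers, and call update(). ``table`` stands for any open, initialized table proxy and
# ``new_value`` is a hypothetical replacement value; this is an illustration, not a test.
def _sketch_update_single_row(table, row, new_value):
    col = omero.grid.LongColumn('longcol', '', [new_value])
    table.update(omero.grid.Data(rowNumbers=[row], columns=[col]))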