def _run_tstSaveAsBinarySeries(self, testidx, narys_, valdtype, groupingdim_):
    """Pseudo-parameterized test fixture, allows reusing existing spark context.

    Generates `narys_` test arrays of dtype `valdtype`, saves them as a binary
    Series dataset grouped along dimension `groupingdim_`, then verifies:
    the record contents of every written ``*.bin`` file, the ``conf.json``
    metadata, and the presence of the ``SUCCESS`` marker file.

    Parameters
    ----------
    testidx : int
        Index used to build a unique output subdirectory per invocation.
    narys_ : int
        Number of test arrays to generate (one value per array in each record).
    valdtype : str
        Numpy dtype name for the generated array values.
    groupingdim_ : int
        Dimension along which saveAsBinarySeries groups its output files.
    """
    import json  # hoisted from mid-function so the dependency is visible up front

    paramstr = "(groupingdim=%d, valuedtype='%s')" % (groupingdim_, valdtype)
    # third element (total byte size) is unused here
    arys, aryshape, _ = _generate_test_arrays(narys_, dtype_=valdtype)
    dims = aryshape[:]
    outdir = os.path.join(self.outputdir, "anotherdir%02d" % testidx)

    images = ImagesLoader(self.sc).fromArrays(arys)
    images.saveAsBinarySeries(outdir, groupingDim=groupingdim_)

    ndims = len(aryshape)
    # prevent padding to 4-byte boundaries: "=" specifies no alignment
    unpacker = struct.Struct('=' + 'h' * ndims + dtype(valdtype).char * narys_)

    def calcExpectedNKeys():
        # every dimension except the grouping one contributes to the key count
        tmpshape = list(dims[:])
        del tmpshape[groupingdim_]
        return prod(tmpshape)
    expectednkeys = calcExpectedNKeys()

    def byrec(f_, unpacker_, nkeys_):
        # Yield (keys, values) tuples, one fixed-size binary record at a time,
        # until the file is exhausted (read() returns empty bytes).
        rec = True
        while rec:
            rec = f_.read(unpacker_.size)
            if rec:
                allrecvals = unpacker_.unpack(rec)
                yield allrecvals[:nkeys_], allrecvals[nkeys_:]

    outfilenames = glob.glob(os.path.join(outdir, "*.bin"))
    # expect one output file per index along the grouping dimension
    assert_equals(dims[groupingdim_], len(outfilenames))
    for outfilename in outfilenames:
        with open(outfilename, 'rb') as f:
            nkeys = 0
            for keys, vals in byrec(f, unpacker, ndims):
                nkeys += 1
                assert_equals(narys_, len(vals))
                for validx, val in enumerate(vals):
                    assert_equals(arys[validx][keys], val,
                                  "Expected %g, got %g, for test %d %s" %
                                  (arys[validx][keys], val, testidx, paramstr))
            assert_equals(expectednkeys, nkeys)

    confname = os.path.join(outdir, "conf.json")
    assert_true(os.path.isfile(confname))
    # reuse confname instead of rebuilding the identical path inline (was
    # a second os.path.join on the same components)
    with open(confname, 'r') as fconf:
        conf = json.load(fconf)
        assert_equals(outdir, conf['input'])
        assert_equals(tuple(dims), tuple(conf['dims']))
        assert_equals(len(aryshape), conf['nkeys'])
        assert_equals(narys_, conf['nvalues'])
        assert_equals(valdtype, conf['valuetype'])
        assert_equals('int16', conf['keytype'])

    assert_true(os.path.isfile(os.path.join(outdir, 'SUCCESS')))
def test_roundtripConvertToSeries(self):
    """Round-trip check: an in-memory toSeries() conversion must match the
    result of saveAsBinarySeries() followed by SeriesLoader.fromBinary().
    """
    blocksize = 76 * 20
    imagepath = TestImagesUsingOutputDir._findSourceTreeDir("utils/data/fish/tif-stack")
    outdir = os.path.join(self.outputdir, "fish-series-dir")

    images = ImagesLoader(self.sc).fromMultipageTif(imagepath)

    # path 1: direct in-memory conversion
    series = images.toSeries(blockSize=blocksize)
    series_ary = series.pack()

    # path 2: write binary series to disk, then reload it
    images.saveAsBinarySeries(outdir, blockSize=blocksize)
    converted = SeriesLoader(self.sc).fromBinary(outdir)
    converted_ary = converted.pack()

    assert_equals((76, 87, 2), series.dims.count)
    assert_equals((20, 76, 87, 2), series_ary.shape)
    assert_true(array_equal(series_ary, converted_ary))
def test_roundtripConvertToSeries(self):
    """Verify that saving Images as a binary Series and reloading it
    reproduces the packed array produced by a direct toSeries() call.
    """
    BLOCK_SIZE = 76 * 20
    source = TestImagesUsingOutputDir._findSourceTreeDir("utils/data/fish/tif-stack")
    target = os.path.join(self.outputdir, "fish-series-dir")

    loaded = ImagesLoader(self.sc).fromMultipageTif(source)

    # straight conversion, kept entirely in memory
    direct_series = loaded.toSeries(blockSize=BLOCK_SIZE)
    direct_packed = direct_series.pack()

    # disk round trip: save, reload, pack
    loaded.saveAsBinarySeries(target, blockSize=BLOCK_SIZE)
    reloaded_packed = SeriesLoader(self.sc).fromBinary(target).pack()

    assert_equals((76, 87, 2), direct_series.dims.count)
    assert_equals((20, 76, 87, 2), direct_packed.shape)
    assert_true(array_equal(direct_packed, reloaded_packed))
def _run_tstSaveAsBinarySeries(self, testidx, narys_, valdtype, groupingdim_):
    """Pseudo-parameterized test fixture, allows reusing existing spark context """
    paramstr = "(groupingdim=%d, valuedtype='%s')" % (groupingdim_, valdtype)
    # arys: list of generated test arrays; aryshape: their common shape
    arys, aryshape, arysize = _generate_test_arrays(narys_, dtype_=valdtype)
    dims = aryshape[:]
    # unique output subdirectory per test invocation
    outdir = os.path.join(self.outputdir, "anotherdir%02d" % testidx)
    images = ImagesLoader(self.sc).fromArrays(arys)
    images.saveAsBinarySeries(outdir, groupingDim=groupingdim_)
    ndims = len(aryshape)
    # prevent padding to 4-byte boundaries: "=" specifies no alignment
    # record layout: ndims int16 keys followed by narys_ values of valdtype
    unpacker = struct.Struct('=' + 'h' * ndims + dtype(valdtype).char * narys_)

    def calcExpectedNKeys():
        # every dimension except the grouping one contributes to the key count
        tmpshape = list(dims[:])
        del tmpshape[groupingdim_]
        return prod(tmpshape)
    expectednkeys = calcExpectedNKeys()

    def byrec(f_, unpacker_, nkeys_):
        # yield (keys, values) tuples, one fixed-size record at a time,
        # until read() returns empty bytes at end of file
        rec = True
        while rec:
            rec = f_.read(unpacker_.size)
            if rec:
                allrecvals = unpacker_.unpack(rec)
                yield allrecvals[:nkeys_], allrecvals[nkeys_:]

    outfilenames = glob.glob(os.path.join(outdir, "*.bin"))
    # expect one output file per index along the grouping dimension
    assert_equals(dims[groupingdim_], len(outfilenames))
    for outfilename in outfilenames:
        with open(outfilename, 'rb') as f:
            nkeys = 0
            for keys, vals in byrec(f, unpacker, ndims):
                nkeys += 1
                # each record carries one value per input array
                assert_equals(narys_, len(vals))
                for validx, val in enumerate(vals):
                    # keys index back into the original source array
                    assert_equals(
                        arys[validx][keys], val,
                        "Expected %g, got %g, for test %d %s" %
                        (arys[validx][keys], val, testidx, paramstr))
            assert_equals(expectednkeys, nkeys)

    # verify conf.json metadata written alongside the binary files
    confname = os.path.join(outdir, "conf.json")
    assert_true(os.path.isfile(confname))
    with open(os.path.join(outdir, "conf.json"), 'r') as fconf:
        import json
        conf = json.load(fconf)
        assert_equals(outdir, conf['input'])
        assert_equals(tuple(dims), tuple(conf['dims']))
        assert_equals(len(aryshape), conf['nkeys'])
        assert_equals(narys_, conf['nvalues'])
        assert_equals(valdtype, conf['valuetype'])
        assert_equals('int16', conf['keytype'])
    # SUCCESS marker signals the save completed
    assert_true(os.path.isfile(os.path.join(outdir, 'SUCCESS')))