"""Smoke-test an installed PyTables build.

Imports every compiled extension module (so a missing/broken binary fails
loudly), prints version info, pins the bundled LZO version on Linux, and
runs the full test suite on non-Windows platforms.
"""
import sys

import tables
import tables._comp_bzip2

# We don't build this one on Windows.
if sys.platform != "win32":
    import tables._comp_lzo

import tables.hdf5extension
import tables.indexesextension
import tables.linkextension
import tables.lrucacheextension
import tables.tableextension
import tables.utilsextension

tables.print_versions()

if sys.platform.startswith('linux'):
    # Pin the LZO runtime to the exact version we build against.
    # Use an explicit raise instead of `assert`: asserts are stripped under
    # `python -O`, which would silently disable this check.
    lzo_ver = tables.which_lib_version("lzo")[1]
    if lzo_ver != '2.06':
        raise RuntimeError("unexpected LZO version: %s" % lzo_ver)

if sys.platform == 'win32':
    print("""
Skipping tables.test() on Windows.
There is currently one test failure, and we have to look into this.
""")
else:
    # freeze_support() is required so multiprocessing-based tests behave
    # correctly in frozen/spawned interpreters.
    from multiprocessing import freeze_support
    freeze_support()
    tables.test()
                          chunkshape=CHUNKSHAPE, )
    # NOTE(review): this excerpt starts mid-call — the `create*` call that
    # receives `chunkshape` opens above this chunk.  Python 2 syntax
    # (`print` statements) throughout.

    # Do the actual computation and store the result in the output array `r`.
    ex = tb.Expr(expr)   # parse the expression
    ex.setOutput(r)      # where the result is stored
    # (when the setOutput line is commented out, the result stays in-memory)
    ex.eval()            # evaluate!
    f.close()
    print_filesize(h5fname, clib, clevel)


if __name__ == '__main__':
    tb.print_versions()
    # Report the combined on-disk size of the two N-element datasets.
    print "Total size for datasets:", round(2*N*dtype.itemsize/MB, 1), "MB"

    # Get the compression libraries supported by this PyTables build.
    # NOTE(review): earlier candidate sets kept commented for reference;
    # only Blosc is currently benchmarked.
    #supported_clibs = [clib for clib in ("zlib", "lzo", "bzip2", "blosc")
    #supported_clibs = [clib for clib in ("zlib", "lzo", "blosc")
    supported_clibs = [clib for clib in ("blosc",)
                       if tb.whichLibVersion(clib)]

    # Initialization code: populate the input dataset with each backend.
    #for what in ["numpy", "numpy.memmap", "numexpr"]:
    for what in ["numpy", "numexpr"]:
        #break
        print "Populating x using %s with %d points..." % (what, N)
        # NOTE(review): loop body continues past this excerpt; `t0` is
        # presumably paired with a later elapsed-time computation — confirm.
        t0 = time()
"""Import every PyTables extension module, then run the full test suite."""
import sys

import tables
import tables._comp_bzip2

# The LZO compressor extension is not built on Windows.
if sys.platform != "win32":
    import tables._comp_lzo

import tables.hdf5extension
import tables.indexesextension
import tables.linkextension
import tables.lrucacheextension
import tables.tableextension
import tables.utilsextension

tables.print_versions()

# On Linux, verify the LZO runtime is exactly the version we ship against.
if sys.platform.startswith('linux'):
    detected = tables.which_lib_version("lzo")[1]
    assert detected == '2.06', detected

# freeze_support() keeps multiprocessing-based tests working when the
# interpreter is frozen or uses the spawn start method.
from multiprocessing import freeze_support

freeze_support()
tables.test()
                          chunkshape=CHUNKSHAPE, )
    # NOTE(review): this excerpt starts mid-call — the `create*` call that
    # receives `chunkshape` opens above this chunk.

    # Do the actual computation and store the result in the output array `r`.
    ex = tb.Expr(expr)   # parse the expression
    ex.set_output(r)     # where the result is stored
    # (when the set_output line is commented out, the result stays in-memory)
    ex.eval()            # evaluate!
    f.close()
    print_filesize(h5fname, clib, clevel)


if __name__ == '__main__':
    tb.print_versions()
    # Report the combined on-disk size of the two N-element datasets.
    print(f"Total size for datasets: {2 * N * dtype.itemsize / MB:.1f} MB")

    # Get the compression libraries supported by this PyTables build.
    # NOTE(review): earlier candidate sets kept commented for reference;
    # only Blosc is currently benchmarked.
    # supported_clibs = [clib for clib in ("zlib", "lzo", "bzip2", "blosc")
    # supported_clibs = [clib for clib in ("zlib", "lzo", "blosc")
    supported_clibs = [clib for clib in ("blosc",)
                       if tb.which_lib_version(clib)]

    # Initialization code: populate the input dataset with each backend.
    # for what in ["numpy", "numpy.memmap", "numexpr"]:
    for what in ["numpy", "numexpr"]:
        # break
        print("Populating x using %s with %d points..." % (what, N))
        # NOTE(review): `clock` presumably comes from a `time` import outside
        # this excerpt; time.clock() was removed in Python 3.8 — confirm this
        # name resolves (time.perf_counter() is the modern replacement, and
        # the matching end-timestamp below this excerpt must use the same
        # clock).
        t0 = clock()