Code example #1
File: test_io.py Project: silx-kit/pyFAI
def test_writer(self):
    if io.h5py is None:
        logger.warning(
            "h5py is absent on the system, skipping HDF5 writing test")
        return
    h5file = os.path.join(self.tmpdir, "junk.h5")
    shape = 1024, 1024
    n = 100  # number of frames written to the file
    m = 10   # number of distinct frames kept in memory and recycled
    data = numpy.random.random(
        (m, shape[0], shape[1])).astype(numpy.float32)
    nmbytes = data.nbytes / 1e6 * n / m  # total volume written, in MB
    t0 = time.perf_counter()
    writer = io.HDF5Writer(filename=h5file, hpath="data")
    writer.init({"nbpt_azim": shape[0], "nbpt_rad": shape[1]})
    for i in range(n):
        writer.write(data[i % m], i)
    writer.close()
    t = time.perf_counter() - t0
    logger.info(
        "Writing of HDF5 of %ix%s (%.3fMB) took %.3fs (%.3f MByte/s)", n,
        shape, nmbytes, t, nmbytes / t)
    statinfo = os.stat(h5file)
    self.assertTrue(
        statinfo.st_size / 1e6 > nmbytes,
        "file size (%s) is smaller than the dataset" % statinfo.st_size)
Code example #2
File: profile_hdf5.py Project: isaxs/pyFAI
def bench_hdf5(n=1024,
               shape=(1024, 1024),
               dtype="float32",
               dirname=None,
               bsize=10):
    """
    Actually performs the HDF5 writing benchmark 
    @param n: number of frames to be written
    @param shape: 2-tuple of integer describing the shape of the image 
    @param bsize: number of frames in buffer
    """
    tmp_dir = tempfile.mkdtemp(dir=dirname)
    h5file = os.path.join(tmp_dir, "junk.h5")
    logger.info("Writing large dataset %ix(%i,%i) of %s to %s." %
                (n, shape[0], shape[1], dtype, h5file))

    dtype = numpy.dtype(dtype)
    if dtype.kind == "f":
        data = numpy.random.random((bsize, shape[0], shape[1])).astype(dtype)
    elif dtype.name.find("int") >= 0:
        size = bsize * shape[0] * shape[1]
        maxi = 2**(dtype.itemsize * 8 - 1) - 1
        # numpy.random.random_integers is deprecated; randint's upper bound is exclusive
        data = numpy.random.randint(0, maxi + 1, size=size).astype(dtype)
        data.shape = (bsize, shape[0], shape[1])
    else:
        raise RuntimeError("unhandled data type %s" % dtype)
    size = n * shape[0] * shape[1]
    nbytes = size * dtype.itemsize
    nmbytes = nbytes / 1e6
    t0 = time.time()
    writer = io.HDF5Writer(filename=h5file, hpath="data")
    writer.init({
        "nbpt_azim": shape[0],
        "nbpt_rad": shape[1],
        "dtype": dtype.name
    })
    for i in range(n):
        writer.write(data[i % bsize], i)
    writer.close()
    t = time.time() - t0
    bps = nbytes / t
    logger.info("Writing of %.3fMB in HDF5 took %.3fs (%.3f MByte/s)" %
                (nmbytes, t, nmbytes / t))
    statinfo = os.stat(h5file)
    assert statinfo.st_size > nbytes

    # Clean up
    os.unlink(h5file)
    os.removedirs(tmp_dir)
    return bps
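
One plausible way to drive this benchmark, sketched below, is to sweep a few buffer sizes and print the throughput returned by bench_hdf5; the frame count and buffer sizes chosen here are illustrative and not part of profile_hdf5.py.

if __name__ == "__main__":
    # bench_hdf5 returns the throughput in bytes per second
    for bsize in (1, 10, 100):
        bps = bench_hdf5(n=512, shape=(1024, 1024),
                         dtype="float32", bsize=bsize)
        print("bsize=%3i -> %8.1f MB/s" % (bsize, bps / 1e6))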
Code example #3
        if os.path.isdir(hurl):
            # write .dat or .edf files ...
            if options.cake < 2:
                writer = io.AsciiWriter(hurl)
        # Else HDF5
        else:
            if hurl.startswith("hdf5:"):
                hurl = hurl[5:]
            if ":" in hurl:
                hsplit = hurl.split(":")
                hdfpath = hsplit[-1]
                hdffile = ":".join(hsplit[:-1])  # keep Windows drive letters (e.g. C:) in the file name
            else:
                hdfpath = "test_LImA+pyFAI"
                hdffile = hurl
            writer = io.HDF5Writer(hdffile, hdfpath, options.scan)
    elif len(args) > 1:
        logger.error(
            "Specify the HDF5 output file like hdf5:///home/user/filename.h5:/path/to/group"
        )
        sys.exit(1)
    else:
        writer = None

    if options.verbose:
        logger.info("setLevel: debug")
        logger.setLevel(logging.DEBUG)
    if options.lima:
        sys.path.insert(0, options.lima)
    try:
        from Lima import Core, Basler
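
The URL handling above (stripping an optional "hdf5:" prefix and splitting the HDF5-internal group path off the file name while keeping Windows drive letters together) can be isolated into a small helper. The sketch below is not part of the script; split_hdf5_url and its default group name are hypothetical and simply mirror the logic shown in example #3.

def split_hdf5_url(hurl, default_path="test_LImA+pyFAI"):
    """Split 'hdf5:C:/data/out.h5:/entry/data' into (file name, internal path).

    The last ':'-separated token is taken as the HDF5-internal path; everything
    before it, including a Windows drive letter, belongs to the file name.
    """
    if hurl.startswith("hdf5:"):
        hurl = hurl[5:]
    if ":" in hurl:
        parts = hurl.split(":")
        return ":".join(parts[:-1]), parts[-1]
    return hurl, default_path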