grp = h5f.create_group('xpcs')
qtmp = grp.create_dataset('q_points', (3, N, N), 'f',
                          chunks=(1, N, N), compression="gzip", shuffle=True)
qtmp.attrs['wavelen'] = wavelen
qtmp.attrs['theta'] = [-10, 10]
qtmp.attrs['theta_units'] = 'degree'
qtmp.attrs['content'] = 'qx, qy, qz'

# store the q-grid components as (N, N) planes
qtmp[0, :, :] = q_points[..., 0].reshape((N, N))
qtmp[1, :, :] = q_points[..., 1].reshape((N, N))
qtmp[2, :, :] = q_points[..., 2].reshape((N, N))

# chunked, compressed dataset that holds one intensity frame per snapshot
dset = grp.create_dataset('imgs', (Nsteps, N, N), 'f',
                          chunks=(1, N, N), compression="gzip", shuffle=True)

# turn the crank
t0 = time.time()
for i, fname in enumerate(binfiles):
    print(fname)
    pts = read_lammps_bin(fname)
    img = mdscatter.dft(pts, q_points)
    img = np.abs(img)**2
    dset[i, :, :] = np.reshape(img, (N, N))

t1 = time.time() - t0
print('time taken = %f\n' % t1)
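For a quick sanity check, the file written above can be reopened read-only with h5py and a frame pulled out. A minimal sketch; the filename xpcs_lammps.h5 is hypothetical and stands in for whatever name h5f was opened under:

import h5py

# Open the output read-only: one group 'xpcs' holding the q-grid and the
# stack of intensity frames written by the loop above.
with h5py.File('xpcs_lammps.h5', 'r') as f:      # hypothetical filename
    q = f['xpcs/q_points']
    print(q.attrs['wavelen'], q.attrs['theta'], q.attrs['theta_units'])
    frame0 = f['xpcs/imgs'][0]                   # first (N, N) frame
    print(frame0.shape, frame0.dtype)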
if __name__ == '__main__':
    wavelen = 0.1
    # build the q-vector grid (kept under a separate name so the qvals
    # helper is not shadowed by its own result)
    q_grid = qvals(nrow=N, ncol=N)

    outf = 'xpcs' + str(N).zfill(5) + '.h5'
    h5f = h5py.File(outf, 'w')
    grp = h5f.create_group('xpcs')
    qtmp = grp.create_dataset('q_points', (3, N*N), 'f')
    qtmp.attrs['wavelen'] = wavelen
    qtmp.attrs['theta'] = [-10, 10]
    qtmp.attrs['theta_units'] = 'degree'

    # List data files sorted by step number.
    steps = sorted(glob.glob('data/*.txt'), key=lambda name: int(name[8:-4]))
    Nsteps = len(steps)
    dset = grp.create_dataset('imgs', (Nsteps, N, N), 'f')

    # turn the crank
    t0 = time.time()
    for i, fname in tqdm.tqdm(enumerate(steps), total=Nsteps):
        # Data is id, x, y, z. Drop the id column.
        # Multiply by 16 to get the q-range for the 8-ID-I setup.
        pts = 16 * np.loadtxt(fname, skiprows=9)[:, 1:]
        img = mdscatter.dft(pts, q_grid)
        img = np.abs(img)**2
        dset[i, :, :] = np.reshape(img, (N, N))

    t1 = time.time() - t0
    print('time taken = %f\n' % t1)
    h5f.close()
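The qvals helper that builds the q-grid is not shown in this excerpt. Below is a minimal sketch of one possible implementation, assuming a flat grid of exit angles spanning the theta range recorded in the attributes and elastic scattering with the incident beam along +z; the actual helper may differ.

import numpy as np

def qvals(nrow, ncol, wavelen=0.1, theta=(-10, 10)):
    """Hypothetical q-grid builder: map an (nrow x ncol) grid of exit
    angles (degrees) to momentum-transfer vectors q = k_out - k_in."""
    k = 2.0 * np.pi / wavelen
    tx = np.radians(np.linspace(theta[0], theta[1], ncol))
    ty = np.radians(np.linspace(theta[0], theta[1], nrow))
    ty, tx = np.meshgrid(ty, tx, indexing='ij')

    # unit vectors pointing from the sample toward each grid point
    sx, sy = np.sin(tx), np.sin(ty)
    sz = np.sqrt(np.clip(1.0 - sx**2 - sy**2, 0.0, None))

    k_in = np.array([0.0, 0.0, k])
    k_out = k * np.stack([sx, sy, sz], axis=-1)
    return (k_out - k_in).reshape(-1, 3)   # shape (nrow*ncol, 3)

The (nrow*ncol, 3) layout matches the reshapes in the loop above, assuming mdscatter.dft accepts an (M, 3) array of q-vectors.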
sdd = 4.
scale = 28
center = (0, 768)
beam_rad = 6 * scale

detector = Lambda750k()
qvecs = detector.qvectors(sdd, center, wavelen)

outf = 'xpcs_out.h5'
h5f = h5py.File(outf, 'w')
grp = h5f.create_group('xpcs')
qtmp = grp.create_dataset('q_points', (3, *detector.shape), 'f')

# collect the snapshot files to process
pattern = r'3(\d){4}'
npys = filelist('/home/dkumar/Data/np_arrays', pattern)
Nsteps = len(npys)
dset = grp.create_dataset('imgs', (Nsteps, *detector.shape), 'f')

# turn the crank
t0 = time.time()
for i, npy in enumerate(npys):
    pts = load_npy(npy, center=np.array([8, 8, 8]), scale=scale)
    img = mdscatter.dft(pts, qvecs, beam_rad)
    img = np.abs(img)**2
    dset[i, :, :] = np.reshape(img, detector.shape)

t1 = time.time() - t0
print('time taken = %f\n' % t1)
h5f.close()
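filelist and load_npy are small helpers that are likewise not shown. A minimal sketch of plausible implementations, assuming filelist filters a directory listing with a regular expression and load_npy recenters and rescales an (Npts, 3) coordinate array stored as .npy; both are illustrative, not the original code.

import os
import re
import numpy as np

def filelist(dirname, pattern):
    """Hypothetical helper: sorted full paths in `dirname` whose
    basenames match the regular expression `pattern`."""
    regex = re.compile(pattern)
    names = sorted(f for f in os.listdir(dirname) if regex.search(f))
    return [os.path.join(dirname, f) for f in names]

def load_npy(fname, center, scale):
    """Hypothetical helper: load particle coordinates, recenter them on
    `center`, and rescale them to the desired q-range."""
    pts = np.load(fname)
    return scale * (pts - center)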