# Write one reflector file: columns are x, y, z and reflection coefficient.
def gen_reflectors_file(x, y, z, coeff, n_file):
    reflectors = np.zeros((len(x), 4))
    reflectors[:, 0] = x
    reflectors[:, 1] = y
    reflectors[:, 2] = z
    reflectors[:, 3] = coeff
    hdf5util.write_ndarray(reflectors,
                           PROJECT_DIR + "reflectors" + str(n_file) + ".h5",
                           DATASET_NAME)
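# Hedged usage sketch (my addition, not from the original script): write one
# reflector file for points along a line at constant depth. Assumes np,
# hdf5util, PROJECT_DIR and DATASET_NAME are defined as above; all values here
# are illustrative.
n_points = 100
x = np.linspace(-0.02, 0.02, n_points)  # x positions (m)
y = np.zeros(n_points)                  # single plane at y = 0
z = np.full(n_points, 0.04)             # constant depth (m)
coeff = np.ones(n_points)               # unit reflection coefficients
gen_reflectors_file(x, y, z, coeff, n_file=0)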
# Filter every signal in one HDF5 file and write the result under out_dir,
# preserving the relative path.
def process_file(file_path):
    if in_dir == out_dir:
        raise ValueError("The input and output directories must be different.")

    signal_set = hdf5util.read_to_ndarray(in_dir + "/" + file_path, dataset_name)
    signal_set_out = np.zeros(signal_set.shape)
    for i in range(signal_set.shape[0]):
        s_out = filter_signal(signal_set[i, :], i == plot_signal_index)
        signal_set_out[i, :len(s_out)] = s_out

    out_path = out_dir + "/" + file_path
    op = Path(out_path)
    op.parent.mkdir(parents=True, exist_ok=True)
    hdf5util.write_ndarray(signal_set_out, out_path, dataset_name)
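# Hedged usage sketch (my addition, not part of the original script): apply
# process_file to every .h5 file directly under in_dir. Assumes in_dir, out_dir,
# dataset_name, filter_signal and plot_signal_index are defined elsewhere in
# the script.
import os

for name in sorted(os.listdir(in_dir)):
    if name.endswith(".h5"):
        process_file(name)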
# Ground reflectors: uniformly distributed points.
for i in range(GROUND_NUM_POINTS):
    reflectors[i, 0] = random.uniform(GROUND_X_MIN, GROUND_X_MAX)
    reflectors[i, 1] = random.uniform(GROUND_Y_MIN, GROUND_Y_MAX)
    reflectors[i, 2] = random.uniform(GROUND_Z_MIN, GROUND_Z_MAX)
    reflectors[i, 3] = random.uniform(GROUND_COEFF_MIN, GROUND_COEFF_MAX)

# Column reflectors: radius from a half-Gaussian, z from a Gaussian around the
# column mid-height, clipped at COLUMN_Z_MAX.
offset = GROUND_NUM_POINTS
for i in range(COLUMN_NUM_POINTS):
    r = abs(random.gauss(0.0, COLUMN_MAX_RADIUS / 2.0))
    theta = random.uniform(0.0, 2.0 * np.pi)
    z0 = (COLUMN_Z_MIN + COLUMN_Z_MAX) / 2.0
    z = abs(random.gauss(z0, 0.5 * (COLUMN_Z_MAX - COLUMN_Z_MIN) / 2.0))
    if z > COLUMN_Z_MAX:
        z = COLUMN_Z_MAX
    reflectors[offset + i, 0] = COLUMN_X_CENTER + r * np.cos(theta)
    reflectors[offset + i, 1] = COLUMN_Y_CENTER + r * np.sin(theta)
    reflectors[offset + i, 2] = z
    reflectors[offset + i, 3] = COLUMN_COEFF_MIN \
        + (COLUMN_COEFF_MAX - COLUMN_COEFF_MIN) * random.random()

hdf5util.write_ndarray(reflectors,
                       PROJECT_DIR + "reflectors-random_column_and_ground.h5",
                       DATASET_NAME)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(reflectors[:, 0], reflectors[:, 1], reflectors[:, 2])
ax.set_xlabel('x (m)')
ax.set_ylabel('y (m)')
ax.set_zlabel('z (m)')
# Angular grid over a spherical cap: the step GRID_STEP / RADIUS keeps the arc
# length between neighboring points approximately equal to GRID_STEP.
wx_max = np.deg2rad(ANGLE)
wx_list = np.arange(0.0, wx_max, GRID_STEP / RADIUS)
wx_list = np.r_[-wx_list[:0:-1], wx_list]

point_list = []
z_min = RADIUS * np.cos(wx_max)
for wx in wx_list:
    z_max = RADIUS * np.cos(wx)
    wy_max = np.arccos(z_min / z_max)
    wy_list = np.arange(0.0, wy_max, GRID_STEP / z_max)
    wy_list = np.r_[-wy_list[:0:-1], wy_list]
    y = Y_CENTER + RADIUS * np.sin(wx)
    for wy in wy_list:
        point_list.append([X_CENTER + z_max * np.sin(wy),
                           y,
                           Z_CENTER + (z_max * np.cos(wy)) * Z_COEF,
                           1.0])

reflectors = np.array(point_list)

hdf5util.write_ndarray(reflectors,
                       PROJECT_DIR + "reflectors-spherical_cap.h5",
                       DATASET_NAME)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(reflectors[:, 0], reflectors[:, 1], reflectors[:, 2])
ax.set_xlabel('x (m)')
ax.set_ylabel('y (m)')
ax.set_zlabel('z (m)')
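# Optional sanity check (my addition, not in the original script): when
# Z_COEF == 1.0, every generated point lies on the sphere of radius RADIUS
# centered at (X_CENTER, Y_CENTER, Z_CENTER), because the x/z offsets sum to
# (RADIUS*cos(wx))**2 and the y offset contributes (RADIUS*sin(wx))**2.
if Z_COEF == 1.0:
    center = np.array([X_CENTER, Y_CENTER, Z_CENTER])
    radii = np.linalg.norm(reflectors[:, :3] - center, axis=1)
    print("max |radius - RADIUS|:", np.abs(radii - RADIUS).max())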
#!/usr/bin/env python3
# This file is in the public domain.

import matplotlib.pyplot as plt
import numpy as np
from scipy.special import iv

from util import hdf5util

x = np.arange(0.0, 50.0, 0.1)
bessel_i0 = iv(0.0, x)

plt.figure()
plt.plot(x, bessel_i0)
plt.title('bessel I0')

hdf5util.write_ndarray(data=x, file_path='bessel_i0_x.h5', dataset_name='v')
hdf5util.write_ndarray(data=bessel_i0, file_path='bessel_i0.h5', dataset_name='v')

plt.show()
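# Cross-check sketch (my addition, not part of the original script): numpy's
# np.i0 computes the same modified Bessel function of the first kind, order 0,
# so the difference should be tiny.
print('max abs diff vs np.i0:', np.abs(bessel_i0 - np.i0(x)).max())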
plt.figure()
plt.plot(x1_f.real, 'r')
plt.plot(x1_f.imag, 'b')
plt.title('x1_f')

plt.figure()
plt.plot(x2_f.real, 'r')
plt.plot(x2_f.imag, 'b')
plt.title('x2_f')

plt.figure()
plt.plot(x3_f.real, 'r')
plt.plot(x3_f.imag, 'b')
plt.title('x3_f')

hdf5util.write_ndarray(data=x1, file_path='fft_x_4000.h5', dataset_name='v')
hdf5util.write_ndarray(data=x2, file_path='fft_x_2048.h5', dataset_name='v')
hdf5util.write_ndarray(data=x3, file_path='fft_x_3571.h5', dataset_name='v')
hdf5util.write_ndarray(data=x1_f.real, file_path='fft_yr_4000.h5', dataset_name='v')
hdf5util.write_ndarray(data=x1_f.imag, file_path='fft_yi_4000.h5', dataset_name='v')
hdf5util.write_ndarray(data=x2_f.real, file_path='fft_yr_2048.h5', dataset_name='v')
hdf5util.write_ndarray(data=x2_f.imag, file_path='fft_yi_2048.h5', dataset_name='v')
                                           result_queue))
    proc_list.append(proc)
    proc.start()

# Get results.
for i in range(num_jobs):
    result = result_queue.get()
    image[result.ix, :] = result.image_line

# Signal the processes to end their execution.
for i in range(mproc.cpu_count()):
    job_queue.put("END")

# Wait for the end of the processes.
for proc in proc_list:
    # join() must be called after the result queue is emptied.
    proc.join()

end_time = time.time()
print("Processing time: {} s".format(end_time - start_time))

plt.figure()
plt.pcolormesh(grid_z - 0.5 * z_step, grid_x - 0.5 * x_step, image)
plt.grid(True)
#plt.axis("Equal")
plt.xlabel("z (m)")
plt.ylabel("x (m)")

hdf5util.write_ndarray(image, "output/image_value.h5", "value")
hdf5util.write_ndarray(grid_x, "output/image_x.h5", "x")
hdf5util.write_ndarray(grid_z, "output/image_z.h5", "z")

plt.show()
print('h.shape:', h.shape)

ah = np.abs(h)

plt.figure()
plt.plot(x)
plt.title('x')
plt.grid(True)

plt.figure()
plt.plot(h.real, 'r')
plt.plot(h.imag, 'b')
plt.title('h')
plt.grid(True)

plt.figure()
plt.plot(ah, 'r')
plt.plot(np.abs(x), 'b')
plt.title('ah')
plt.grid(True)

hdf5util.write_ndarray(data=x, file_path='hilbert_x.h5', dataset_name='v')
hdf5util.write_ndarray(data=ah, file_path='hilbert_ya.h5', dataset_name='v')
hdf5util.write_ndarray(data=h.real, file_path='hilbert_yr.h5', dataset_name='v')
hdf5util.write_ndarray(data=h.imag, file_path='hilbert_yi.h5', dataset_name='v')

plt.show()
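# Cross-check sketch (my addition, not part of the original script): if h is
# the analytic signal of the real 1-D array x, its envelope should match the
# one SciPy computes.
from scipy.signal import hilbert as scipy_hilbert
print('max abs envelope diff vs scipy:',
      np.abs(np.abs(scipy_hilbert(x)) - ah).max())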
    return window_size, window_beta

tol_list = np.arange(1e-6, 0.2, 1e-4)
tol_db_list = -20.0 * np.log10(tol_list)

beta_list = np.empty_like(tol_db_list)
for i, tol in enumerate(np.nditer(tol_db_list)):
    beta_list[i] = beta(tol)

beta_list_ref = np.empty_like(tol_db_list)
for i, tol in enumerate(np.nditer(tol_db_list)):
    beta_list_ref[i] = kaiser_beta(tol)

print('max abs beta error:', np.abs(beta_list - beta_list_ref).max())

hdf5util.write_ndarray(data=tol_db_list, file_path='kaiser_tol_db.h5', dataset_name='v')
hdf5util.write_ndarray(data=beta_list_ref, file_path='kaiser_beta.h5', dataset_name='v')

plt.figure()
plt.plot(tol_db_list, beta_list, label='local')
plt.plot(tol_db_list, beta_list_ref, label='scipy')
plt.title('Beta list')
plt.legend(loc='upper right', labelspacing=0.2)

trans_width_list = np.array([0.05, 0.1, 0.5])
size_matrix = np.zeros((len(trans_width_list), len(tol_list)), dtype=int)
size_matrix_ref = np.empty_like(size_matrix)
for i, trans_width in enumerate(np.nditer(trans_width_list)):
    plt.figure()
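# Reference sketch (my addition): the standard piecewise formula mapping the
# stopband attenuation a (dB) to the Kaiser window beta, the relation documented
# for scipy.signal.kaiser_beta; shown only to make the comparison above explicit.
def kaiser_beta_ref(a):
    if a > 50.0:
        return 0.1102 * (a - 8.7)
    elif a >= 21.0:
        return 0.5842 * (a - 21.0) ** 0.4 + 0.07886 * (a - 21.0)
    else:
        return 0.0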
import sys

from util import hdf5util, windowfunction
import matplotlib.pyplot as plt

DATASET_NAME = "apod"

#==============================================================================

def show_usage():
    print("Usage:")
    print("{} <window_size>".format(sys.argv[0]))

if __name__ == "__main__":
    if len(sys.argv) != 2:
        show_usage()
        sys.exit(1)

    window_size = int(sys.argv[1])
    if window_size <= 0:
        window_size = 1
    elif window_size > 4096:
        window_size = 4096

    #apod = windowfunction.get_blackman_window(window_size)
    apod = windowfunction.get_blackman2_window(window_size)

    hdf5util.write_ndarray(apod, "apod_1d_blackman-" + str(window_size) + ".h5", DATASET_NAME)

    plt.stem(apod)
    plt.show()
DATASET_NAME = "apod"

#==============================================================================

def show_usage():
    print("Usage:")
    print("{} <window_size>".format(sys.argv[0]))

if __name__ == "__main__":
    if len(sys.argv) != 2:
        show_usage()
        sys.exit(1)

    window_size = int(sys.argv[1])
    if window_size <= 0:
        window_size = 1
    elif window_size > 4096:
        window_size = 4096

    apod = np.ones(window_size)

    hdf5util.write_ndarray(apod, "apod_1d_rectangular-" + str(window_size) + ".h5", DATASET_NAME)

    plt.stem(apod)
    plt.show()
DECIMATION_FILTER_TOLERANCE = 0.0001
DECIMATION_FILTER_TRANSITION_WIDTH = 0.3  # fraction of the original bandwidth
FS = 400e6
DECIMATION_OFFSET = 32
#DECIMATION_OFFSET = 33

ascan = hdf5util.read_to_ndarray(file_path='ref_pulse.h5', dataset_name='ascan')
ascan = np.hstack((ascan, np.zeros(500)))
ascan = np.hstack((ascan, np.ones(100)))
ascan = np.hstack((ascan, np.zeros(500)))
t = arrayutil.get_time_sequence(len(ascan), FS)

hdf5util.write_ndarray(data=ascan, file_path='decimation_source.h5', dataset_name='v')

for decimation_factor in [2, 5, 10]:
    print('### decimation_factor: {}'.format(decimation_factor))

    decim_filter = decimation.downsampling_filter(
        decimation_factor=decimation_factor,
        half_transition_width=DECIMATION_FILTER_TRANSITION_WIDTH,
        tolerance=DECIMATION_FILTER_TOLERANCE,
        plot=False)
    print('filter length: {}'.format(len(decim_filter)))

    ascan_d_offset, ascan_d, t_d = decimation.decimate(DECIMATION_OFFSET,
                                                       ascan,
                                                       decimation_factor,
                                                       decim_filter,
                                                       t)
    print('len(ascan) {}'.format(len(ascan)))
    reflectors[i, 1] = random.uniform(GROUND_Y_MIN, GROUND_Y_MAX)
    reflectors[i, 2] = random.uniform(GROUND_Z_MIN, GROUND_Z_MAX)
    reflectors[i, 3] = random.uniform(GROUND_COEFF_MIN, GROUND_COEFF_MAX)

dx = (PAIR_X_MAX - PAIR_X_MIN) / (NUM_POINT_PAIRS - 1)
dy = (PAIR_Y_MAX - PAIR_Y_MIN) / (NUM_POINT_PAIRS - 1)
dz = (PAIR_Z_MAX - PAIR_Z_MIN) / (NUM_POINT_PAIRS - 1)

offset = GROUND_NUM_POINTS
for i in range(NUM_POINT_PAIRS):
    reflectors[offset + i, 0] = PAIR_X_MIN + i * dx - 0.5 * PAIR_DISTANCE
    reflectors[offset + i, 1] = PAIR_Y_MIN + i * dy
    reflectors[offset + i, 2] = PAIR_Z_MIN + i * dz
    reflectors[offset + i, 3] = PAIR_COEFF

offset = GROUND_NUM_POINTS + NUM_POINT_PAIRS
for i in range(NUM_POINT_PAIRS):
    reflectors[offset + i, 0] = PAIR_X_MIN + i * dx + 0.5 * PAIR_DISTANCE
    reflectors[offset + i, 1] = PAIR_Y_MIN + i * dy
    reflectors[offset + i, 2] = PAIR_Z_MIN + i * dz
    reflectors[offset + i, 3] = PAIR_COEFF

hdf5util.write_ndarray(reflectors,
                       PROJECT_DIR + "reflectors-point_pairs_and_ground.h5",
                       DATASET_NAME)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(reflectors[:, 0], reflectors[:, 1], reflectors[:, 2])
ax.set_xlabel('x (m)')
ax.set_ylabel('y (m)')
ax.set_zlabel('z (m)')
X_MIN = -0.7
X_MAX = 0.7
Y_MIN = -0.1
Y_MAX = 0.1
Z_MIN = 0.7
Z_MAX = 0.701
NUM_POINTS = 10000
COEFF_MIN = 1.0
COEFF_MAX = 5.0

#==============================================================================

random.seed(42)

reflectors = np.zeros((NUM_POINTS, 4))
for i in range(NUM_POINTS):
    reflectors[i, 0] = random.uniform(X_MIN, X_MAX)
    reflectors[i, 1] = random.uniform(Y_MIN, Y_MAX)
    reflectors[i, 2] = random.uniform(Z_MIN, Z_MAX)
    reflectors[i, 3] = random.uniform(COEFF_MIN, COEFF_MAX)

hdf5util.write_ndarray(reflectors, PROJECT_DIR + "reflectors-ground.h5", DATASET_NAME)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(reflectors[:, 0], reflectors[:, 1], reflectors[:, 2])
ax.set_xlabel('x (m)')
ax.set_ylabel('y (m)')
ax.set_zlabel('z (m)')
import matplotlib.pyplot as plt
import numpy as np

from util import arrayutil, hdf5util, interpolation

UPSAMP_FILTER_TOLERANCE = 0.0001
UPSAMP_FILTER_TRANSITION_WIDTH = 0.45  # fraction of the original bandwidth
FS = 40e6

ascan = hdf5util.read_to_ndarray(file_path='base48_tx15_rx15-calib_plane_40mm_avg1.h5',
                                 dataset_name='ascan')
ascan = ascan.T[2000:3000].flatten()
t = arrayutil.get_time_sequence(len(ascan), FS)

hdf5util.write_ndarray(data=ascan, file_path='interp_source.h5', dataset_name='v')

for upsamp_factor in [2, 8, 64]:
    print('### upsamp_factor:', upsamp_factor)

    upsamp_filter = interpolation.upsampling_filter(
        upsamp_factor=upsamp_factor,
        half_transition_width=UPSAMP_FILTER_TRANSITION_WIDTH,
        tolerance=UPSAMP_FILTER_TOLERANCE,
        plot=False)
    print('filter length:', len(upsamp_filter))

    ascan_r, t_r = interpolation.interpolate(ascan, upsamp_factor, upsamp_filter, t, FS)

    offset = (len(ascan_r) - len(ascan) * upsamp_factor) // 2
    print('offset:', offset)