def wheel_pass_debugging(road: Road, wheel: Wheel, max_iterations: int,
                         bump_method: str, dig_method: str,
                         dig_probability_arguments: list,
                         smoothing_method: str, smoothing_arguments: list):
    """
    Performs a loop of wheel passes over a road until 'max_iterations'
    wheel passes have been completed. The successive wheel passes modify
    the road surface; intermediate surfaces are printed for debugging.

    :param road: a Road
    :param wheel: a Wheel
    :param max_iterations: maximum number of wheel passes (iterations)
    :param bump_method: str, method name for the 'determine_bump_height' function
    :param dig_method: str, method name for the 'digging' function
    :param dig_probability_arguments: list, arguments for the digging probability used by the 'digging' function
    :param smoothing_method: str, method name for the 'smoothing' function
    :param smoothing_arguments: list, arguments for the 'smoothing' function
    """
    current_passes = 0
    while wheel.number_of_passes < max_iterations:
        # Smooth the road once at the start of each new pass.
        if current_passes < wheel.number_of_passes:
            smoothing(road, wheel, smoothing_method, smoothing_arguments)
            current_passes = wheel.number_of_passes

        initial_position = wheel.xf
        print_road_surface(road, wheel.xf, wheel.diameter)

        bump_position = wheel.move_to_next_bump(road)
        bump_height = determine_bump_height(road, wheel, bump_position,
                                            method=bump_method)
        print_road_surface(road, wheel.xf, wheel.diameter)

        wheel.jump(road, bump_height)
        print_road_surface(road, wheel.xf, wheel.diameter)

        digging(road, wheel, wheel.xf, method=dig_method,
                dig_probability_args=dig_probability_arguments)
        wheel.update_position(wheel.diameter)
        wheel.set_elevation(road.piles[wheel.xf])
        final_position = wheel.xf

        # The wheel wrapped around the road: one full pass is complete.
        if final_position <= initial_position:
            smoothing(road, wheel, smoothing_method, smoothing_arguments)
            wheel.number_of_passes += 1
            print(f'\nIteration number {wheel.number_of_passes}')
            print(f'The number of grains is {road.get_number_of_grains()}, '
                  f'the initial was {road.initial_number_of_grains}\n')
            print_road_surface(road, wheel.xf, wheel.diameter)
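# Usage sketch (hypothetical): the Road/Wheel constructor arguments and the
# method-name strings below are illustrative placeholders, not values taken
# from this codebase.
road = Road(size=1000, standard_height=10)
wheel = Wheel(diameter=6, position=0)
wheel_pass_debugging(road, wheel, max_iterations=100,
                     bump_method='max_height',
                     dig_method='uniform',
                     dig_probability_arguments=[0.25],
                     smoothing_method='basic',
                     smoothing_arguments=[5])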
def online_train(step=1):
    start_time = time.time()
    data_labels, tag_map = create_matrix.get_data_labels_and_tag_map()
    fm_obj = fm_online.FmOnline(data_labels, tag_map)
    fm_obj.prepare_train(0.005, K=8, step=1)
    fm_obj.fit(step)
    fm_obj.calc_error()
    fm_obj.arrange_user()
    redis_flush()
    fm_obj.cy_fm.save_redis()
    labels = fm_obj.labels
    save_params_into_radis(labels, tag_map)  # save labels to redis
    smoothing()
    fm_obj.save_top_k_ranking_all_user(smoothing_flag=True)
    print time.time() - start_time
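# `redis_flush` is defined elsewhere in the project; a minimal sketch
# consistent with the calls in this module (no-arg and db=1), assuming
# redis-py with default host/port:
import redis

def redis_flush(db=0):
    # Drop every key in the given database before re-saving model parameters.
    r = redis.StrictRedis(host='localhost', port=6379, db=db)
    r.flushdb()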
def smoothing_validation():
    start_time = time.time()
    (learning_matrix, regs_matrix, labels, targets, tag_map,
     ratelist, train_songs, validation_songs) = create_matrix.create_smoothing_fm_matrix()
    redis_flush(db=1)
    save_songs("train_songs", train_songs)
    save_songs("validation_songs", validation_songs)
    print "Initializing FM class"
    fm_obj = fm_batch.FmBatch(learning_matrix, regs_matrix, labels, targets, tag_map)
    print "Starting SGD training"
    fm_obj.learning(0.005, K=8, step=1)
    fm_obj.arrange_user()
    fm_obj.cy_fm.save_redis(db=1)
    labels = fm_obj.labels
    save_params_into_radis(labels, tag_map)  # save labels to redis
    #fm_obj.smoothing(smoothing_evaluate=True)
    smoothing(smoothing_evaluate=True)
    print time.time() - start_time
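# `save_songs` is also defined elsewhere; a hypothetical sketch that persists
# the train/validation song split as redis lists so the evaluation split is
# reproducible (key names match the calls above; db=1 mirrors redis_flush(db=1)):
import redis

def save_songs(key, songs, db=1):
    r = redis.StrictRedis(host='localhost', port=6379, db=db)
    r.delete(key)  # overwrite any previous split
    for song in songs:
        r.rpush(key, song)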
def batch_train():
    start_time = time.time()
    learning_matrix, regs_matrix, labels, targets, tag_map, ratelist = create_matrix.create_fm_matrix()
    print "Initializing FM class"
    fm_obj = fm_batch.FmBatch(learning_matrix, regs_matrix, labels, targets, tag_map)
    print "Starting SGD training"
    fm_obj.learning(0.005, K=8, step=1)
    fm_obj.arrange_user()
    #fm_obj.smoothing()
    print "Saving to redis"
    redis_flush()
    fm_obj.cy_fm.save_redis()
    labels = fm_obj.labels
    save_params_into_radis(labels, tag_map)  # save labels to redis
    #print "Saving top_k_ranking"
    #fm_obj.save_top_k_ranking_all_user()
    smoothing()
    print "Saving top_k_ranking"
    fm_obj.save_top_k_ranking_all_user(smoothing_flag=True)
    print time.time() - start_time
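# Hypothetical entry point tying the three training modes together; the
# command-line handling is illustrative and not part of the original module.
if __name__ == '__main__':
    import sys
    mode = sys.argv[1] if len(sys.argv) > 1 else 'batch'
    if mode == 'online':
        online_train()
    elif mode == 'validation':
        smoothing_validation()
    else:
        batch_train()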
# Image shape (91, 109, 91, 240)
# (the 'if' test for this first branch is truncated in the source; the first
# branch works on mask-filtered data, the 'else' branch on the raw image)
    #in_brain_img = make_mask_filtered_data(image_path[1], mask_path)
    data_int = in_brain_img.get_data()
    data = data_int.astype(float)
    mean_data = np.mean(data, axis=-1)
    in_brain_mask = (mean_data - 0.0) < 0.01
    Transpose = False
else:
    img = nib.load(image_path[1])
    data_int = img.get_data()
    data = data_int.astype(float)
    mean_data = np.mean(data, axis=-1)
    in_brain_mask = mean_data > thres
    Transpose = True

# Smoothing with a Gaussian filter
smooth_data = smoothing(data, 1, range(data.shape[-1]))

# Selecting the voxels in the brain
in_brain_tcs = smooth_data[in_brain_mask, :]
#in_brain_tcs = data[in_brain_mask, :]
vol_shape = data.shape[:-1]

# Plotting the voxels in the brain
plt.imshow(plot_mosaic(mean_data, transpose=Transpose), cmap='gray', alpha=1)
plt.colorbar()
plt.contour(plot_mosaic(in_brain_mask, transpose=Transpose), colors='blue')
plt.title('In brain voxel mean values' + '\n' + (d_path['type'] + str(name)))
plt.savefig(project_path + 'fig/BOLD/%s_mean_voxels_contour.png'
            % (d_path['type'] + str(name)))
#plt.show()
plt.clf()
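# The `smoothing` helper used throughout this analysis is not shown in this
# section; a minimal sketch consistent with the call signature above
# (4D data, Gaussian sigma, iterable of volume indices), assuming it wraps
# scipy.ndimage.gaussian_filter over each 3D volume:
import numpy as np
from scipy.ndimage import gaussian_filter

def smoothing(data, sigma, time_indices):
    # Smooth each selected 3D volume of the 4D image independently.
    smooth_data = np.zeros(data.shape)
    for t in time_indices:
        smooth_data[..., t] = gaussian_filter(data[..., t], sigma)
    return smooth_data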
# (the 'if' test for this first branch is truncated in the source)
    template_data = template_data_int.astype(float)
    Transpose = False
    in_brain_mask = (mean_data - 0.0) < 0.01
    plt.imshow(plot_mosaic(template_data, transpose=Transpose),
               cmap='gray', alpha=1)
else:
    img = nib.load(image_path[1])
    data_int = img.get_data()
    data = data_int.astype(float)
    mean_data = np.mean(data, axis=-1)
    in_brain_mask = mean_data > thres
    Transpose = True
plt.contour(plot_mosaic(in_brain_mask, transpose=Transpose),
            cmap='gray', alpha=1)

# Smoothing with a Gaussian filter
smooth_data = smoothing(data, 1, range(data.shape[-1]))

# Selecting the voxels in the brain
in_brain_tcs = smooth_data[in_brain_mask, :]
#in_brain_tcs = data[in_brain_mask, :]
vol_shape = data.shape[:-1]

# Plotting the voxels in the brain
plt.imshow(plot_mosaic(mean_data, transpose=Transpose), cmap='gray', alpha=1)
plt.colorbar()
plt.title('In brain voxel mean values' + '\n' + (d_path['type'] + str(name)))
plt.savefig(project_path + 'fig/BOLD/%s_mean_voxels.png'
            % (d_path['type'] + str(name)))
#plt.show()
#plt.clf()

# Convolution with 1 to 4 conditions
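# `plot_mosaic` is imported from the project's plotting utilities; a minimal
# sketch of the assumed behaviour (tiling the slices of a 3D volume into a
# single 2D array for plt.imshow). The near-square grid layout is an assumption.
import numpy as np

def plot_mosaic(img_data, transpose=False):
    if transpose:
        img_data = img_data.T
    n_slices = img_data.shape[-1]
    grid = int(np.ceil(np.sqrt(n_slices)))  # near-square grid of slices
    nx, ny = img_data.shape[:2]
    mosaic = np.zeros((grid * nx, grid * ny))
    for k in range(n_slices):
        r, c = divmod(k, grid)
        mosaic[r * nx:(r + 1) * nx, c * ny:(c + 1) * ny] = img_data[..., k]
    return mosaic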
X_matrix_high_res1 = np.loadtxt(txt_path[6])
X_matrix_high_res2 = np.loadtxt(txt_path[7])
X_matrix_high_res3 = np.loadtxt(txt_path[8])
X_matrix_high_res4 = np.loadtxt(txt_path[9])
X_matrix_high_res = np.ones((len(X_matrix1), p))
X_matrix_high_res[..., 1] = X_matrix_high_res1
X_matrix_high_res[..., 2] = X_matrix_high_res2
X_matrix_high_res[..., 3] = X_matrix_high_res3
X_matrix_high_res[..., 4] = X_matrix_high_res4

beta_4d = glm_beta(data, X_matrix)
MRSS, fitted, residuals = glm_mrss(beta_4d, X_matrix, data)

# smooth the data and re-run the regression
data_smooth = smoothing(data, 1, range(data.shape[-1]))
beta_4d_smooth = glm_beta(data_smooth, X_matrix)
MRSS_smooth, fitted_smooth, residuals_smooth = glm_mrss(
    beta_4d_smooth, X_matrix, data_smooth)

# use the high-resolution convolutions as the design matrix
beta_4d_high_res = glm_beta(data, X_matrix_high_res)
MRSS_high_res, fitted_high_res, residuals_high_res = glm_mrss(
    beta_4d_high_res, X_matrix_high_res, data)

plt.plot(data[4, 22, 11], label="actual")
plt.plot(fitted[4, 22, 11], label="fitted")
plt.plot(fitted_high_res[4, 22, 11], label="fitted_high_res")
plt.title(name[0:17] + " voxel (4,22,11) actual vs fitted")
plt.legend(loc="upper left", fontsize="smaller")
plt.savefig(dirs[1] + '/' + name[0:17] + "_glm_fitted.png")
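# `glm_beta` and `glm_mrss` come from the project's GLM utilities; a minimal
# sketch of conventional OLS implementations matching the call sites above
# (4D data of shape (i, j, k, t), design matrix X of shape (t, p)):
import numpy as np
import numpy.linalg as npl

def glm_beta(data_4d, X):
    # Solve least squares for every voxel at once.
    Y = data_4d.reshape((-1, data_4d.shape[-1])).T        # (t, n_voxels)
    beta = npl.pinv(X).dot(Y)                             # (p, n_voxels)
    return beta.T.reshape(data_4d.shape[:-1] + (X.shape[1],))

def glm_mrss(beta_4d, X, data_4d):
    # Fitted values, residuals, and mean residual sum of squares per voxel.
    beta = beta_4d.reshape((-1, X.shape[1])).T            # (p, n_voxels)
    fitted = X.dot(beta).T.reshape(data_4d.shape)         # back to 4D
    residuals = data_4d - fitted
    df = X.shape[0] - npl.matrix_rank(X)
    MRSS = np.sum(residuals ** 2, axis=-1) / df
    return MRSS, fitted, residuals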
thres = 375  # from analysis of the histograms

for image_path in images_paths:
    name = image_path[0]
    print("Starting t-test analysis and plot for subject " + name[9:12])
    img = nib.load(image_path[1])
    data_int = img.get_data()
    data = data_int.astype(float)
    vol_shape = data.shape[:-1]
    n_trs = data.shape[-1]

    # get the mean value
    mean_data = np.mean(data, axis=-1)
    # build the mask
    in_brain_mask = mean_data > thres
    # smooth the data set
    smooth_data = smoothing(data, 1, range(n_trs))

    # initialize the design matrix for the t-test
    p = 7
    X_matrix = np.ones((data.shape[-1], p))
    # build the design matrix: one regressor per condition ...
    for cond in range(1, 5):
        convolved = np.loadtxt(txt_path + name + '_conv_' +
                               str(cond).zfill(3) + '_high_res.txt')
        #convolved = np.loadtxt(txt_path + name + '_conv_' + str(cond).zfill(3) + '_canonical.txt')
        X_matrix[:, cond] = convolved
    # ... plus linear and quadratic drift terms
    linear_drift = np.linspace(-1, 1, n_trs)
    X_matrix[:, 5] = linear_drift
    quadratic_drift = linear_drift ** 2
    quadratic_drift -= np.mean(quadratic_drift)
    X_matrix[:, 6] = quadratic_drift

    # note: 'p' is rebound here to the p-values, shadowing the column count above
    beta, t, df, p = t_stat(smooth_data, X_matrix)
    for cond in range(0, 4):
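# `t_stat` is imported from the project's statistics utilities; a minimal
# sketch of a conventional per-regressor OLS t-test matching the call above
# (returns betas, t values, degrees of freedom, and two-sided p-values):
import numpy as np
import numpy.linalg as npl
from scipy.stats import t as t_dist

def t_stat(data_4d, X):
    Y = data_4d.reshape((-1, data_4d.shape[-1])).T        # (t, n_voxels)
    beta = npl.pinv(X).dot(Y)                             # (p, n_voxels)
    fitted = X.dot(beta)
    df = X.shape[0] - npl.matrix_rank(X)
    MRSS = np.sum((Y - fitted) ** 2, axis=0) / df         # (n_voxels,)
    var_diag = np.diag(npl.pinv(X.T.dot(X)))              # (p,)
    SE = np.sqrt(MRSS[None, :] * var_diag[:, None])       # (p, n_voxels)
    t = beta / SE
    p_values = 2 * (1 - t_dist.cdf(np.abs(t), df))        # two-sided
    return beta, t, df, p_values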