import numpy as np
import bottleneck as bn
import miniutils


def bootfit(x, y, nsimul, errors=1):
    # Bootstrap a linear fit: run boot_polyfit nsimul times in parallel,
    # each call seeded with its own random integer.
    boot_polyfit_results = miniutils.parallel_progbar(
        boot_polyfit,
        zip([x] * nsimul, [y] * nsimul, np.random.randint(0, 100 * nsimul, nsimul)),
        nprocs=4, starmap=True)
    boot_polyfit_results = np.array(boot_polyfit_results)
    m_array = boot_polyfit_results[:, 0]  # slopes
    b_array = boot_polyfit_results[:, 1]  # intercepts

    m_median = bn.nanmedian(m_array)
    b_median = bn.nanmedian(b_array)

    if errors == 1:
        # 1/2/3-sigma percentile bounds on the slope and intercept
        m_s1_up = np.percentile(m_array, s1_up_q * 100)
        m_s1_down = np.percentile(m_array, s1_down_q * 100)
        m_s2_up = np.percentile(m_array, s2_up_q * 100)
        m_s2_down = np.percentile(m_array, s2_down_q * 100)
        m_s3_up = np.percentile(m_array, s3_up_q * 100)
        m_s3_down = np.percentile(m_array, s3_down_q * 100)
        b_s1_up = np.percentile(b_array, s1_up_q * 100)
        b_s1_down = np.percentile(b_array, s1_down_q * 100)
        b_s2_up = np.percentile(b_array, s2_up_q * 100)
        b_s2_down = np.percentile(b_array, s2_down_q * 100)
        b_s3_up = np.percentile(b_array, s3_up_q * 100)
        b_s3_down = np.percentile(b_array, s3_down_q * 100)
    else:
        # No error estimation requested: zero out every bound so the
        # output dict below is always well-defined.
        m_s1_up = m_s1_down = m_s2_up = m_s2_down = m_s3_up = m_s3_down = 0
        b_s1_up = b_s1_down = b_s2_up = b_s2_down = b_s3_up = b_s3_down = 0

    return {"m_median": m_median,
            "m_s1_up": m_s1_up, "m_s1_down": m_s1_down,
            "m_s2_up": m_s2_up, "m_s2_down": m_s2_down,
            "m_s3_up": m_s3_up, "m_s3_down": m_s3_down,
            "b_median": b_median,
            "b_s1_up": b_s1_up, "b_s1_down": b_s1_down,
            "b_s2_up": b_s2_up, "b_s2_down": b_s2_down,
            "b_s3_up": b_s3_up, "b_s3_down": b_s3_down}
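# boot_polyfit and the percentile constants are not defined in this snippet.
# Below is a minimal sketch, assuming boot_polyfit resamples the points with
# replacement and fits a line with np.polyfit; the real helper may differ:
def boot_polyfit(x, y, seed):
    rng = np.random.RandomState(seed)
    idx = rng.randint(0, len(x), len(x))  # bootstrap resample indices
    m, b = np.polyfit(x[idx], y[idx], 1)
    return m, b


# The percentile constants are likewise assumed to be module-level Gaussian
# sigma quantiles, e.g.:
from scipy.stats import norm

s1_down_q, s1_up_q = norm.cdf(-1), norm.cdf(1)  # ~0.159, ~0.841
s2_down_q, s2_up_q = norm.cdf(-2), norm.cdf(2)  # ~0.023, ~0.977
s3_down_q, s3_up_q = norm.cdf(-3), norm.cdf(3)  # ~0.001, ~0.999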
import os
import subprocess
import traceback

from miniutils import parallel_progbar


def main():
    # download_morneve, process_morneve, convert_to_audio and `here` are
    # defined elsewhere in this script.
    morneve_path = os.path.join(here, 'morneve.txt')
    if not os.path.exists(morneve_path):
        download_morneve(morneve_path)
    devotionals = process_morneve(morneve_path)

    # Grab a Google Cloud access token for the Text-to-Speech requests
    google_auth = subprocess.run(
        ['gcloud', 'auth', 'application-default', 'print-access-token'],
        stdout=subprocess.PIPE).stdout.decode().strip()

    voices = [('en-gb', 'en-GB-Wavenet-D', 'MALE'),
              ('en-us', 'en-US-Wavenet-B', 'MALE')]
    jobs = [(month, day, time, voice_details)
            for month in devotionals
            for day in devotionals[month]
            for time in devotionals[month][day]
            for voice_details in voices]

    def create_audio_devotional(month, day, time, voice_details):
        try:
            mp3_path = os.path.join(here, 'output', 'morneve', month, str(day),
                                    time, '_'.join(voice_details) + '.mp3')
            if not os.path.exists(mp3_path):
                os.makedirs(os.path.dirname(mp3_path), exist_ok=True)
                content = devotionals[month][day][time]
                convert_to_audio(content, mp3_path, google_auth, *voice_details)
        except Exception:
            print('=' * 80)
            print(f"On job {month}-{day}-{time}, voice={voice_details}")
            print('=' * 80)
            traceback.print_exc()
            print('=' * 80)
            print('=' * 80)

    parallel_progbar(create_audio_devotional, jobs, starmap=True, nprocs=20)
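# convert_to_audio is not shown above. A minimal sketch using the Google
# Cloud Text-to-Speech REST endpoint (v1 text:synthesize), which returns the
# MP3 bytes base64-encoded in the JSON body; the original helper may differ:
import base64

import requests


def convert_to_audio(content, mp3_path, google_auth, language_code, voice_name, gender):
    response = requests.post(
        'https://texttospeech.googleapis.com/v1/text:synthesize',
        headers={'Authorization': f'Bearer {google_auth}'},
        json={'input': {'text': content},
              'voice': {'languageCode': language_code,
                        'name': voice_name,
                        'ssmlGender': gender},
              'audioConfig': {'audioEncoding': 'MP3'}})
    response.raise_for_status()
    with open(mp3_path, 'wb') as f:
        f.write(base64.b64decode(response.json()['audioContent']))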
            # (tail of the compute_col worker; the enclosing loops over types
            # and candidate senses are not shown in this snippet)
            except Exception:
                sim_2 = 0
            if sim_1 is None:
                sim_1 = 0
            if sim_2 is None:
                sim_2 = 0
            sim_cand_list.append(max(sim_1, sim_2))
        if len(sim_cand_list) > 0:
            sim = max(sim_cand_list)
        else:
            sim = 0
        sim_col = np.append(sim_col, sim)
    return sim_col


# Multiprocess: one similarity column per type
sim_mat = np.array(
    parallel_progbar(compute_col, range(n_type), nprocs=n_cpu))
# Compute the final matrix
sim_mat = sim_mat + sim_mat.T
np.fill_diagonal(sim_mat, auto_sim_list)

# Write the similarity matrix
with open(sim_matrix_file_path, "w") as sim_matrix_file:
    np.savetxt(sim_matrix_file, sim_mat, delimiter=";", fmt="%.8f")

# Write the words and their frequencies
with open(type_freq_file_path, "w") as type_freq_file:
    for type_item in checked_vocab:
        type_freq_file.write(f"{type_item};{type_freq_dict[type_item]}\n")
import numpy as np
import SimpleITK as sitk
import miniutils

# Echo times arrive as a single space-separated string argument
echos = np.array([float(s) for s in args[0].split()], dtype=np.float64)
mask = args[1]
# create an array of volume handles
input_files = args[2:-2]

maskimage = sitk.ReadImage(mask)
maskdata = sitk.GetArrayFromImage(maskimage)
img_dim = maskdata.shape
maskdata = np.ravel(maskdata)

# One row per echo, one column per voxel
inputdata = np.zeros(
    (len(echos), img_dim[0] * img_dim[1] * img_dim[2]), dtype=np.float64
)
for i, image in enumerate(input_files):
    inputdata[i, :] = np.ravel(sitk.GetArrayFromImage(sitk.ReadImage(image)))

# Fit every voxel in parallel; each result is (S0, T2*)
results = miniutils.parallel_progbar(do_fit, range(inputdata.shape[1]))
results = np.asarray(results)
s0_est = results[:, 0]
t2star_est = results[:, 1]

s0_est = sitk.GetImageFromArray(np.clip(s0_est.reshape(img_dim), 0, 1000))
s0_est.CopyInformation(maskimage)
sitk.WriteImage(s0_est, s0_outfilename)

t2star_est = sitk.GetImageFromArray(np.clip(t2star_est.reshape(img_dim), 0, 1))
t2star_est.CopyInformation(maskimage)
sitk.WriteImage(t2star_est, t2star_outfilename)
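# do_fit is not defined in this snippet. A minimal sketch of a voxel-wise
# log-linear fit of S(TE) = S0 * exp(-TE / T2*), consistent with how the
# results are unpacked above (column 0 -> S0, column 1 -> T2*); it reads the
# module-level arrays, which works with fork-based multiprocessing:
def do_fit(voxel_idx):
    if maskdata[voxel_idx] == 0:
        return 0.0, 0.0  # outside the mask: nothing to fit
    signal = np.maximum(inputdata[:, voxel_idx], 1e-6)  # keep log() finite
    slope, intercept = np.polyfit(echos, np.log(signal), 1)
    s0 = np.exp(intercept)
    t2star = -1.0 / slope if slope < 0 else 0.0  # decaying signal only
    return s0, t2star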
        # (tail of nmi_computation: the call producing res_matrix starts above)
            n_group=n_group, alpha=alpha, beta=beta, kappa=kappa,
            init_labels=known_labels)

        # Compute the groups
        alg_group_vec = np.argmax(res_matrix, 1) + 1
        rstr_alg_group_vec = np.delete(alg_group_vec, indices_for_known_label)

        # Compute nmi score
        nmi = normalized_mutual_info_score(rstr_real_group_vec, rstr_alg_group_vec)
        nmi_vector.append(nmi)

    return np.mean(nmi_vector)


# Multiprocess over the hyperparameter grid
res_multi = parallel_progbar(nmi_computation, hyperp_list, starmap=True, nprocs=n_cpu)

# Get best result
max_nmi = max(res_multi)
id_max_nmi = res_multi.index(max_nmi)

# If nmi is better, write it
if max_nmi > nmi_train:
    nmi_train = max_nmi
    best_param_dic = {"dist_option": dist_option,
                      "exch_mat_opt": exch_mat_opt,
                      "exch_range": exch_range,
                      "alpha": hyperp_list[id_max_nmi][0],
                      "beta": hyperp_list[id_max_nmi][1],
                      "kappa": hyperp_list[id_max_nmi][2]}
    print(f"New best: {nmi_train}, {best_param_dic}")
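# The scraper below references month tables that are not part of the snippet.
# Plausible definitions (month_max read as the largest zero-based day index,
# with February set so that a 29th day is attempted for leap years):
import os
import re

import requests
from miniutils import parallel_progbar

months = ['January', 'February', 'March', 'April', 'May', 'June', 'July',
          'August', 'September', 'October', 'November', 'December']
month_max = [30, 28, 30, 29, 30, 29, 30, 30, 29, 30, 29, 30]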
times = {'m': 'Morning', 'e': 'Evening'}

if __name__ == '__main__':
    base_url = 'https://www.biblegateway.com/audio/devotional/morning-and-evening'
    jobs = [(month, day, time)
            for month in range(12)
            for day in range(31)
            for time in 'me']
    base_path = './output/morneve_biblegateway'

    def fetch_mp3(month, day, time):
        # Skip day indices past the end of the month
        if day > month_max[month]:
            return
        path = os.path.join(base_path, months[month], str(day + 1), times[time],
                            'biblegateway.mp3')
        if os.path.exists(path):
            return
        # Page signatures look like MMDDm / MMDDe, e.g. 0101m
        signature = f'{month+1:02d}{day+1:02d}{time}'
        listening_page = f'{base_url}/{signature}'
        listening_page = requests.get(listening_page).content.decode()
        mp3_url = re.search(f'(http(?:s)?://.+?{signature}.+?\\.mp3)', listening_page)
        if mp3_url:
            mp3_data = requests.get(mp3_url.group(1)).content
            if len(mp3_data) > 100:  # ignore trivially small (error) responses
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, 'wb') as f:
                    f.write(mp3_data)

    parallel_progbar(fetch_mp3, jobs, starmap=True)
# Compute the extended version of the matrix
d_ext_mat, token_list, _ = type_to_token_matrix_expansion(
    text_file_path, d_mat, type_list)


# the z_autocor function
def z_autocor(exch_range):
    # Compute the exchange and transition matrices
    exch_mat, w_mat = exchange_and_transition_matrices(
        len(token_list), exch_mat_opt=exch_mat_opt, exch_range=exch_range)
    # Compute the autocorrelation index
    autocor_index, theoretical_mean, theoretical_var = autocorrelation_index(
        d_ext_mat, exch_mat, w_mat)
    # Z score for autocor
    z_ac = (autocor_index - theoretical_mean) / np.sqrt(theoretical_var)
    return z_ac


# Run on multiprocess
z_autocor_vec = parallel_progbar(z_autocor, exch_range_window, nprocs=n_cpu)

# Append one row of z scores to the output file
with open(output_file_name, "a") as output_file:
    for z_val in z_autocor_vec:
        output_file.write(f", {z_val}")
    output_file.write("\n")
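# Common pattern across all of these snippets: map a worker function over an
# iterable with miniutils.parallel_progbar, which runs the calls in a process
# pool and shows a progress bar. A minimal self-contained sketch (assumes only
# that miniutils is installed):
from miniutils import parallel_progbar


def square(n):
    return n * n


if __name__ == '__main__':
    # nprocs caps the worker count; starmap=True would unpack tuple arguments,
    # as in the snippets above.
    results = parallel_progbar(square, range(100), nprocs=4)
    print(results[:5])  # expect [0, 1, 4, 9, 16], in input order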