def run_gen(full_path, dur_path, lf0_path, start, stop): for sett in Utility.char_range(start, stop): dur_set_path = '{}/{}/'.format(dur_path, sett) full_set_path = '{}/{}/'.format(full_path, sett) lf0_set_path = '{}/{}/'.format(lf0_path, sett) if not (Utility.is_dir_exists(dur_set_path) & Utility.is_dir_exists(full_set_path) & Utility.is_dir_exists(lf0_set_path)): print 'No set : ', sett continue for f in Utility.list_file(full_set_path): if f.startswith('.'): continue print f base = Utility.get_basefilename(f) dur_list = '{}/{}.dur'.format(dur_set_path, base) lf0_list = '{}/{}/'.format(lf0_set_path, base) full_list = '{}/{}.lab'.format(full_set_path, base) run_make_obj_for_an_utterance(full_list, dur_list, lf0_list) # sys.exit(0) pass
for ch in Utility.char_range('a', 'z'): if ch == 'j' : continue set_stress_path = '{}/{} lab/'.format(stress_path, ch) set_utt_base_path = '{}/{}/'.format(utt_base_path, ch) set_syllable_full_path = '{}/{}/'.format(syllable_full_path, ch) set_out_path = '{}/{}/'.format(out_path, ch) Utility.make_directory(set_out_path) if Utility.is_dir_exists(set_stress_path) & Utility.is_dir_exists(set_utt_base_path): print ch for i in xrange(1, 51): name = 'tscsd{}{}'.format(ch, Utility.fill_zero(i, 2)) yaml_filename = '{}/{}.utt.yaml'.format(set_utt_base_path, name ) if not Utility.is_file_exist(yaml_filename): continue full_file = '{}/{}.lab'.format(set_syllable_full_path, name) count = [0] yaml = Utility.yaml_load(yaml_filename) add_stress(yaml, count, name)
# Output roots: per-syllable lf0 data and f0 plot images.
out_path = '/work/w2/decha/Data/GPR_speccom_data/lf0_in_syllable/'
plot_out_path = '/work/w2/decha/Data/GPR_speccom_data/f0_in_syllable_plot/'
# Only process sets 'k' through 'z' in this run.
start = 'k'
stop = 'z'
Utility.make_directory(plot_out_path)
for sett in Utility.char_range(start, stop):
    print sett
    # stress_data_path / lf0_path are defined elsewhere in this file.
    set_path = '{}/{} lab/'.format(stress_data_path, sett)
    if not Utility.is_dir_exists(set_path):
        print 'Inexist : {}'.format(set_path)
        continue
    # Per-set input lf0 directory plus per-set output/plot directories.
    lf0_set_path = '{}/{}/'.format(lf0_path, sett)
    out_set_path = '{}/{}/'.format(out_path, sett)
    plot_set_out = '{}/{}/'.format(plot_out_path, sett)
    Utility.make_directory(plot_set_out)
    for f in Utility.list_file(set_path):
        # NOTE(review): the body of this per-file loop appears truncated in
        # this chunk -- only the hidden-file guard is visible; the work that
        # populates out_set_path / plot_set_out must live outside this view.
        if f.startswith('.'): continue
n_clusters = xrange(2, 6) n_neighbor = [0.025, 0.05, 0.075, 0.1, 0.2] # base_path = '/work/w23/decha/decha_w23/Second_Journal/Unsupervised_learning_result/15_Agglomerative_clustering/' base_path = '/work/w23/decha/decha_w23/Second_Journal/Unsupervised_learning_result/15_Agglomerative_clustering_pca/' tex_file = [] tex_file.append('\\documentclass{article}') tex_file.append('\\usepackage{geometry}') tex_file.append('\\usepackage[usenames, dvipsnames]{color}') tex_file.append('\\geometry{margin=1cm}') tex_file.append('\\usepackage[english]{babel}') tex_file.append('\\usepackage{graphicx}') tex_file.append('\\begin{document}') for t in xrange(5): for f in ['nasal', 'no', 'non-nasal']: # for v in ['long', 'short']: name = '{}_{}'.format(t, f) name_path = '{}/{}/'.format(base_path, name) if Utility.is_dir_exists(name_path): print name gen_latex(name_path, n_clusters, n_neighbor, name) tex_file.append('\\end{document}') Utility.write_to_file_line_by_line('{}/all.tex'.format(base_path), tex_file) pass
# NOTE(review): the next two lines are the tail of a call/function whose
# opening precedes this chunk -- left untouched; they are incomplete here.
                    alpha, beta, rmse, l)
    pass

if __name__ == '__main__':
    # Root of predicted contours, one subdirectory per (alpha, beta) pair.
    predictive = '/work/w21/decha/Interspeech_2017/Result/03_Given_syllable_dct_with_weigth/num_dct_cov_7/'
    # Reference lf0 data (before silence removal) for set 'j'.
    org_path = '/work/w2/decha/Data/GPR_speccom_data/data_before_remove_silence/lf0/tsc/sd/j/'
    tmp_path = './tmp/'
    Utility.make_directory(tmp_path)
    basename = 'tscsdj'
    vuv_path = '/work/w21/decha/Interspeech_2017/GPR_data/450/param_align/lf0/param_mean/'
    # Sweep the alpha/beta weighting grid (0.1 .. 1.9 in 0.1 steps).
    # NOTE(review): np.arange float steps can accumulate error; the formatted
    # 'alpha_{}_beta_{}' directory names rely on str() rounding matching the
    # names used when the directories were created -- confirm.
    for alpha in np.arange(0.1, 2, 0.1):
        for beta in np.arange(0.1, 2, 0.1):
            alpha_beta_path = '{}/alpha_{}_beta_{}/'.format(
                predictive, alpha, beta)
            # Only score pairs whose results actually exist on disk.
            if Utility.is_dir_exists(alpha_beta_path):
                run_cal_distortion(basename, tmp_path, alpha_beta_path,
                                   alpha, beta)
    pass