def test_sick_diag_ubm(sick_dict, sick_corpus, generated_dir):
    """Smoke test: diagonal-UBM training runs to completion on the sick corpus."""
    out_dir = os.path.join(generated_dir, 'sick_output')
    work_dir = os.path.join(generated_dir, 'sickcorpus')
    aligner = TrainableAligner(sick_corpus, sick_dict, out_dir,
                               temp_directory=work_dir)
    aligner.train_diag_ubm()
def test_sick_mono(sick_dict, sick_corpus, generated_dir):
    """Smoke test: monophone training runs to completion (input prompts skipped)."""
    aligner = TrainableAligner(sick_corpus, sick_dict,
                               os.path.join(generated_dir, 'sick_output'),
                               temp_directory=os.path.join(generated_dir, 'sickcorpus'),
                               skip_input=True)
    aligner.train_mono()
def test_sick_nnet(sick_dict, sick_corpus, generated_dir, nnet_train_config):
    """Smoke test: full nnet training pipeline from a clean temp directory."""
    # Start from a clean slate so artifacts of earlier runs cannot leak in.
    shutil.rmtree(sick_corpus.output_directory, ignore_errors=True)
    os.makedirs(sick_corpus.output_directory, exist_ok=True)
    nnet_train_config, align_config = nnet_train_config
    data_directory = os.path.join(generated_dir, 'temp', 'nnet_test')
    shutil.rmtree(data_directory, ignore_errors=True)
    aligner = TrainableAligner(sick_corpus, sick_dict,
                               nnet_train_config, align_config,
                               os.path.join(generated_dir, 'sick_output'),
                               temp_directory=data_directory)
    aligner.train()
def test_sick_nnet_basic(sick_dict, sick_corpus, generated_dir):
    """Smoke test: basic nnet training plus TextGrid export."""
    out_dir = os.path.join(generated_dir, 'sick_output')
    work_dir = os.path.join(generated_dir, 'sickcorpus')
    aligner = TrainableAligner(sick_corpus, sick_dict, out_dir,
                               temp_directory=work_dir)
    aligner.train_nnet_basic()
    aligner.export_textgrids()
def test_sick_tri_fmllr(sick_dict, sick_corpus, generated_dir):
    """Smoke test: triphone+fMLLR training and TextGrid export (input prompts skipped)."""
    aligner = TrainableAligner(sick_corpus, sick_dict,
                               os.path.join(generated_dir, 'sick_output'),
                               temp_directory=os.path.join(generated_dir, 'sickcorpus'),
                               skip_input=True)
    aligner.train_tri_fmllr()
    aligner.export_textgrids()
def align_corpus(args):
    """Train acoustic models on a corpus and export aligned TextGrids.

    Reads (or initializes) a per-corpus ``config.yml`` in the temp directory
    to detect stale state; wipes the temp directory when anything relevant
    changed; then runs the configured training pipeline, exports TextGrids,
    and optionally saves the trained model.

    :param args: parsed CLI namespace (corpus_directory, dictionary_path,
        output_directory, temp_directory, config_path, output_model_path,
        verbose, plus optional clean/num_jobs/debug/ignore_exceptions).
    """
    if not args.temp_directory:
        temp_dir = TEMP_DIR
    else:
        temp_dir = os.path.expanduser(args.temp_directory)
    corpus_name = os.path.basename(args.corpus_directory)
    if corpus_name == '':
        # A trailing path separator yields an empty basename; strip and retry.
        args.corpus_directory = os.path.dirname(args.corpus_directory)
        corpus_name = os.path.basename(args.corpus_directory)
    data_directory = os.path.join(temp_dir, corpus_name)
    conf_path = os.path.join(data_directory, 'config.yml')
    if os.path.exists(conf_path):
        with open(conf_path, 'r') as f:
            # safe_load: the file contains only plain scalars written below by
            # yaml.dump, and safe_load avoids arbitrary object construction
            # (bare yaml.load without a Loader is deprecated in PyYAML 5.1+).
            conf = yaml.safe_load(f)
    else:
        conf = {'dirty': False,
                'begin': time.time(),
                'version': __version__,
                'type': 'train_and_align',
                'corpus_directory': args.corpus_directory,
                'dictionary_path': args.dictionary_path}
    # Rebuild the temp directory if the previous run crashed (dirty) or was
    # made with different inputs/version than this run.
    if getattr(args, 'clean', False) \
            or conf['dirty'] or conf['type'] != 'train_and_align' \
            or conf['corpus_directory'] != args.corpus_directory \
            or conf['version'] != __version__ \
            or conf['dictionary_path'] != args.dictionary_path:
        shutil.rmtree(data_directory, ignore_errors=True)
    os.makedirs(data_directory, exist_ok=True)
    os.makedirs(args.output_directory, exist_ok=True)
    try:
        corpus = Corpus(args.corpus_directory, data_directory,
                        speaker_characters=args.speaker_characters,
                        num_jobs=getattr(args, 'num_jobs', 3),
                        debug=getattr(args, 'debug', False),
                        ignore_exceptions=getattr(args, 'ignore_exceptions', False))
        if corpus.issues_check:
            # Fixed: this message was broken by a stray newline inside the
            # string literal in the original source.
            print('WARNING: Some issues parsing the corpus were detected. '
                  'Please run the validator to get more information.')
        dictionary = Dictionary(args.dictionary_path, data_directory,
                                word_set=corpus.word_set)
        utt_oov_path = os.path.join(corpus.split_directory(), 'utterance_oovs.txt')
        if os.path.exists(utt_oov_path):
            shutil.copy(utt_oov_path, args.output_directory)
        oov_path = os.path.join(corpus.split_directory(), 'oovs_found.txt')
        if os.path.exists(oov_path):
            shutil.copy(oov_path, args.output_directory)
        if args.config_path:
            train_config, align_config = train_yaml_to_config(args.config_path)
        else:
            train_config, align_config = load_basic_train()
        a = TrainableAligner(corpus, dictionary, train_config, align_config,
                             args.output_directory,
                             temp_directory=data_directory)
        a.verbose = args.verbose
        a.train()
        a.export_textgrids()
        if args.output_model_path is not None:
            a.save(args.output_model_path)
    except BaseException:
        # Equivalent to the original bare except: mark the temp directory
        # dirty on *any* interruption (including KeyboardInterrupt) so the
        # next run starts from a clean state, then re-raise.
        conf['dirty'] = True
        raise
    finally:
        with open(conf_path, 'w') as f:
            yaml.dump(conf, f)
def align_corpus_no_dict(corpus_dir, output_directory, temp_dir, output_model_path, args):
    """Align a corpus without a pronunciation dictionary.

    Builds a placeholder dictionary from the corpus itself, then runs the
    mono -> tri -> tri+fMLLR training sequence, exporting TextGrids after
    each stage and optionally saving the final model.
    """
    if not temp_dir:
        temp_dir = TEMP_DIR
    else:
        temp_dir = os.path.expanduser(temp_dir)
    data_directory = os.path.join(temp_dir, os.path.basename(corpus_dir))
    if args.clean:
        # Remove any previous state before re-running.
        shutil.rmtree(data_directory, ignore_errors=True)
        shutil.rmtree(output_directory, ignore_errors=True)
    os.makedirs(data_directory, exist_ok=True)
    os.makedirs(output_directory, exist_ok=True)
    corpus = Corpus(corpus_dir, data_directory, args.speaker_characters,
                    num_jobs=args.num_jobs)
    print(corpus.speaker_utterance_info())
    dictionary = no_dictionary(corpus, data_directory)
    dictionary.write()
    corpus.write()
    corpus.create_mfccs()
    corpus.setup_splits(dictionary)
    # All three stages share the same "align often" switch.
    often = {'align_often': not args.fast}
    aligner = TrainableAligner(corpus, dictionary, output_directory,
                               temp_directory=data_directory,
                               mono_params=often, tri_params=often,
                               tri_fmllr_params=often, num_jobs=args.num_jobs)
    aligner.verbose = args.verbose
    for stage in (aligner.train_mono, aligner.train_tri, aligner.train_tri_fmllr):
        stage()
        aligner.export_textgrids()
    if output_model_path is not None:
        aligner.save(output_model_path)
def align_corpus_no_dict(args, skip_input=False):
    """Align a corpus without a dictionary, driven by a parsed CLI namespace.

    Generates a stand-in dictionary from the corpus, then trains mono, tri,
    and tri+fMLLR models in turn, exporting TextGrids after each stage and
    optionally saving the final model.
    """
    if not args.temp_directory:
        temp_dir = TEMP_DIR
    else:
        temp_dir = os.path.expanduser(args.temp_directory)
    data_directory = os.path.join(temp_dir, os.path.basename(args.corpus_directory))
    if args.clean:
        # Remove any previous state before re-running.
        shutil.rmtree(data_directory, ignore_errors=True)
        shutil.rmtree(args.output_directory, ignore_errors=True)
    os.makedirs(data_directory, exist_ok=True)
    os.makedirs(args.output_directory, exist_ok=True)
    corpus = Corpus(args.corpus_directory, data_directory, args.speaker_characters,
                    num_jobs=getattr(args, 'num_jobs', 3),
                    debug=getattr(args, 'debug', False),
                    ignore_exceptions=getattr(args, 'ignore_exceptions', False))
    print(corpus.speaker_utterance_info())
    dictionary = no_dictionary(corpus, data_directory)
    # All three stages share the same "align often" switch.
    often = {'align_often': not args.fast}
    aligner = TrainableAligner(corpus, dictionary, args.output_directory,
                               temp_directory=data_directory,
                               mono_params=often, tri_params=often,
                               tri_fmllr_params=often,
                               num_jobs=args.num_jobs, debug=args.debug)
    aligner.verbose = args.verbose
    for stage in (aligner.train_mono, aligner.train_tri, aligner.train_tri_fmllr):
        stage()
        aligner.export_textgrids()
    if args.output_model_path is not None:
        aligner.save(args.output_model_path)
def align_corpus(args, skip_input=False):
    """Train mono/tri/tri+fMLLR acoustic models on a corpus and export TextGrids.

    Tracks run state in a per-corpus ``config.yml`` under the temp directory;
    when the previous run was dirty or used different inputs/version, both the
    temp and output directories are wiped before training.

    :param args: parsed CLI namespace (corpus_directory, dictionary_path,
        output_directory, temp_directory, fast, verbose, num_jobs,
        output_model_path, plus optional clean/debug/ignore_exceptions).
    :param skip_input: accepted for interface compatibility; not used here.
    """
    if not args.temp_directory:
        temp_dir = TEMP_DIR
    else:
        temp_dir = os.path.expanduser(args.temp_directory)
    corpus_name = os.path.basename(args.corpus_directory)
    if corpus_name == '':
        # A trailing path separator yields an empty basename; strip and retry.
        args.corpus_directory = os.path.dirname(args.corpus_directory)
        corpus_name = os.path.basename(args.corpus_directory)
    data_directory = os.path.join(temp_dir, corpus_name)
    conf_path = os.path.join(data_directory, 'config.yml')
    if os.path.exists(conf_path):
        with open(conf_path, 'r') as f:
            # safe_load: the file contains only plain scalars written below by
            # yaml.dump, and safe_load avoids arbitrary object construction
            # (bare yaml.load without a Loader is deprecated in PyYAML 5.1+).
            conf = yaml.safe_load(f)
    else:
        conf = {'dirty': False,
                'begin': time.time(),
                'version': __version__,
                'type': 'train_and_align',
                'corpus_directory': args.corpus_directory,
                'dictionary_path': args.dictionary_path}
    # Rebuild from scratch if the previous run crashed (dirty) or was made
    # with different inputs/version than this run.
    if getattr(args, 'clean', False) \
            or conf['dirty'] or conf['type'] != 'train_and_align' \
            or conf['corpus_directory'] != args.corpus_directory \
            or conf['version'] != __version__ \
            or conf['dictionary_path'] != args.dictionary_path:
        shutil.rmtree(data_directory, ignore_errors=True)
        shutil.rmtree(args.output_directory, ignore_errors=True)
    os.makedirs(data_directory, exist_ok=True)
    os.makedirs(args.output_directory, exist_ok=True)
    try:
        corpus = Corpus(args.corpus_directory, data_directory,
                        speaker_characters=args.speaker_characters,
                        num_jobs=getattr(args, 'num_jobs', 3),
                        debug=getattr(args, 'debug', False),
                        ignore_exceptions=getattr(args, 'ignore_exceptions', False))
        dictionary = Dictionary(args.dictionary_path, data_directory,
                                word_set=corpus.word_set)
        # NOTE(review): split_directory is accessed as an attribute here but
        # called as a method in other versions of this function — confirm
        # which Corpus API this file targets.
        utt_oov_path = os.path.join(corpus.split_directory, 'utterance_oovs.txt')
        if os.path.exists(utt_oov_path):
            shutil.copy(utt_oov_path, args.output_directory)
        oov_path = os.path.join(corpus.split_directory, 'oovs_found.txt')
        if os.path.exists(oov_path):
            shutil.copy(oov_path, args.output_directory)
        mono_params = {'align_often': not args.fast}
        tri_params = {'align_often': not args.fast}
        tri_fmllr_params = {'align_often': not args.fast}
        a = TrainableAligner(corpus, dictionary, args.output_directory,
                             temp_directory=data_directory,
                             mono_params=mono_params, tri_params=tri_params,
                             tri_fmllr_params=tri_fmllr_params,
                             num_jobs=args.num_jobs)
        a.verbose = args.verbose
        a.train_mono()
        a.export_textgrids()
        a.train_tri()
        a.export_textgrids()
        a.train_tri_fmllr()
        a.export_textgrids()
        if args.output_model_path is not None:
            a.save(args.output_model_path)
    except BaseException:
        # Equivalent to the original bare except: mark dirty on *any*
        # interruption (including KeyboardInterrupt), then re-raise.
        conf['dirty'] = True
        raise
    finally:
        with open(conf_path, 'w') as f:
            yaml.dump(conf, f)
def align_corpus(args):
    """Train acoustic models on a corpus and export aligned TextGrids.

    Reads (or initializes) a per-corpus ``config.yml`` in the temp directory
    to detect stale state, wipes the temp directory when anything relevant
    changed, then runs the configured training pipeline and optionally saves
    the trained model.

    :param args: parsed CLI namespace (corpus_directory, dictionary_path,
        output_directory, temp_directory, config_path, output_model_path,
        verbose, plus optional clean/num_jobs/debug/ignore_exceptions).
    """
    if not args.temp_directory:
        temp_dir = TEMP_DIR
    else:
        temp_dir = os.path.expanduser(args.temp_directory)
    corpus_name = os.path.basename(args.corpus_directory)
    if corpus_name == "":
        # A trailing path separator yields an empty basename; strip and retry.
        args.corpus_directory = os.path.dirname(args.corpus_directory)
        corpus_name = os.path.basename(args.corpus_directory)
    data_directory = os.path.join(temp_dir, corpus_name)
    conf_path = os.path.join(data_directory, "config.yml")
    if os.path.exists(conf_path):
        with open(conf_path, "r") as f:
            # safe_load: the file contains only plain scalars written below by
            # yaml.dump, and safe_load avoids arbitrary object construction
            # (bare yaml.load without a Loader is deprecated in PyYAML 5.1+).
            conf = yaml.safe_load(f)
    else:
        conf = {
            "dirty": False,
            "begin": time.time(),
            "version": __version__,
            "type": "train_and_align",
            "corpus_directory": args.corpus_directory,
            "dictionary_path": args.dictionary_path,
        }
    # Rebuild the temp directory if the previous run crashed (dirty) or was
    # made with different inputs/version than this run.
    if (
        getattr(args, "clean", False)
        or conf["dirty"]
        or conf["type"] != "train_and_align"
        or conf["corpus_directory"] != args.corpus_directory
        or conf["version"] != __version__
        or conf["dictionary_path"] != args.dictionary_path
    ):
        shutil.rmtree(data_directory, ignore_errors=True)
    os.makedirs(data_directory, exist_ok=True)
    os.makedirs(args.output_directory, exist_ok=True)
    try:
        corpus = Corpus(
            args.corpus_directory,
            data_directory,
            speaker_characters=args.speaker_characters,
            num_jobs=getattr(args, "num_jobs", 3),
            debug=getattr(args, "debug", False),
            ignore_exceptions=getattr(args, "ignore_exceptions", False),
        )
        if corpus.issues_check:
            print(
                "WARNING: Some issues parsing the corpus were detected. "
                "Please run the validator to get more information."
            )
        dictionary = Dictionary(
            args.dictionary_path, data_directory, word_set=corpus.word_set
        )
        utt_oov_path = os.path.join(corpus.split_directory(), "utterance_oovs.txt")
        if os.path.exists(utt_oov_path):
            shutil.copy(utt_oov_path, args.output_directory)
        oov_path = os.path.join(corpus.split_directory(), "oovs_found.txt")
        if os.path.exists(oov_path):
            shutil.copy(oov_path, args.output_directory)
        if args.config_path:
            train_config, align_config = train_yaml_to_config(args.config_path)
        else:
            train_config, align_config = load_basic_train()
        a = TrainableAligner(
            corpus,
            dictionary,
            train_config,
            align_config,
            args.output_directory,
            temp_directory=data_directory,
        )
        a.verbose = args.verbose
        a.train()
        a.export_textgrids()
        if args.output_model_path is not None:
            a.save(args.output_model_path)
    except BaseException:
        # Equivalent to the original bare except: mark dirty on *any*
        # interruption (including KeyboardInterrupt), then re-raise.
        conf["dirty"] = True
        raise
    finally:
        with open(conf_path, "w") as f:
            yaml.dump(conf, f)
def align_corpus(args):
    """Train GMM (and optionally nnet) acoustic models and export TextGrids.

    Always runs the mono -> tri -> tri+fMLLR GMM sequence (the GMM models
    seed nnet training); when ``args.artificial_neural_net`` is set, follows
    with LDA+MLLT and basic nnet training. Run state is tracked in a
    per-corpus ``config.yml`` under the temp directory.

    :param args: parsed CLI namespace (corpus_directory, dictionary_path,
        output_directory, temp_directory, fast, verbose, num_jobs,
        artificial_neural_net, output_model_path, plus optional
        clean/debug/ignore_exceptions/quiet).
    """
    if not args.temp_directory:
        temp_dir = TEMP_DIR
    else:
        temp_dir = os.path.expanduser(args.temp_directory)
    corpus_name = os.path.basename(args.corpus_directory)
    if corpus_name == '':
        # A trailing path separator yields an empty basename; strip and retry.
        args.corpus_directory = os.path.dirname(args.corpus_directory)
        corpus_name = os.path.basename(args.corpus_directory)
    data_directory = os.path.join(temp_dir, corpus_name)
    conf_path = os.path.join(data_directory, 'config.yml')
    if os.path.exists(conf_path):
        with open(conf_path, 'r') as f:
            # safe_load: the file contains only plain scalars written below by
            # yaml.dump, and safe_load avoids arbitrary object construction
            # (bare yaml.load without a Loader is deprecated in PyYAML 5.1+).
            conf = yaml.safe_load(f)
    else:
        conf = {'dirty': False,
                'begin': time.time(),
                'version': __version__,
                'type': 'train_and_align',
                'corpus_directory': args.corpus_directory,
                'dictionary_path': args.dictionary_path}
    # Rebuild from scratch if the previous run crashed (dirty) or was made
    # with different inputs/version than this run.
    if getattr(args, 'clean', False) \
            or conf['dirty'] or conf['type'] != 'train_and_align' \
            or conf['corpus_directory'] != args.corpus_directory \
            or conf['version'] != __version__ \
            or conf['dictionary_path'] != args.dictionary_path:
        shutil.rmtree(data_directory, ignore_errors=True)
        shutil.rmtree(args.output_directory, ignore_errors=True)
    os.makedirs(data_directory, exist_ok=True)
    os.makedirs(args.output_directory, exist_ok=True)
    try:
        corpus = Corpus(args.corpus_directory, data_directory,
                        speaker_characters=args.speaker_characters,
                        num_jobs=getattr(args, 'num_jobs', 3),
                        debug=getattr(args, 'debug', False),
                        ignore_exceptions=getattr(args, 'ignore_exceptions', False))
        dictionary = Dictionary(args.dictionary_path, data_directory,
                                word_set=corpus.word_set)
        # NOTE(review): split_directory is accessed as an attribute here but
        # called as a method in other versions of this function — confirm
        # which Corpus API this file targets.
        utt_oov_path = os.path.join(corpus.split_directory, 'utterance_oovs.txt')
        if os.path.exists(utt_oov_path):
            shutil.copy(utt_oov_path, args.output_directory)
        oov_path = os.path.join(corpus.split_directory, 'oovs_found.txt')
        if os.path.exists(oov_path):
            shutil.copy(oov_path, args.output_directory)
        mono_params = {'align_often': not args.fast}
        tri_params = {'align_often': not args.fast}
        tri_fmllr_params = {'align_often': not args.fast}
        a = TrainableAligner(corpus, dictionary, args.output_directory,
                             temp_directory=data_directory,
                             mono_params=mono_params, tri_params=tri_params,
                             tri_fmllr_params=tri_fmllr_params,
                             num_jobs=args.num_jobs,
                             skip_input=getattr(args, 'quiet', False),
                             nnet=getattr(args, 'artificial_neural_net', False))
        a.verbose = args.verbose
        # GMM training (needed either way, as a starter for nnet training).
        a.train_mono()
        a.export_textgrids()
        a.train_tri()
        a.export_textgrids()
        a.train_tri_fmllr()
        a.export_textgrids()
        if args.artificial_neural_net:
            # nnet training on top of the GMM alignments.
            a.train_lda_mllt()
            # a.train_diag_ubm()  # Uncomment to train i-vector extractor
            # a.ivector_extractor()  # Uncomment to train i-vector extractor (integrate with argument eventually)
            a.train_nnet_basic()
            a.export_textgrids()
        if args.output_model_path is not None:
            a.save(args.output_model_path)
    except BaseException:
        # Equivalent to the original bare except: mark dirty on *any*
        # interruption (including KeyboardInterrupt), then re-raise.
        conf['dirty'] = True
        raise
    finally:
        with open(conf_path, 'w') as f:
            yaml.dump(conf, f)
def align_corpus_no_dict(corpus_dir, output_directory, temp_dir, output_model_path, args):
    """Align a corpus without a pronunciation dictionary.

    Synthesizes a dictionary from the corpus contents, prepares MFCCs and
    splits, then trains mono, tri, and tri+fMLLR models in sequence,
    exporting TextGrids after each stage and optionally saving the model.
    """
    # Note: only the empty string falls back to TEMP_DIR here (not None).
    if temp_dir == '':
        temp_dir = TEMP_DIR
    else:
        temp_dir = os.path.expanduser(temp_dir)
    data_directory = os.path.join(temp_dir, os.path.basename(corpus_dir))
    if args.clean:
        # Remove any previous state before re-running.
        shutil.rmtree(data_directory, ignore_errors=True)
        shutil.rmtree(output_directory, ignore_errors=True)
    os.makedirs(data_directory, exist_ok=True)
    os.makedirs(output_directory, exist_ok=True)
    corpus = Corpus(corpus_dir, data_directory, args.speaker_characters,
                    num_jobs=args.num_jobs)
    print(corpus.speaker_utterance_info())
    dictionary = no_dictionary(corpus, data_directory)
    dictionary.write()
    corpus.write()
    corpus.create_mfccs()
    corpus.setup_splits(dictionary)
    # All three stages share the same "align often" switch.
    often = {'align_often': not args.fast}
    aligner = TrainableAligner(corpus, dictionary, output_directory,
                               temp_directory=data_directory,
                               mono_params=often, tri_params=often,
                               tri_fmllr_params=often, num_jobs=args.num_jobs)
    aligner.verbose = args.verbose
    for stage in (aligner.train_mono, aligner.train_tri, aligner.train_tri_fmllr):
        stage()
        aligner.export_textgrids()
    if output_model_path is not None:
        aligner.save(output_model_path)
def test_sick_mono(sick_dict, sick_corpus, generated_dir):
    """Smoke test: monophone training runs to completion on the sick corpus."""
    out_dir = os.path.join(generated_dir, 'sick_output')
    work_dir = os.path.join(generated_dir, 'sickcorpus')
    aligner = TrainableAligner(sick_corpus, sick_dict, out_dir,
                               temp_directory=work_dir)
    aligner.train_mono()
def test_sick_tri_fmllr(sick_dict, sick_corpus, generated_dir):
    """Smoke test: triphone+fMLLR training and TextGrid export on the sick corpus."""
    out_dir = os.path.join(generated_dir, 'sick_output')
    work_dir = os.path.join(generated_dir, 'sickcorpus')
    aligner = TrainableAligner(sick_corpus, sick_dict, out_dir,
                               temp_directory=work_dir)
    aligner.train_tri_fmllr()
    aligner.export_textgrids()
def align_corpus(args):
    """Train acoustic models on a corpus and export aligned TextGrids.

    Reads (or initializes) a per-corpus ``config.yml`` in the temp directory
    to detect stale state; wipes the temp directory when anything relevant
    changed; then runs the configured training pipeline, exports TextGrids,
    and optionally saves the trained model.

    :param args: parsed CLI namespace (corpus_directory, dictionary_path,
        output_directory, temp_directory, config_path, output_model_path,
        verbose, plus optional clean/num_jobs/debug/ignore_exceptions).
    """
    if not args.temp_directory:
        temp_dir = TEMP_DIR
    else:
        temp_dir = os.path.expanduser(args.temp_directory)
    corpus_name = os.path.basename(args.corpus_directory)
    if corpus_name == '':
        # A trailing path separator yields an empty basename; strip and retry.
        args.corpus_directory = os.path.dirname(args.corpus_directory)
        corpus_name = os.path.basename(args.corpus_directory)
    data_directory = os.path.join(temp_dir, corpus_name)
    conf_path = os.path.join(data_directory, 'config.yml')
    if os.path.exists(conf_path):
        with open(conf_path, 'r') as f:
            # safe_load: the file contains only plain scalars written below by
            # yaml.dump, and safe_load avoids arbitrary object construction
            # (bare yaml.load without a Loader is deprecated in PyYAML 5.1+).
            conf = yaml.safe_load(f)
    else:
        conf = {'dirty': False,
                'begin': time.time(),
                'version': __version__,
                'type': 'train_and_align',
                'corpus_directory': args.corpus_directory,
                'dictionary_path': args.dictionary_path}
    # Rebuild the temp directory if the previous run crashed (dirty) or was
    # made with different inputs/version than this run.
    if getattr(args, 'clean', False) \
            or conf['dirty'] or conf['type'] != 'train_and_align' \
            or conf['corpus_directory'] != args.corpus_directory \
            or conf['version'] != __version__ \
            or conf['dictionary_path'] != args.dictionary_path:
        shutil.rmtree(data_directory, ignore_errors=True)
    os.makedirs(data_directory, exist_ok=True)
    os.makedirs(args.output_directory, exist_ok=True)
    try:
        corpus = Corpus(args.corpus_directory, data_directory,
                        speaker_characters=args.speaker_characters,
                        num_jobs=getattr(args, 'num_jobs', 3),
                        debug=getattr(args, 'debug', False),
                        ignore_exceptions=getattr(args, 'ignore_exceptions', False))
        if corpus.issues_check:
            # Fixed: this message was broken by a stray newline inside the
            # string literal in the original source.
            print('WARNING: Some issues parsing the corpus were detected. '
                  'Please run the validator to get more information.')
        dictionary = Dictionary(args.dictionary_path, data_directory,
                                word_set=corpus.word_set)
        utt_oov_path = os.path.join(corpus.split_directory(), 'utterance_oovs.txt')
        if os.path.exists(utt_oov_path):
            shutil.copy(utt_oov_path, args.output_directory)
        oov_path = os.path.join(corpus.split_directory(), 'oovs_found.txt')
        if os.path.exists(oov_path):
            shutil.copy(oov_path, args.output_directory)
        if args.config_path:
            train_config, align_config = train_yaml_to_config(args.config_path)
        else:
            train_config, align_config = load_basic_train()
        a = TrainableAligner(corpus, dictionary, train_config, align_config,
                             args.output_directory,
                             temp_directory=data_directory)
        a.verbose = args.verbose
        a.train()
        a.export_textgrids()
        if args.output_model_path is not None:
            a.save(args.output_model_path)
    except BaseException:
        # Equivalent to the original bare except: mark dirty on *any*
        # interruption (including KeyboardInterrupt), then re-raise.
        conf['dirty'] = True
        raise
    finally:
        with open(conf_path, 'w') as f:
            yaml.dump(conf, f)