t1 = file1.readline().rstrip()

# Stop once any of the input files runs out of sentences.
if not (s and s1 and t and t1):
    break

wer = []
no_of_patches = 0.0
if mode == 'compare':
    wer2 = []
    no_of_patches2 = 0.0

tgt_sentences = t1.lower()

patcher = Patcher(apertium, s, s1, t, use_caching, cache_db_file)
patches = patcher.patch(min_len, max_len, grounded, lp_dir)
best_patch = patcher.get_best_patch()
best_patch_with_cam = patcher.get_best_patch(True)  # Best patched result covering all mismatches
if best_patch:
    patches.append(best_patch)

all_patches = patches[:]
if not grounded:
    unpatched = patches[0]
    all_patches.pop(0)
else:
    unpatched = (t1,)

if best_only:
    # WER of the unpatched translation, taken as 1 - FMS against the reference.
    up_wer = 1.0 - FMS(unpatched[0].lower(), tgt_sentences).calculate_using_wagner_fischer()
use_caching = True if cache_db_file else False

# Initiate and check Apertium.
apertium = Apertium(lps[0], lps[1])
(out, err) = apertium.check_installations(lp_dir)
assertion(out, err)

# Calculate FMS between S and S1.
fms = FMS(s_sentence, s1_sentence).calculate_using_wagner_fischer()

# Exit if low FMS.
assertion(fms >= min_fms, "Sentences have low fuzzy match score of %.02f." % fms)

patcher = Patcher(apertium, s_sentence, s1_sentence, t_sentence, use_caching, cache_db_file)
patches = patcher.patch(min_len, max_len, grounded, lp_dir)
best_patch = patcher.get_best_patch()

got_patches = False
got_patches = print_patch(best_patch, cover_all, verbose, show_traces)
if not best_only:
    for patch in patches:
        got_patches = print_patch(patch, cover_all, verbose, show_traces) | got_patches

conditions = "No possible repairs"
if cover_all:
    conditions += " which covers all mismatches"
if grounded:
    if conditions != "No possible repairs":
        conditions += " and"
    else: