    # Combined score for each decoder output: gamma-weighted decoder score
    # interpolated with the corresponding LM log-probability.
    ScoreList.append(gamma * float(Result10[i][score_ind]) + (1 - gamma) * float(logProb[i][0]))  # candidate 0: Result10
    ScoreList.append(gamma * float(Result15[i][score_ind]) + (1 - gamma) * float(logProb[i][1]))  # candidate 1: Result15
    ScoreList.append(gamma * float(Result20[i][score_ind]) + (1 - gamma) * float(logProb[i][2]))  # candidate 2: Result20

    # Best Hypothesis
    bestScoreInd = ScoreList.index(min(ScoreList))

    # Build the utterance id from the clean-result file name: replace the last
    # '/' with '-' and drop the first two path components.
    UttId = ResultClean[i][fName_ind]
    UttId = UttId[::-1].replace("/", "-", 1)[::-1]
    UttId = UttId.split('/', 2)[-1]

    BestHypo.append(hypList[bestScoreInd] + ' (' + UttId + ')\n')
    #print best_utt

MDC_Hyp = outDir + "MDC_Result_Score.txt"
print("\n Writing MDC results in " + MDC_Hyp)
dump.TextWrite(BestHypo, MDC_Hyp)

print 'Finish, now Calculating Error Rate, please wait \n'
RefFile = BaseDir + "RefClean.txt"
out_File = outDir + "Aligned_MDC_Score_WERReslts.txt"
perl_script = subprocess.Popen(
    ["perl", "./word_align.pl", '-silent', MDC_Hyp, RefFile, out_File])
perl_script.wait()
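# The per-utterance combination above interpolates each decoder's score with
# its LM log-probability and keeps the hypothesis with the minimum combined
# score.  A minimal standalone sketch of that step; the helper name
# `combine_scores` and its argument layout are illustrative, not part of the
# original scripts:

def combine_scores(decoder_scores, lm_logprobs, gamma):
    # Gamma-weighted interpolation of decoder scores and LM log-probabilities;
    # returns the index of the minimum combined score plus the full list.
    combined = [gamma * float(s) + (1 - gamma) * float(lp)
                for s, lp in zip(decoder_scores, lm_logprobs)]
    return combined.index(min(combined)), combined

# Example (hypothetical values): with gamma = 0.7 the decoder score dominates.
#   best_ind, scores = combine_scores([-4200.0, -3900.0, -4100.0],
#                                     [-35.2, -33.8, -34.5], 0.7)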
#!/usr/bin/python2.7
import StoreResults as dump

ExpName = "WSJ"
#SNR_Level = "White50db"
TotalNoOfFiles = 35
BaseDir = "/Users/Azhar/Desktop/MDC_Experiments/" + ExpName + "/"
inputFile = BaseDir + "Clean-1-1.log"
outFile = BaseDir + "Reference.txt"

outString = []
with open(inputFile, 'r') as inFile:
    lines = inFile.readlines()
    flag = False
    for line in lines:
        if flag:
            # Rewrite the line that follows the marker: keep the text before
            # '(' and replace the id in parentheses with one derived from the
            # utterance's file path.
            uttID = line.split('/', 1)[-1]
            uttID = "(" + uttID.split(' ', 1)[0] + ")"
            uttID = uttID.replace('/', '-')
            uttID = uttID.replace('_1', '_5')
            line = line.split('(', 1)[0] + uttID + '\n'
            outString.append(line)
            flag = False
        if line.find('INFO: batch.c(762):') != -1:
            flag = True

print outString
# dump.TextWrite opens outFile itself, so no separate write handle is needed.
dump.TextWrite(outString, outFile)
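# StoreResults (imported as `dump`) is a local helper module that is not part
# of this listing.  A minimal sketch of what its TextWrite / CSVDictWrite
# helpers could look like, assuming TextWrite takes a list of ready-made lines
# and CSVDictWrite a list of dicts; the bodies below are illustrative, not the
# original implementation:
import csv

def TextWrite(lines, path):
    # Entries in `lines` already end with '\n', so write them verbatim.
    with open(path, 'w') as f:
        f.writelines(lines)

def CSVDictWrite(rows, path):
    # Use the keys of the first row as the CSV header.
    if not rows:
        return
    with open(path, 'wb') as f:
        writer = csv.DictWriter(f, fieldnames=list(rows[0].keys()))
        writer.writeheader()
        writer.writerows(rows)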
    }
    ListOfFinalResults.append(FinalResult)
    #print 'Best hypothesis: ', hypothesis.hypstr, " model score: ", hypothesis.best_score, " confidence: ", hypothesis.prob

    # Dump the word lattice for this utterance in both Sphinx and HTK formats.
    LatticeFile = outLattice + fNameOnly.replace("/", '-')
    #print 'LatticeFile: ' + LatticeFile
    decoder.get_lattice().write(LatticeFile + '.lat')
    decoder.get_lattice().write_htk(LatticeFile + '.htk')

    # Simple console progress indicator: print a '*' every 17 files.
    i = i + 1
    k = k + 1
    if k == 17:
        k = 0
        sys.stdout.write('*')
    progress = 100 * i / TotalNoOfFiles
    #sys.stdout.write("Progress: %d%% \r" % (progress))
    #sys.stdout.write("Input SNR: %d" % (snr) + " AM: " + AM + " File: " + fNameOnly + " Progress: %d%% \r" % (progress))
    sys.stdout.flush()

# Running perl WER test
print "\n"
dump.TextWrite(HypText, outDir + currentModel + ".txt")
dump.CSVDictWrite(ListOfFinalResults, outDir + "/All_" + currentModel + ".csv")

hypFile = outDir + currentModel + ".txt"
RefFile = BaseDir + "RefClean.txt"
out_File = outDir + "WERReslts_" + currentModel + ".txt"
print 'Finish, now Calculating Error Rate, please wait \n'
perl_script = subprocess.Popen(
    ["perl", "./word_align.pl", '-silent', hypFile, RefFile, out_File])
perl_script.wait()
print '\n'
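# Optional hardening, not part of the original script: a failure of
# word_align.pl (missing script, bad file paths) currently passes silently.
# The Popen object's return code is available after wait() and can be checked:
if perl_script.returncode != 0:
    print 'word_align.pl exited with status', perl_script.returncode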
    #print fNameOnly
    HypText.append(hyp + " (" + UttId + ")\n")
    # print("Name: " + fNameOnly + " Hyp:" + hyp + " Score:" + score + " Confidence:" + confidence)
    FinalResult = {"Name": fNameOnly, "Hyp": hyp, "Score": score, "Confidence": confidence}
    ListOfFinalResults.append(FinalResult)
    prev_line = line

OutDir = BaseOutDir + currentModel + "/"

# Create the output directory if it does not already exist
if not os.path.exists(os.path.dirname(OutDir)):
    try:
        os.makedirs(os.path.dirname(OutDir))
    except OSError as exc:  # Guard against race condition
        if exc.errno != errno.EEXIST:
            raise

print ("Storing result in: " + OutDir)
dump.TextWrite(HypText, currentModel + ".txt")
dump.CSVDictWrite(ListOfFinalResults, OutDir + "All_" + currentModel + ".csv")

'''
hypFile = currentModel + ".txt"
RefFile = BaseDir + "RefClean.txt"
out_File = BaseOutDir + "WERReslts_" + currentModel + ".txt"
print 'Finish, now Calculating Error Rate, please wait \n'
perl_script = subprocess.Popen(["perl", "./word_align.pl", '-silent', hypFile, RefFile, out_File])
perl_script.wait()
print '\n'
'''