def loadData(trainOutput, trainInput, dualParams, kernelParams):

    global cMMR

    params = mmr_setparams.cls_params()
    nview = 1
    params.ninputview = nview
    cMMR = mmr_mmr_cls.cls_mmr(params.ninputview)
    nfold = cMMR.nfold
    nrepeat = cMMR.nrepeat
    ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    cdata_store = armar_load_data.cls_label_files(trainOutput, trainInput,
                                                  dualParams, kernelParams)
    cdata_store.load_mmr(cMMR)
    mdata = cMMR.mdata
    ## ############################################################
    xselector = np.ones(mdata)
    xselector[-1] = 0
    ifold = 0
    cMMR.split_train_test(xselector, ifold)
    cMMR.compute_kernels()
    ## cMMR.Y0=cMMR.YKernel.get_train(cMMR.itrain)   ## candidates
    cMMR.csolver = mmr_solver_cls.cls_mmr_solver()
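## A minimal sketch of how loadData above might be driven; it assumes this
## file already imports the repo's own modules (numpy as np, mmr_setparams,
## mmr_mmr_cls, mmr_solver_cls, armar_load_data). The four file names are
## placeholders for illustration, not paths taken from the original code.
if __name__ == '__main__':
    loadData('train_output.txt',    ## training labels
             'train_input.txt',     ## training features
             'dual_params.txt',     ## where the dual variables are stored
             'kernel_params.txt')   ## where the kernel parameters are stored
    ## loadData fills the module-level cMMR object with the kernels and the
    ## train/test split, ready for training or an external solver
    print(cMMR.mdata, 'samples loaded')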
def mmr_main(iworkmode):

    params = mmr_setparams.cls_params()
    np.set_printoptions(precision=4)

    dresult = {}
    ## ---------------------------------------------
    list_object = ['parts']
    nviews = 1
    lfile_in = [[[(3, 4, 21)]]]
    tfile_out = (3, 1, 6)

    lresult = []
    for iobject in range(len(lfile_in)):
        tfile_in = lfile_in[iobject]
        for ifeature in range(nviews):
            params.ninputview = len(tfile_in)
            cMMR = mmr_mmr_cls.cls_mmr(params.ninputview)
            nfold = cMMR.nfold
            nrepeat = cMMR.nrepeat
            cMMR.xbias = -0.95 - ifeature * 0.05
            print('Xbias:', cMMR.xbias)

            nscore = 4
            nipar = 1
            if cMMR.crossval_mode == 0:    ## random folds
                nfold0 = nfold
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))
            elif cMMR.crossval_mode == 1:  ## predefined training and test
                nrepeat = 1
                nfold0 = 1
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))

            ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
            cdata_store = objpart_load_data.cls_label_files()
            cdata_store.load_mmr(cMMR, tfile_in, tfile_out)
            mdata = cMMR.mdata
            ## -----------------------------------------------
            print('Output kernel type: ', cMMR.YKernel.kernel_params.kernel_type)
            for i in range(params.ninputview):
                print(i, 'Input kernel type: ', cMMR.XKernel[i].kernel_params.kernel_type)
            ## -------------------------------------------------
            xtime = np.zeros(5)
            ## ############################################################
            nparam = 4      ## C, D, par1, par2
            xbest_param = np.zeros((nrepeat, nfold0, nparam))
            ## xinpar=[0.2,0.3,0.4,0.5,0.6]
            for iipar in range(nipar):
                print('===================================================')
                for irepeat in range(nrepeat):
                    ## split data into training and test
                    if cMMR.crossval_mode == 0:    ## random selection
                        xselector = np.floor(np.random.random(mdata) * nfold0)
                        xselector = xselector - (xselector == nfold0)
                    elif cMMR.crossval_mode == 1:  ## predefined training and test
                        xselector = np.zeros(mdata)
                        xselector[cMMR.ifixtrain] = 1

                    for ifold in range(nfold0):
                        cMMR.split_train_test(xselector, ifold)

                        ## validation to choose the best parameters
                        print('Validation')
                        t0 = time.clock()
                        ## select the kernel to be validated
                        cMMR.set_validation()
                        cvalidation = mmr_validation_cls.cls_mmr_validation()
                        cvalidation.validation_rkernel = cMMR.XKernel[0].title
                        best_param = cvalidation.mmr_validation(cMMR)
                        xtime[0] = time.clock() - t0

                        print('Best parameters found by validation')
                        print('c:    ', best_param.c)
                        print('d:    ', best_param.d)
                        print('par1: ', best_param.par1)
                        print('par2: ', best_param.par2)

                        xbest_param[irepeat, ifold, 0] = best_param.c
                        xbest_param[irepeat, ifold, 1] = best_param.d
                        xbest_param[irepeat, ifold, 2] = best_param.par1
                        xbest_param[irepeat, ifold, 3] = best_param.par2

                        cMMR.compute_kernels()
                        cMMR.Y0 = cMMR.YKernel.get_train(cMMR.itrain)   ## candidates

                        ## training with the best parameters
                        print('Training')
                        print(cMMR.YKernel.kernel_params.kernel_type,
                              cMMR.YKernel.kernel_params.ipar1,
                              cMMR.YKernel.kernel_params.ipar2)
                        for iview in range(cMMR.ninputview):
                            print(cMMR.XKernel[iview].kernel_params.kernel_type,
                                  cMMR.XKernel[iview].kernel_params.ipar1,
                                  cMMR.XKernel[iview].kernel_params.ipar2)
                        t0 = time.clock()
                        cOptDual = cMMR.mmr_train()
                        xtime[1] = time.clock() - t0
                        ## cOptDual transfers the dual variables to the test procedure

                        ## compute tests
                        ## check the training accuracy
                        print('Test')
                        cPredictTra = cMMR.mmr_test(cOptDual, itraindata=0)
                        ## count the proportion of labels predicted correctly
                        ## ######################################
                        if cMMR.itestmode == 2:
                            print('Test knn')
                            ypred = inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), cPredictTra)
                        else:
                            ypred = cPredictTra.zPred
                        cEvaluationTra = mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), ypred)
                        xresult_train[iipar, irepeat, ifold] = cEvaluationTra.accuracy
                        print('>>>>>>>>>>>\n', cEvaluationTra.confusion)
                        ## ######################################

                        ## check the test accuracy
                        t0 = time.clock()
                        cPredictTes = cMMR.mmr_test(cOptDual, itraindata=1)
                        ## count the proportion of labels predicted correctly
                        if cMMR.itestmode == 2:
                            ypred = inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), cPredictTes)
                        else:
                            ypred = cPredictTes.zPred
                        ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
                        cEvaluationTes = mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), ypred)
                        xtime[2] = time.clock() - t0

                        xresult_test[iipar, irepeat, ifold] = cEvaluationTes.accuracy
                        xpr[iipar, irepeat, ifold, 0] = cEvaluationTes.precision
                        xpr[iipar, irepeat, ifold, 1] = cEvaluationTes.recall
                        xpr[iipar, irepeat, ifold, 2] = cEvaluationTes.f1
                        xpr[iipar, irepeat, ifold, 3] = cEvaluationTes.accuracy
                        print(cEvaluationTes.confusion)
                        print(cEvaluationTes.classconfusion)
                        try:
                            xclassconfusion += cEvaluationTes.classconfusion
                        except NameError:
                            (n, n) = cEvaluationTes.classconfusion.shape
                            xclassconfusion = np.zeros((n, n))
                            xclassconfusion += cEvaluationTes.classconfusion
                        ## ####################################
                        print('Parameter:', iipar, 'Repetition: ', irepeat, 'Fold: ', ifold)
                        mmr_report('Result on one fold',
                                   xresult_train[iipar, irepeat, ifold],
                                   xresult_test[iipar, irepeat, ifold],
                                   xpr[iipar, irepeat, ifold, :])
                        print(np.sum(xpr[iipar, irepeat, :ifold + 1, :], 0) / (ifold + 1))

                    mmr_report('Result on one repetition',
                               np.mean(xresult_train[iipar, irepeat, :]),
                               np.mean(xresult_test[iipar, irepeat, :]),
                               np.mean(xpr[iipar, irepeat, :, :], 0))

                mmr_report('Result on all repetitions @@@@@@@',
                           np.mean(xresult_train[iipar, :, :].flatten()),
                           np.mean(xresult_test[iipar, :, :].flatten()),
                           np.mean(np.mean(xpr[iipar, :, :, :], 0), 0))

            print('Average best parameters')
            ## sfield=dir(best_param)
            xlabels = ('c', 'd', 'par1', 'par2')
            for i in range(nparam):
                ## print(sfield[i])
                print(xlabels[i], ': ', np.mean(xbest_param[:, :, i]),
                      '(', np.std(xbest_param[:, :, i]), ')')
            print('xtime:', xtime)
            sys.stdout.flush()
            dresult[ifeature] = (cMMR.xbias, np.mean(np.mean(xpr[iipar, :, :, :], 0), 0))

    for sfeature_type, tresult in dresult.items():
        ## xhead=cMMR.xbias
        headkey = tfile_in[0][0]
        xhead = cdata_store.dirvar[headkey][0] + ', ' + cdata_store.dirvar[headkey][1]
        lresult.append((xhead, tresult))

    ## lresult.sort()
    ## for litem in lresult:
    ##     print(litem)
    print('\\begin{tabular}{l|rrr}')
    print('& \\multicolumn{3}{c}{' + 'Objects' + '} \\\\')
    print('Feature type & Precision & Recall & F1 \\\\ \\hline')
    for litem in lresult:
        print(litem[0], ' & ', '%6.4f' % litem[1][1][0],
              ' & ', '%6.4f' % litem[1][1][1], ' & ', '%6.4f' % litem[1][1][2], ' \\\\')
    print('\\end{tabular}')

    print(xclassconfusion)
    print('Bye')
    return
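## A small, self-contained sketch of the random fold assignment used above in
## crossval_mode 0, with a stand-in for split_train_test; the real cls_mmr
## method may differ in details, this only illustrates the intended fold
## semantics (fold index ifold marks the test samples).
import numpy as np

def demo_fold_split(mdata=10, nfold0=4, seed=0):
    rng = np.random.RandomState(seed)
    ## every sample gets a fold index in {0, ..., nfold0-1}
    xselector = np.floor(rng.random_sample(mdata) * nfold0)
    xselector = xselector - (xselector == nfold0)   ## guard against the edge value
    for ifold in range(nfold0):
        itest = np.where(xselector == ifold)[0]     ## assumed test mask per fold
        itrain = np.where(xselector != ifold)[0]
        print('fold', ifold, 'train', itrain.size, 'test', itest.size)

demo_fold_split()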
def mmr_validation_body(self, cMMR):

    np.set_printoptions(precision=4)
    ## mtrain=cMMR.mtrain
    best_param = mmr_base_classes.cls_empty_class()
    best_param.c = 0.0
    best_param.d = 0.0
    best_param.par1 = 0.0
    best_param.par2 = 0.0
    xparam = mmr_base_classes.cls_empty_class()

    cMMRVal = mmr_mmr_cls.cls_mmr(cMMR.ninputview)
    cMMRVal.XKernel = [None] * cMMR.ninputview
    cMMR.copy(cMMRVal, cMMR.itrain)
    ## params.validation.rkernel=cMMRVal.XKernel[0].title
    if self.validation_rkernel in cMMRVal.dkernels:
        rkernel = cMMRVal.dkernels[self.validation_rkernel]
    else:
        rkernel = cMMRVal.XKernel[0]

    kernel_type = rkernel.kernel_params.kernel_type
    kinput = rkernel.crossval

    if kernel_type == 0:
        ip1min = 0.0
        ip1max = 0.0
        ip2min = 0.0
        ip2max = 0.0
        ip1step = 1.0
        ip2step = 1.0
    elif kernel_type in (1, 11, 12, 2, 51):
        ip1min = kinput.par1min
        ip1max = kinput.par1max
        ip2min = kinput.par2min
        ip2max = kinput.par2max
        ip1step = kinput.par1step
        ip2step = kinput.par2step
    elif kernel_type in (3, 31, 32, 41, 53):
        if kinput.nrange > 1:
            if kinput.par1max > kinput.par1min:
                dpar = np.power(kinput.par1max / kinput.par1min, 1 / (kinput.nrange - 1))
                ip1max = kinput.nrange
            else:
                dpar = 1.0
                ip1max = 1.0
        else:
            ip1max = 1.0
            dpar = 1.0
        ip1min = 1.0
        ip2min = kinput.par2min
        ip2max = kinput.par2max
        ip1step = 1.0
        ip2step = kinput.par2step
    else:
        ip1min = 1.0
        ip1max = 1.0
        ip2min = 1.0
        ip2max = 1.0
        ip1step = 1.0
        ip2step = 1.0

    ## Validation
    ## number of validation folds
    if self.vnfold < 2:
        self.vnfold = 2
    vnfold = self.vnfold
    vxsel = np.floor(np.random.random(cMMRVal.mdata) * vnfold)
    vxsel = vxsel - (vxsel == vnfold)

    ## !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    alternating_test = 0
    if alternating_test == 1:
        vxsel = np.zeros(cMMRVal.mdata)
        for i in range(0, cMMRVal.mdata, 2):
            vxsel[i] = 1
    ## !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    vpredtr = np.zeros(vnfold)   ## training accuracy per validation fold
    vpred = np.zeros(vnfold)     ## validation test accuracy per fold

    print('C,D,par1,par2,training accuracy,validation test accuracy')

    ## scanning the parameter space
    if cMMR.ieval_type in (0, 10):
        xxmax = -np.inf
    else:
        xxmax = np.inf

    penalty = cMMRVal.penalty.crossval
    crange = np.arange(penalty.par1min, penalty.par1max + penalty.par1step / 2,
                       penalty.par1step)
    drange = np.arange(penalty.par2min, penalty.par2max + penalty.par2step / 2,
                       penalty.par2step)
    p1range = np.arange(ip1min, ip1max + ip1step / 2, ip1step)
    p2range = np.arange(ip2min, ip2max + ip2step / 2, ip2step)

    for iC in crange:
        for iD in drange:
            for ip1 in p1range:
                for ip2 in p2range:
                    if kernel_type in (3, 31, 32, 41, 53):
                        dpar1 = kinput.par1min * dpar**(ip1 - 1)
                        dpar2 = ip2
                    else:
                        dpar1 = ip1
                        dpar2 = ip2
                    cMMRVal.penalty.c = iC
                    cMMRVal.penalty.d = iD
                    rkernel.kernel_params.ipar1 = dpar1
                    rkernel.kernel_params.ipar2 = dpar2

                    for vifold in range(vnfold):
                        cMMRVal.split_train_test(vxsel, vifold)
                        cMMRVal.compute_kernels()
                        cMMRVal.Y0 = cMMRVal.YKernel.get_Y0(cMMRVal.itrain)

                        ## validation training
                        cOptDual = cMMRVal.mmr_train()
                        cPredictValTrain = cMMRVal.mmr_test(cOptDual, itraindata=0)
                        ## count the proportion of labels predicted correctly
                        ## ##############################################
                        if cMMRVal.itestmode == 2:
                            ypred = inverse_knn(cMMRVal.YKernel.get_Y0(cMMRVal.itrain), cPredictValTrain)
                        else:
                            ypred = cPredictValTrain.zPred
                        cEvaluationValTrain = mmr_eval_binvector(cMMRVal.YKernel.get_train(cMMRVal.itrain), ypred)
                        vpredtr[vifold] = cEvaluationValTrain.f1
                        ## ##############################################

                        ## validation test
                        cPredictValTest = cMMRVal.mmr_test(cOptDual, itraindata=1)
                        ## count the proportion of labels predicted correctly
                        ## ##############################################
                        if cMMRVal.itestmode == 2:
                            ypred = inverse_knn(cMMRVal.YKernel.get_Y0(cMMRVal.itrain), cPredictValTest)
                        else:
                            ypred = cPredictValTest.zPred
                        cEvaluationValTest = mmr_eval_binvector(cMMRVal.YKernel.get_test(cMMRVal.itest), ypred)
                        vpred[vifold] = cEvaluationValTest.f1
                        ## ##############################################

                    np.set_printoptions(precision=4)
                    print('%9.5g' % iC, '%9.5g' % iD, '%9.5g' % dpar1, '%9.5g' % dpar2,
                          '%9.5g' % (np.mean(vpredtr)), '%9.5g' % (np.mean(vpred)))
                    ## print(array((iC,iD,dpar1,dpar2,mean(vpredtr),mean(vpred))))
                    ## print(iC,iD,dpar1,dpar2,mean(vpredtr),mean(vpred))

                    ## searching for the best configuration in validation
                    mvpred = np.mean(vpred)
                    if cMMR.ieval_type in (0, 10):
                        if mvpred > xxmax:
                            xxmax = mvpred
                            xparam.c = iC
                            xparam.d = iD
                            xparam.par1 = dpar1
                            xparam.par2 = dpar2
                            print('The best:', xxmax)
                    else:
                        if mvpred < xxmax:
                            xxmax = mvpred
                            xparam.c = iC
                            xparam.d = iD
                            xparam.par1 = dpar1
                            xparam.par2 = dpar2
                            print('The best:', xxmax)
                    sys.stdout.flush()

    self.validationScore = xxmax
    best_param = xparam
    return best_param
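## Sketch of how the grid over the kernel parameter par1 is laid out for the
## kernel types handled above (3, 31, 32, 41, 53): nrange points spaced
## geometrically between par1min and par1max. Plain numbers stand in for the
## crossval structure; only the spacing rule is taken from the code above.
import numpy as np

par1min, par1max, nrange = 0.1, 10.0, 5    ## assumed example values
dpar = np.power(par1max / par1min, 1.0 / (nrange - 1))
grid = [par1min * dpar**(ip1 - 1) for ip1 in np.arange(1.0, nrange + 0.5, 1.0)]
print(grid)    ## -> [0.1, 0.316..., 1.0, 3.16..., 10.0]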
def mmr_main(iworkmode, trainingBase, evalFile, performcl):

    params = mmr_setparams.cls_params()
    ## np.set_printoptions(precision=4)

    dresult = {}
    nview = 1
    nobject = 1
    ## !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    ## ################################################
    lviews = [0, 1]    ## this list could contain a subset of 0,1,2
    ## ################################################

    lresult = []
    for iobject in range(nobject):
        for ifeature in range(nview):
            params.ninputview = len(lviews)
            cMMR = mmr_mmr_cls.cls_mmr(params.ninputview)
            nfold = cMMR.nfold
            nrepeat = cMMR.nrepeat
            ## cMMR.xbias=-0.06   ## 4 categories
            cMMR.xbias = 0.02 - ifeature * 0.01
            ## cMMR.xbias=0.1-ifeature*0.01

            nscore = 4
            nipar = 1
            cMMR.crossval_mode = 1
            if cMMR.crossval_mode == 0:    ## random folds
                nfold0 = nfold
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))
            elif cMMR.crossval_mode == 1:  ## predefined training and test
                nrepeat = 1
                nfold0 = 1
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))

            cdata_store = trajlab_load_data.cls_label_files(trainingBase, evalFile, performcl)
            cdata_store.load_mmr(cMMR, lviews)
            mdata = cMMR.mdata
            xcross = np.zeros((mdata, mdata))
            ## !!!!!!!!!!!!!!!!!!
            ## params.validation.rkernel=cMMR.XKernel[0].title
            xtime = np.zeros(5)
            ## ############################################################
            nparam = 4      ## C, D, par1, par2
            xbest_param = np.zeros((nrepeat, nfold0, nparam))

            for iipar in range(nipar):
                for irepeat in range(nrepeat):
                    ## split data into training and test
                    if cMMR.crossval_mode == 0:    ## random selection
                        xselector = np.zeros(mdata)
                        ifold = 0
                        for i in range(mdata):
                            xselector[i] = ifold
                            ifold += 1
                            if ifold >= nfold0:
                                ifold = 0
                        np.random.shuffle(xselector)
                    elif cMMR.crossval_mode == 1:  ## predefined training and test
                        ## (added by simon) train with all data but the last one
                        ## (not elegant, but it works)
                        cMMR.ifixtrain = list(range(mdata - 1))
                        xselector = np.zeros(mdata)
                        xselector[cMMR.ifixtrain] = 1

                    for ifold in range(nfold0):
                        cMMR.split_train_test(xselector, ifold)

                        ## validation to choose the best parameters
                        t0 = time.clock()
                        ## !!!!!!!!!!!!!!!!!!!!!!!!!
                        cMMR.set_validation()
                        cvalidation = mmr_validation_cls.cls_mmr_validation()
                        ## !!!!!!!!!!!!!!!!!!!!!!!!! no parameter "params"
                        best_param = cvalidation.mmr_validation(cMMR)
                        xtime[0] = time.clock() - t0

                        xbest_param[irepeat, ifold, 0] = best_param.c
                        xbest_param[irepeat, ifold, 1] = best_param.d
                        xbest_param[irepeat, ifold, 2] = best_param.par1
                        xbest_param[irepeat, ifold, 3] = best_param.par2

                        cMMR.compute_kernels()
                        cMMR.Y0 = cMMR.YKernel.get_train(cMMR.itrain)   ## candidates

                        t0 = time.clock()
                        ## !!!!!!!!!!!!!!!!!!!!!!! no "params"
                        cOptDual = cMMR.mmr_train()
                        xtime[1] = time.clock() - t0
                        ## cOptDual transfers the dual variables to the test procedure

                        ## compute tests
                        ## check the training accuracy
                        ## !!!!!!!!!!!!!!!!!!!!!!! no "params"
                        cPredictTra = cMMR.mmr_test(cOptDual, itraindata=0)
                        ## count the proportion of labels predicted correctly
                        ## ######################################
                        if cMMR.itestmode == 2:
                            ypred = inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), cPredictTra)
                        else:
                            ypred = cPredictTra.zPred
                        cEvaluationTra = mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), ypred)
                        xresult_train[iipar, irepeat, ifold] = cEvaluationTra.accuracy
                        ## ######################################

                        ## check the test accuracy
                        t0 = time.clock()
                        ## !!!!!!!!!!!!!!!!!!!!!!! no "params"
                        cPredictTes = cMMR.mmr_test(cOptDual, itraindata=1)
                        ## count the proportion of labels predicted correctly
                        if cMMR.itestmode == 2:
                            ypred = inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), cPredictTes)
                        else:
                            ypred = cPredictTes.zPred
                        ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
                        cEvaluationTes = mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), ypred)
                        xtime[2] = time.clock() - t0

                        xresult_test[iipar, irepeat, ifold] = cEvaluationTes.accuracy
                        xpr[iipar, irepeat, ifold, 0] = cEvaluationTes.precision
                        xpr[iipar, irepeat, ifold, 1] = cEvaluationTes.recall
                        xpr[iipar, irepeat, ifold, 2] = cEvaluationTes.f1
                        xpr[iipar, irepeat, ifold, 3] = cEvaluationTes.accuracy

                        ## (added by simon) for now I just add the new data to the
                        ## dataset with a random label and check the confusion
                        ## matrix --> ugly solution, but I could not find a cleaner way
                        ## print(cEvaluationTes.classconfusion)
                        evaluatedRes = [row[0] for row in cEvaluationTes.classconfusion]
                        evaluatedRes.append(cvalidation.validationScore)
                        ## nonZeroIndexes = [i for i, e in enumerate(evaluatedRes) if e != 0]
                        ## print(evaluatedRes)
                        ## return nonZeroIndexes[0]
                        return evaluatedRes

                        ## note: the early return above exits on the first fold, so the
                        ## code below only runs if that return is removed
                        try:
                            xclassconfusion += cEvaluationTes.classconfusion
                        except NameError:
                            (n, n) = cEvaluationTes.classconfusion.shape
                            xclassconfusion = np.zeros((n, n))
                            xclassconfusion += cEvaluationTes.classconfusion
                        ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)
                        mmr_report.mmr_report('Result on one fold',
                                              xresult_train[iipar, irepeat, ifold],
                                              xresult_test[iipar, irepeat, ifold],
                                              xpr[iipar, irepeat, ifold, :])

            sys.stdout.flush()
            dresult[ifeature] = (cMMR.xbias, np.mean(np.mean(xpr[iipar, :, :, :], 0), 0))

    for sfeature_type, tresult in dresult.items():
        ## xhead=cMMR.xbias
        xhead = ''
        lresult.append((xhead, tresult))

    return [-1]
def mmr_main(iworkmode):

    params = mmr_setparams.cls_params()
    np.set_printoptions(precision=4)

    dresult = {}
    ## ---------------------------------------------
    nview = 1
    nobject = 1
    params.ninputview = nview

    lresult = []
    for iobject in range(nobject):
        for ifeature in range(nview):
            cMMR = mmr_mmr_cls.cls_mmr(params.ninputview)
            nfold = cMMR.nfold
            nrepeat = cMMR.nrepeat
            ## cMMR.xbias=-0.06   ## 4 categories
            cMMR.xbias = 0.0
            ## cMMR.xbias=0.1-ifeature*0.01
            print('Xbias:', cMMR.xbias)

            nscore = 4
            nipar = 1
            if cMMR.crossval_mode == 0:    ## random folds
                nfold0 = nfold
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))
            elif cMMR.crossval_mode == 1:  ## predefined training and test
                nrepeat = 1
                nfold0 = 1
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))

            ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
            ## cMMR=mmr_mmr_cls.cls_mmr(params.ninputview)
            cdata_store = vision_load_data.cls_label_files()
            cdata_store.load_mmr(cMMR)
            mdata = cMMR.mdata
            ## -----------------------------------------------
            print('Output kernel type: ', cMMR.YKernel.kernel_params.kernel_type)
            for i in range(params.ninputview):
                print(i, 'Input kernel type: ', cMMR.XKernel[i].kernel_params.kernel_type)
            ## -------------------------------------------------
            xcross = np.zeros((mdata, mdata))
            xtime = np.zeros(5)
            ## ############################################################
            nparam = 4      ## C, D, par1, par2
            xbest_param = np.zeros((nrepeat, nfold0, nparam))

            for iipar in range(nipar):
                print('===================================================')
                for irepeat in range(nrepeat):
                    ## split data into training and test
                    if cMMR.crossval_mode == 0:    ## random selection
                        xselector = np.zeros(mdata)
                        ifold = 0
                        for i in range(mdata):
                            xselector[i] = ifold
                            ifold += 1
                            if ifold >= nfold0:
                                ifold = 0
                        np.random.shuffle(xselector)
                        ## xselector=np.floor(np.random.random(mdata)*nfold0)
                        ## xselector=xselector-(xselector==nfold0)
                    elif cMMR.crossval_mode == 1:  ## predefined training and test
                        xselector = np.zeros(mdata)
                        xselector[cMMR.ifixtrain] = 1

                    for ifold in range(nfold0):
                        cMMR.split_train_test(xselector, ifold)

                        ## validation to choose the best parameters
                        print('Validation')
                        t0 = time.clock()
                        ## select the kernel to be validated
                        cMMR.set_validation()
                        cvalidation = mmr_validation_cls.cls_mmr_validation()
                        cvalidation.validation_rkernel = cMMR.XKernel[0].title
                        best_param = cvalidation.mmr_validation(cMMR)
                        xtime[0] = time.clock() - t0

                        print('Best parameters found by validation')
                        print('c:    ', best_param.c)
                        print('d:    ', best_param.d)
                        print('par1: ', best_param.par1)
                        print('par2: ', best_param.par2)

                        xbest_param[irepeat, ifold, 0] = best_param.c
                        xbest_param[irepeat, ifold, 1] = best_param.d
                        xbest_param[irepeat, ifold, 2] = best_param.par1
                        xbest_param[irepeat, ifold, 3] = best_param.par2

                        cMMR.compute_kernels()
                        cMMR.Y0 = cMMR.YKernel.get_train(cMMR.itrain)   ## candidates

                        ## training with the best parameters
                        print('Training')
                        print(cMMR.YKernel.kernel_params.kernel_type,
                              cMMR.YKernel.kernel_params.ipar1,
                              cMMR.YKernel.kernel_params.ipar2)
                        for iview in range(cMMR.ninputview):
                            print(cMMR.XKernel[iview].kernel_params.kernel_type,
                                  cMMR.XKernel[iview].kernel_params.ipar1,
                                  cMMR.XKernel[iview].kernel_params.ipar2)
                        t0 = time.clock()
                        cOptDual = cMMR.mmr_train()
                        xtime[1] = time.clock() - t0
                        ## cOptDual transfers the dual variables to the test procedure

                        ## compute tests
                        ## check the training accuracy
                        print('Test')
                        cPredictTra = cMMR.mmr_test(cOptDual, itraindata=0)
                        ## count the proportion of labels predicted correctly
                        ## ######################################
                        if cMMR.itestmode == 2:
                            print('Test knn')
                            ypred = inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), cPredictTra)
                        else:
                            ypred = cPredictTra.zPred
                        cEvaluationTra = mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), ypred)
                        xresult_train[iipar, irepeat, ifold] = cEvaluationTra.accuracy
                        print('>>>>>>>>>>>\n', cEvaluationTra.confusion)
                        ## ######################################

                        ## check the test accuracy
                        t0 = time.clock()
                        cPredictTes = cMMR.mmr_test(cOptDual, itraindata=1)
                        ## count the proportion of labels predicted correctly
                        if cMMR.itestmode == 2:
                            ypred = inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), cPredictTes)
                        else:
                            ypred = cPredictTes.zPred
                        ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
                        cEvaluationTes = mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), ypred)
                        xtime[2] = time.clock() - t0

                        xresult_test[iipar, irepeat, ifold] = cEvaluationTes.accuracy
                        xpr[iipar, irepeat, ifold, 0] = cEvaluationTes.precision
                        xpr[iipar, irepeat, ifold, 1] = cEvaluationTes.recall
                        xpr[iipar, irepeat, ifold, 2] = cEvaluationTes.f1
                        xpr[iipar, irepeat, ifold, 3] = cEvaluationTes.accuracy
                        print(cEvaluationTes.confusion)
                        print(cEvaluationTes.classconfusion)
                        try:
                            xclassconfusion += cEvaluationTes.classconfusion
                        except NameError:
                            (n, n) = cEvaluationTes.classconfusion.shape
                            xclassconfusion = np.zeros((n, n))
                            xclassconfusion += cEvaluationTes.classconfusion
                        ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)
                        ## ####################################
                        print('Parameter:', iipar, 'Repetition: ', irepeat, 'Fold: ', ifold)
                        mmr_report('Result on one fold',
                                   xresult_train[iipar, irepeat, ifold],
                                   xresult_test[iipar, irepeat, ifold],
                                   xpr[iipar, irepeat, ifold, :])
                        print(np.sum(xpr[iipar, irepeat, :ifold + 1, :], 0) / (ifold + 1))

                    mmr_report('Result on one repetition',
                               np.mean(xresult_train[iipar, irepeat, :]),
                               np.mean(xresult_test[iipar, irepeat, :]),
                               np.mean(xpr[iipar, irepeat, :, :], 0))

                mmr_report('Result on all repetitions @@@@@@@',
                           np.mean(xresult_train[iipar, :, :].flatten()),
                           np.mean(xresult_test[iipar, :, :].flatten()),
                           np.mean(np.mean(xpr[iipar, :, :, :], 0), 0))

            print('Average best parameters')
            ## sfield=dir(best_param)
            xlabels = ('c', 'd', 'par1', 'par2')
            for i in range(nparam):
                ## print(sfield[i])
                print(xlabels[i], ': ', np.mean(xbest_param[:, :, i]),
                      '(', np.std(xbest_param[:, :, i]), ')')
            print('xtime:', xtime)
            sys.stdout.flush()
            dresult[ifeature] = (cMMR.xbias, np.mean(np.mean(xpr[iipar, :, :, :], 0), 0))

    for sfeature_type, tresult in dresult.items():
        ## xhead=cMMR.xbias
        xhead = ''
        lresult.append((xhead, tresult))

    ## lresult.sort()
    ## for litem in lresult:
    ##     print(litem)
    print('\\begin{tabular}{l|rrr}')
    print('& \\multicolumn{3}{c}{' + 'Objects' + '} \\\\')
    print('Feature type & Precision & Recall & F1 \\\\ \\hline')
    for litem in lresult:
        print(litem[0], ' & ', '%6.4f' % litem[1][1][0],
              ' & ', '%6.4f' % litem[1][1][1], ' & ', '%6.4f' % litem[1][1][2], ' \\\\')
    print('\\end{tabular}')

    ## print('\\begin{tabular}{l|rrr}')
    ## print('& \\multicolumn{3}{c}{'+'Objects'+'} \\\\')
    ## print('Feature & xbias & Precision & Recall & F1 \\\\ \\hline')
    ## for litem in lresult:
    ##     print(litem[0],' & ','%6.4f'%litem[1][0],' & ','%6.4f'%litem[1][1][0],
    ##           ' & ','%6.4f'%litem[1][1][1],' & ','%6.4f'%litem[1][1][2],' \\\\')
    ## print('\\end{tabular}')

    ## ##########################################################
    ## !!!! Save the optimal dual variables and the optimal, cross-validated
    ## kernel parameters into the files given in vision_load_data.
    ## prepare full training with the best parameters
    ifold = 0
    xselector = np.ones(mdata)
    cMMR.split_train_test(xselector, ifold)
    best_param = np.array([np.mean(xbest_param[:, :, i]) for i in range(nparam)])
    cMMR.penalty.c = best_param[0]
    cMMR.penalty.d = best_param[1]
    cMMR.XKernel[0].kernel_params.ipar1 = best_param[2]
    cMMR.XKernel[0].kernel_params.ipar2 = best_param[3]
    cMMR.compute_kernels()
    cMMR.Y0 = cMMR.YKernel.get_train(cMMR.itrain)   ## candidates

    ## training with the best parameters
    print('Full training')
    cOptDual = cMMR.mmr_train()
    np.savetxt(cdata_store.sbasedir + cdata_store.dual_params, cMMR.dual.alpha,
               fmt='%9.4f')
    np.savetxt(cdata_store.sbasedir + cdata_store.kernel_params, best_param[2:],
               fmt='%9.4f')

    print(xclassconfusion)
    print('Bye')
    return
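## Round-trip sketch for the np.savetxt calls above: how a later run (for
## example loadData at the top of this section) could read the stored dual
## variables and kernel parameters back. The arrays and file names here are
## stand-ins; the real names come from cdata_store in the original code.
import numpy as np

## stand-ins for cMMR.dual.alpha and best_param[2:] from the code above
alpha = np.array([0.1234, 0.5678, 0.9012])
kern = np.array([2.0, 0.0])

np.savetxt('dual_params.txt', alpha, fmt='%9.4f')      ## assumed file name
np.savetxt('kernel_params.txt', kern, fmt='%9.4f')     ## assumed file name

## np.savetxt wrote plain text, so np.loadtxt recovers the floats directly
alpha_back = np.loadtxt('dual_params.txt')
ipar1, ipar2 = np.loadtxt('kernel_params.txt')
print(alpha_back, ipar1, ipar2)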
def mmr_main(iworkmode):

    params = mmr_setparams.cls_params()

    ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    list_features = ["annot", "DenseHue", "DenseHueV3H1",
                     "DenseSift", "DenseSiftV3H1", "Gist",
                     "HarrisHue", "HarrisHueV3H1", "HarrisSift",
                     "HarrisSiftV3H1", "Hsv", "HsvV3H1", "Lab",
                     "LabV3H1", "Rgb", "RgbV3H1"]
    ## data files in the corresponding directories
    datadirs = ['corel5k', 'espgame', 'iaprtc12', 'mirflickr', 'pascal07']

    ## Example: lfeatures=[4,8] means the selected features are
    ## "DenseSiftV3H1" and "HarrisSift"
    lfeatures = [4]
    params.ninputview = len(lfeatures)
    idata = 0    ## process the corel5k data set

    xdatacls = mmr_mmr_cls.cls_mmr(params.ninputview)
    nfold = xdatacls.nfold
    nrepeat = xdatacls.nrepeat
    print('Xbias:', xdatacls.xbias)

    cdata_store = mmr_load_data.cls_data_load()
    cdata_store.load_data(xdatacls, idata, lfeatures)
    mdata = xdatacls.mdata

    ## initializing the arrays collecting the results
    nscore = 4
    nipar = 1
    if xdatacls.crossval_mode == 0:    ## random folds
        nfold0 = nfold
        xresult_test = np.zeros((nipar, nrepeat, nfold0))
        xresult_train = np.zeros((nipar, nrepeat, nfold0))
        xpr = np.zeros((nipar, nrepeat, nfold0, nscore))
    elif xdatacls.crossval_mode == 1:  ## predefined training and test
        nrepeat = 1
        nfold0 = 1
        xresult_test = np.zeros((nipar, nrepeat, nfold0))
        xresult_train = np.zeros((nipar, nrepeat, nfold0))
        xpr = np.zeros((nipar, nrepeat, nfold0, nscore))

    ## -----------------------------------------------
    print('Output kernel type: ', xdatacls.YKernel.kernel_params.kernel_type)
    for i in range(params.ninputview):
        print(i, 'Input kernel type: ', xdatacls.XKernel[i].kernel_params.kernel_type)
    ## -------------------------------------------------
    xcross = np.zeros((mdata, mdata))
    xtime = np.zeros(5)
    ## ############################################################
    nparam = 4      ## C, D, par1, par2
    xbest_param = np.zeros((nrepeat, nfold0, nparam))

    for iipar in range(nipar):
        print('===================================================')
        for irepeat in range(nrepeat):
            xdatacls.prepare_repetition_training(nfold0)
            for ifold in range(nfold0):
                xdatacls.prepare_fold_training(ifold)

                ## validation to choose the best parameters
                print('Validation')
                t0 = time.clock()
                xdatacls.set_validation()
                cvalidation = mmr_validation_cls.cls_mmr_validation()
                cvalidation.validation_rkernel = xdatacls.XKernel[0].title
                best_param = cvalidation.mmr_validation(xdatacls)
                xtime[0] = time.clock() - t0

                print('Best parameters found by validation')
                print('c:    ', best_param.c)
                print('d:    ', best_param.d)
                print('par1: ', best_param.par1)
                print('par2: ', best_param.par2)

                xbest_param[irepeat, ifold, 0] = best_param.c
                xbest_param[irepeat, ifold, 1] = best_param.d
                xbest_param[irepeat, ifold, 2] = best_param.par1
                xbest_param[irepeat, ifold, 3] = best_param.par2

                xdatacls.compute_kernels()
                xdatacls.Y0 = xdatacls.YKernel.get_train(xdatacls.itrain)   ## candidates

                ## training with the best parameters
                print('Training')
                print(xdatacls.YKernel.kernel_params.kernel_type,
                      xdatacls.YKernel.kernel_params.ipar1,
                      xdatacls.YKernel.kernel_params.ipar2)
                for iview in range(xdatacls.ninputview):
                    print(xdatacls.XKernel[iview].kernel_params.kernel_type,
                          xdatacls.XKernel[iview].kernel_params.ipar1,
                          xdatacls.XKernel[iview].kernel_params.ipar2)
                t0 = time.clock()
                cOptDual = xdatacls.mmr_train()
                xtime[1] = time.clock() - t0
                ## cOptDual transfers the dual variables to the test procedure

                ## compute tests
                ## check the training accuracy
                print('Test')
                cPredictTra = xdatacls.mmr_test(cOptDual, itraindata=0)
                ## count the proportion of labels predicted correctly
                ## ######################################
                if xdatacls.itestmode == 2:
                    print('Test knn')
                    ypred = inverse_knn(xdatacls.YKernel.get_Y0(xdatacls.itrain), cPredictTra)
                else:
                    ypred = cPredictTra.zPred
                cEvaluationTra = mmr_eval_binvector(xdatacls.YKernel.get_train(xdatacls.itrain), ypred)
                xresult_train[iipar, irepeat, ifold] = cEvaluationTra.accuracy
                print('>>>>>>>>>>>\n', cEvaluationTra.confusion)
                ## ######################################

                ## check the test accuracy
                t0 = time.clock()
                cPredictTes = xdatacls.mmr_test(cOptDual, itraindata=1)
                ## count the proportion of labels predicted correctly
                if xdatacls.itestmode == 2:
                    ypred = inverse_knn(xdatacls.YKernel.get_Y0(xdatacls.itrain), cPredictTes)
                else:
                    ypred = cPredictTes.zPred
                ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
                cEvaluationTes = mmr_eval_binvector(xdatacls.YKernel.get_test(xdatacls.itest), ypred)
                xtime[2] = time.clock() - t0

                xresult_test[iipar, irepeat, ifold] = cEvaluationTes.accuracy
                xpr[iipar, irepeat, ifold, 0] = cEvaluationTes.precision
                xpr[iipar, irepeat, ifold, 1] = cEvaluationTes.recall
                xpr[iipar, irepeat, ifold, 2] = cEvaluationTes.f1
                xpr[iipar, irepeat, ifold, 3] = cEvaluationTes.accuracy
                print(cEvaluationTes.confusion)
                print(cEvaluationTes.classconfusion)
                try:
                    xclassconfusion += cEvaluationTes.classconfusion
                except NameError:
                    (n, n) = cEvaluationTes.classconfusion.shape
                    xclassconfusion = np.zeros((n, n))
                    xclassconfusion += cEvaluationTes.classconfusion
                ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)
                ## ####################################
                print('Parameter:', iipar, 'Repetition: ', irepeat, 'Fold: ', ifold)
                mmr_report.mmr_report('Result on one fold',
                                      xresult_train[iipar, irepeat, ifold],
                                      xresult_test[iipar, irepeat, ifold],
                                      xpr[iipar, irepeat, ifold, :])
                print(np.sum(xpr[iipar, irepeat, :ifold + 1, :], 0) / (ifold + 1))

            mmr_report.mmr_report('Result on one repetition',
                                  np.mean(xresult_train[iipar, irepeat, :]),
                                  np.mean(xresult_test[iipar, irepeat, :]),
                                  np.mean(xpr[iipar, irepeat, :, :], 0))

        mmr_report.mmr_report('Result on all repetitions @@@@@@@',
                              np.mean(xresult_train[iipar, :, :].flatten()),
                              np.mean(xresult_test[iipar, :, :].flatten()),
                              np.mean(np.mean(xpr[iipar, :, :, :], 0), 0))

    print('Average best parameters')
    xlabels = ('c', 'd', 'par1', 'par2')
    for i in range(nparam):
        print(xlabels[i], ': ', np.mean(xbest_param[:, :, i]),
              '(', np.std(xbest_param[:, :, i]), ')')
    print('xtime:', xtime)
    sys.stdout.flush()

    print('Bye')
    return
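## Sketch of how the lfeatures index list above maps to feature names and data
## set directories. The path pattern printed here is an assumption used only
## for illustration; the real file locations are resolved inside
## mmr_load_data.cls_data_load.
list_features = ["annot", "DenseHue", "DenseHueV3H1",
                 "DenseSift", "DenseSiftV3H1", "Gist",
                 "HarrisHue", "HarrisHueV3H1", "HarrisSift",
                 "HarrisSiftV3H1", "Hsv", "HsvV3H1", "Lab",
                 "LabV3H1", "Rgb", "RgbV3H1"]
datadirs = ['corel5k', 'espgame', 'iaprtc12', 'mirflickr', 'pascal07']

lfeatures = [4, 8]      ## "DenseSiftV3H1" and "HarrisSift"
idata = 0               ## corel5k

for ifeat in lfeatures:
    ## hypothetical naming, e.g. corel5k/corel5k_DenseSiftV3H1
    print(datadirs[idata] + '/' + datadirs[idata] + '_' + list_features[ifeat])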