Example No. 1
def mmr_main(iworkmode):

    params = mmr_setparams.cls_params()

    np.set_printoptions(precision=4)

    dresult = {}
    ## ---------------------------------------------
    list_object = ['parts']
    nviews = 1
    lfile_in = [[[(3, 4, 21)]]]
    tfile_out = (3, 1, 6)

    lresult = []
    for iobject in range(len(lfile_in)):

        tfile_in = lfile_in[iobject]

        for ifeature in range(nviews):

            params.ninputview = len(tfile_in)
            cMMR = mmr_mmr_cls.cls_mmr(params.ninputview)
            nfold = cMMR.nfold
            nrepeat = cMMR.nrepeat
            cMMR.xbias = -0.95 - ifeature * 0.05
            print('Xbias:', cMMR.xbias)

            nscore = 4
            nipar = 1
            if cMMR.crossval_mode == 0:  ## random
                nfold0 = nfold
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))
            elif cMMR.crossval_mode == 1:  ## predefined training and test
                nrepeat = 1
                nfold0 = 1
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))

        ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@

            cdata_store = objpart_load_data.cls_label_files()
            cdata_store.load_mmr(cMMR, tfile_in, tfile_out)
            mdata = cMMR.mdata

            ## -----------------------------------------------
            print('Output kernel type: ',
                  cMMR.YKernel.kernel_params.kernel_type)
            for i in range(params.ninputview):
                print(i, 'Input kernel type: ',
                      cMMR.XKernel[i].kernel_params.kernel_type)
            ## -------------------------------------------------

            xtime = np.zeros(5)
            ## ############################################################
            nparam = 4  ## C,D,par1,par2
            xbest_param = np.zeros((nrepeat, nfold0, nparam))

            ## xinpar=[0.2,0.3,0.4,0.5,0.6]
            for iipar in range(nipar):

                print('===================================================')
                for irepeat in range(nrepeat):
                    ## split data into training and test
                    if cMMR.crossval_mode == 0:  ## random selection
                        xselector = np.floor(np.random.random(mdata) * nfold0)
                        xselector = xselector - (xselector == nfold0)
                    elif cMMR.crossval_mode == 1:  ## predefined training and test
                        xselector = np.zeros(mdata)
                        xselector[cMMR.ifixtrain] = 1

                    for ifold in range(nfold0):
                        cMMR.split_train_test(xselector, ifold)

                        ## validation to choose the best parameters
                        print('Validation')
                        t0 = time.clock()
                        ## select the kernel to be validated
                        cMMR.set_validation()

                        cvalidation = mmr_validation_cls.cls_mmr_validation()
                        cvalidation.validation_rkernel = cMMR.XKernel[0].title
                        best_param = cvalidation.mmr_validation(cMMR)

                        xtime[0] = time.clock() - t0

                        print('Best parameters found by validation')
                        print('c: ', best_param.c)
                        print('d: ', best_param.d)
                        print('par1: ', best_param.par1)
                        print('par2: ', best_param.par2)
                        xbest_param[irepeat, ifold, 0] = best_param.c
                        xbest_param[irepeat, ifold, 1] = best_param.d
                        xbest_param[irepeat, ifold, 2] = best_param.par1
                        xbest_param[irepeat, ifold, 3] = best_param.par2

                        cMMR.compute_kernels()
                        cMMR.Y0 = cMMR.YKernel.get_train(
                            cMMR.itrain)  ## candidates

                        ## training with the best parameters
                        print('Training')

                        print(cMMR.YKernel.kernel_params.kernel_type, \
                              cMMR.YKernel.kernel_params.ipar1, \
                              cMMR.YKernel.kernel_params.ipar2)
                        for iview in range(cMMR.ninputview):
                            print(cMMR.XKernel[iview].kernel_params.kernel_type, \
                                  cMMR.XKernel[iview].kernel_params.ipar1, \
                                  cMMR.XKernel[iview].kernel_params.ipar2)

                        t0 = time.clock()
                        cOptDual = cMMR.mmr_train()
                        xtime[1] = time.clock() - t0
                        ## cls transfers the dual variables to the test procedure
                        ## compute tests
                        ## check the train accuracy
                        print('Test')
                        cPredictTra = cMMR.mmr_test(cOptDual, itraindata=0)
                        ## counts the proportion of the ones predicted correctly
                        ## ######################################
                        if cMMR.itestmode == 2:
                            print('Test knn')
                            ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                              cPredictTra)
                        else:
                            ypred = cPredictTra.zPred
                        cEvaluationTra= \
                              mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), \
                                                 ypred)
                        xresult_train[iipar, irepeat,
                                      ifold] = cEvaluationTra.accuracy
                        print('>>>>>>>>>>>\n', cEvaluationTra.confusion)
                        ## ######################################
                        ## check the test accuracy
                        t0 = time.clock()
                        cPredictTes = cMMR.mmr_test(cOptDual, itraindata=1)
                        ## counts the proportion of the ones predicted correctly
                        if cMMR.itestmode == 2:
                            ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                              cPredictTes)
                        else:
                            ypred = cPredictTes.zPred
                        ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
                        cEvaluationTes= \
                              mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), \
                                                 ypred)

                        xtime[2] = time.clock() - t0
                        xresult_test[iipar, irepeat,
                                     ifold] = cEvaluationTes.accuracy

                        xpr[iipar, irepeat, ifold,
                            0] = cEvaluationTes.precision
                        xpr[iipar, irepeat, ifold, 1] = cEvaluationTes.recall
                        xpr[iipar, irepeat, ifold, 2] = cEvaluationTes.f1
                        xpr[iipar, irepeat, ifold, 3] = cEvaluationTes.accuracy

                        print(cEvaluationTes.confusion)
                        print(cEvaluationTes.classconfusion)
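                        ## note: xclassconfusion is undefined on the first fold, so the
                        ## except branch below allocates it before accumulating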
                        try:
                            xclassconfusion += cEvaluationTes.classconfusion
                        except NameError:
                            (n, n) = cEvaluationTes.classconfusion.shape
                            xclassconfusion = np.zeros((n, n))
                            xclassconfusion += cEvaluationTes.classconfusion

            ## ####################################
                        print('Parameter:',iipar,'Repetition: ',irepeat, \
                              'Fold: ',ifold)
                        mmr_report('Result on one fold',
                                   xresult_train[iipar,irepeat,ifold], \
                                   xresult_test[iipar,irepeat,ifold], \
                                   xpr[iipar,irepeat,ifold,:])
                        print(
                            np.sum(xpr[iipar, irepeat, :ifold + 1, :], 0) /
                            (ifold + 1))

                    mmr_report('Result on one repetition',
                               np.mean(xresult_train[iipar,irepeat,:]), \
                               np.mean(xresult_test[iipar,irepeat,:]), \
                               np.mean(xpr[iipar,irepeat,:,:],0))

                mmr_report('Result on all repetitions @@@@@@@',
                           np.mean(xresult_train[iipar,:,:].flatten()), \
                           np.mean(xresult_test[iipar,:,:].flatten()), \
                           np.mean(np.mean(xpr[iipar,:,:,:],0),0))

                print('Average best parameters')
                ##  sfield=dir(best_param)
                xlabels = ('c', 'd', 'par1', 'par2')
                for i in range(nparam):
                    ##    print(sfield[i])
                    print(xlabels[i],': ',np.mean(xbest_param[:,:,i]), \
                          '(',np.std(xbest_param[:,:,i]),')')

                print('xtime:', xtime)
                sys.stdout.flush()

                dresult[ifeature] = (cMMR.xbias,
                                     np.mean(np.mean(xpr[iipar, :, :, :], 0),
                                             0))

        for sfeature_type, tresult in dresult.items():
            ## xhead=cMMR.xbias
            headkey = tfile_in[0][0]
            xhead = cdata_store.dirvar[headkey][0] + ', ' + cdata_store.dirvar[
                headkey][1]
            lresult.append((xhead, tresult))

        ## lresult.sort()
        ## for litem in lresult:
        ##   print(litem)

        print('\\begin{tabular}{l|rrr}')
        print('& \\multicolumn{3}{c}{' + 'Objects' + '} \\\\')
        print('Feature type & Precision & Recall & F1 \\\\ \\hline')
        for litem in lresult:
            print(litem[0],' & ','%6.4f'%litem[1][1][0], \
                  ' & ','%6.4f'%litem[1][1][1],' & ','%6.4f'%litem[1][1][2],' \\\\')
        print('\\end{tabular}')

    print(xclassconfusion)

    print('Bye')

    return
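
Note: each example above times its stages with time.clock(), which was deprecated in Python 3.3 and removed in Python 3.8. A minimal drop-in sketch using time.perf_counter() instead, assuming only that elapsed wall-clock seconds are wanted for the xtime bookkeeping:

import time

def stopwatch():
    """Return a zero-argument function yielding the seconds elapsed since creation."""
    t0 = time.perf_counter()                 # monotonic, high-resolution timer
    return lambda: time.perf_counter() - t0

## usage sketch, mirroring the pattern in the examples:
##   elapsed = stopwatch()
##   best_param = cvalidation.mmr_validation(cMMR)
##   xtime[0] = elapsed()
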
Example No. 2
def mmr_main(iworkmode, trainingBase, evalFile, performcl):

    params = mmr_setparams.cls_params()

    ## np.set_printoptions(precision=4)

    dresult = {}
    nview = 1

    nobject = 1

    ## !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    ## ################################################
    lviews = [0, 1]  ### this list could contain a subset of 0,1,2
    ## ################################################

    lresult = []
    for iobject in range(nobject):

        for ifeature in range(nview):

            params.ninputview = len(lviews)
            cMMR = mmr_mmr_cls.cls_mmr(params.ninputview)
            nfold = cMMR.nfold
            nrepeat = cMMR.nrepeat
            ## cMMR.xbias=-0.06  ## 4 categories
            cMMR.xbias = 0.02 - ifeature * 0.01
            ## cMMR.xbias=0.1-ifeature*0.01

            nscore = 4
            nipar = 1

            cMMR.crossval_mode = 1
            if cMMR.crossval_mode == 0:  ## random
                nfold0 = nfold
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))
            elif cMMR.crossval_mode == 1:  ## predefined training and test
                nrepeat = 1
                nfold0 = 1
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))

            cdata_store = trajlab_load_data.cls_label_files(
                trainingBase, evalFile, performcl)
            cdata_store.load_mmr(cMMR, lviews)
            mdata = cMMR.mdata

            xcross = np.zeros((mdata, mdata))

            ## !!!!!!!!!!!!!!!!!!
            ## params.validation.rkernel=cMMR.XKernel[0].title

            xtime = np.zeros(5)
            ## ############################################################
            nparam = 4  ## C,D,par1,par2
            xbest_param = np.zeros((nrepeat, nfold0, nparam))

            for iipar in range(nipar):

                for irepeat in range(nrepeat):
                    ## split data into training and test
                    if cMMR.crossval_mode == 0:  ## random selection
                        xselector = np.zeros(mdata)
                        ifold = 0
                        for i in range(mdata):
                            xselector[i] = ifold
                            ifold += 1
                            if ifold >= nfold0:
                                ifold = 0
                        np.random.shuffle(xselector)
                    elif cMMR.crossval_mode == 1:  ## predefined training and test
                        # (added by simon) train with all data but the last one (not elegant, but works)
                        cMMR.ifixtrain = list(range(mdata - 1))
                        xselector = np.zeros(mdata)
                        xselector[cMMR.ifixtrain] = 1

                    for ifold in range(nfold0):
                        cMMR.split_train_test(xselector, ifold)

                        ## validation to choose the best parameters
                        t0 = time.clock()

                        ## !!!!!!!!!!!!!!!!!!!!!!!!!
                        cMMR.set_validation()
                        cvalidation = mmr_validation_cls.cls_mmr_validation()
                        ## !!!!!!!!!!!!!!!!!!!!!!!!! no parameter "params"
                        best_param = cvalidation.mmr_validation(cMMR)

                        xtime[0] = time.clock() - t0
                        xbest_param[irepeat, ifold, 0] = best_param.c
                        xbest_param[irepeat, ifold, 1] = best_param.d
                        xbest_param[irepeat, ifold, 2] = best_param.par1
                        xbest_param[irepeat, ifold, 3] = best_param.par2

                        cMMR.compute_kernels()
                        cMMR.Y0 = cMMR.YKernel.get_train(
                            cMMR.itrain)  ## candidates

                        t0 = time.clock()
                        ## !!!!!!!!!!!!!!!!!!!!!!! np "params"
                        cOptDual = cMMR.mmr_train()
                        xtime[1] = time.clock() - t0
                        ## cls transfers the dual variables to the test procedure
                        ## compute tests
                        ## check the train accuracy
                        ## !!!!!!!!!!!!!!!!!!!!!!! np "params"
                        cPredictTra = cMMR.mmr_test(cOptDual, itraindata=0)
                        ## counts the proportion of the ones predicted correctly
                        ## ######################################
                        if cMMR.itestmode == 2:
                            ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                              cPredictTra)
                        else:
                            ypred = cPredictTra.zPred
                        cEvaluationTra= \
                              mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), \
                                                 ypred)
                        xresult_train[iipar, irepeat,
                                      ifold] = cEvaluationTra.accuracy
                        ## ######################################
                        ## check the test accuracy
                        t0 = time.clock()
                        ## !!!!!!!!!!!!!!!!!!!!!!! np "params"
                        cPredictTes = cMMR.mmr_test(cOptDual, itraindata=1)
                        ## counts the proportion of the ones predicted correctly
                        if cMMR.itestmode == 2:
                            ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                              cPredictTes)
                        else:
                            ypred = cPredictTes.zPred
                        ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
                        cEvaluationTes= \
                              mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), \
                                                 ypred)

                        xtime[2] = time.clock() - t0
                        xresult_test[iipar, irepeat,
                                     ifold] = cEvaluationTes.accuracy

                        xpr[iipar, irepeat, ifold,
                            0] = cEvaluationTes.precision
                        xpr[iipar, irepeat, ifold, 1] = cEvaluationTes.recall
                        xpr[iipar, irepeat, ifold, 2] = cEvaluationTes.f1
                        xpr[iipar, irepeat, ifold, 3] = cEvaluationTes.accuracy

                        # (added by simon) for now I will just add the new data to
                        # the dataset with a random label and check the confusion
                        # matrix --> very ugly solution, but I can't figure it out
                        # in a clean way
                        # print(cEvaluationTes.classconfusion)
                        evaluatedRes = [
                            row[0] for row in cEvaluationTes.classconfusion
                        ]
                        evaluatedRes.append(cvalidation.validationScore)
                        #nonZeroIndexes = [i for i, e in enumerate(evaluatedRes) if e != 0]
                        #print(evaluatedRes)
                        #return nonZeroIndexes[0]
                        return evaluatedRes
                        try:
                            xclassconfusion += cEvaluationTes.classconfusion
                        except NameError:  ## xclassconfusion not yet allocated (first fold)
                            (n, n) = cEvaluationTes.classconfusion.shape
                            xclassconfusion = np.zeros((n, n))
                            xclassconfusion += cEvaluationTes.classconfusion
                        ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)
                        mmr_report.mmr_report('Result on one fold',
                               xresult_train[iipar,irepeat,ifold], \
                               xresult_test[iipar,irepeat,ifold], \
                               xpr[iipar,irepeat,ifold,:])

                sys.stdout.flush()

                dresult[ifeature] = (cMMR.xbias,
                                     np.mean(np.mean(xpr[iipar, :, :, :], 0),
                                             0))

        for sfeature_type, tresult in dresult.items():
            ## xhead=cMMR.xbias
            xhead = ''
            lresult.append((xhead, tresult))

    return [-1]
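
Note: in Example No. 2 the early return evaluatedRes inside the fold loop makes the confusion-matrix accumulation, the mmr_report call, and the final return [-1] below it unreachable; the function effectively returns after the first fold. A minimal sketch, under the assumption that the accumulated class confusion is still wanted before returning:

import numpy as np

def accumulate_classconfusion(xclassconfusion, classconfusion):
    """Lazily allocate the accumulator on first use, then add the per-fold counts."""
    if xclassconfusion is None:
        n = classconfusion.shape[0]
        xclassconfusion = np.zeros((n, n))
    xclassconfusion += classconfusion
    return xclassconfusion

## usage sketch inside the fold loop, before the early return
## (with xclassconfusion initialized to None before the loop):
##   xclassconfusion = accumulate_classconfusion(xclassconfusion, cEvaluationTes.classconfusion)
##   evaluatedRes = [row[0] for row in cEvaluationTes.classconfusion]
##   evaluatedRes.append(cvalidation.validationScore)
##   return evaluatedRes
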
Example No. 3
def mmr_main(iworkmode):

  params=mmr_setparams.cls_params()
  
  np.set_printoptions(precision=4)
  
  dresult={}
## ---------------------------------------------
  list_object=['parts']
  nviews=1
  lfile_in=[[[(3,4,21)]]]
  tfile_out=(3,1,6)

  lresult=[]
  for iobject in range(len(lfile_in)):

    tfile_in=lfile_in[iobject]
    
    for ifeature in range(nviews):

      params.ninputview=len(tfile_in)
      cMMR=mmr_mmr_cls.cls_mmr(params.ninputview)
      nfold=cMMR.nfold
      nrepeat=cMMR.nrepeat
      cMMR.xbias=-0.95-ifeature*0.05
      print('Xbias:',cMMR.xbias)

      nscore=4
      nipar=1
      if cMMR.crossval_mode==0:   ## random
        nfold0=nfold
        xresult_test=np.zeros((nipar,nrepeat,nfold0))
        xresult_train=np.zeros((nipar,nrepeat,nfold0))
        xpr=np.zeros((nipar,nrepeat,nfold0,nscore))
      elif cMMR.crossval_mode==1:  ## predefined training and test
        nrepeat=1
        nfold0=1
        xresult_test=np.zeros((nipar,nrepeat,nfold0))
        xresult_train=np.zeros((nipar,nrepeat,nfold0))
        xpr=np.zeros((nipar,nrepeat,nfold0,nscore))

    ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@

      cdata_store=objpart_load_data.cls_label_files()  
      cdata_store.load_mmr(cMMR,tfile_in,tfile_out)
      mdata=cMMR.mdata

      ## -----------------------------------------------
      print('Output kernel type: ',cMMR.YKernel.kernel_params.kernel_type)
      for i in range(params.ninputview):
        print(i,'Input kernel type: ',cMMR.XKernel[i].kernel_params.kernel_type)
      ## -------------------------------------------------

      xtime=np.zeros(5)
    ## ############################################################
      nparam=4    ## C,D,par1,par2
      xbest_param=np.zeros((nrepeat,nfold0,nparam))

      ## xinpar=[0.2,0.3,0.4,0.5,0.6]
      for iipar in range(nipar):

        print('===================================================')
        for irepeat in range(nrepeat):
        ## split data into training and test
          if cMMR.crossval_mode==0:  ## random selection
            xselector=np.floor(np.random.random(mdata)*nfold0)
            xselector=xselector-(xselector==nfold0)
          elif cMMR.crossval_mode==1: ## predefined training and test
            xselector=np.zeros(mdata)
            xselector[cMMR.ifixtrain]=1

          for ifold in range(nfold0):
            cMMR.split_train_test(xselector,ifold)

            ## validation to choose the best parameters
            print('Validation')
            t0=time.clock()
            ## select the kernel to be validated
            cMMR.set_validation()

            cvalidation=mmr_validation_cls.cls_mmr_validation()
            cvalidation.validation_rkernel=cMMR.XKernel[0].title
            best_param=cvalidation.mmr_validation(cMMR)
          
            xtime[0]=time.clock()-t0

            print('Best parameters found by validation')
            print('c: ',best_param.c)
            print('d: ',best_param.d)
            print('par1: ',best_param.par1)
            print('par2: ',best_param.par2)
            xbest_param[irepeat,ifold,0]=best_param.c
            xbest_param[irepeat,ifold,1]=best_param.d
            xbest_param[irepeat,ifold,2]=best_param.par1
            xbest_param[irepeat,ifold,3]=best_param.par2

            cMMR.compute_kernels()
            cMMR.Y0=cMMR.YKernel.get_train(cMMR.itrain)   ## candidates

      ## training with the best parameters
            print('Training')

            print(cMMR.YKernel.kernel_params.kernel_type, \
                  cMMR.YKernel.kernel_params.ipar1, \
                  cMMR.YKernel.kernel_params.ipar2)
            for iview in range(cMMR.ninputview):
              print(cMMR.XKernel[iview].kernel_params.kernel_type, \
                    cMMR.XKernel[iview].kernel_params.ipar1, \
                    cMMR.XKernel[iview].kernel_params.ipar2)
              
            
            t0=time.clock()
            cOptDual=cMMR.mmr_train()
            xtime[1]=time.clock()-t0
      ## cls transfers the dual variables to the test procedure
      ## compute tests 
      ## check the train accuracy
            print('Test')
            cPredictTra=cMMR.mmr_test(cOptDual,itraindata=0)
      ## counts the proportion of the ones predicted correctly
      ## ######################################
            if cMMR.itestmode==2:
              print('Test knn')
              ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                cPredictTra)
            else:
              ypred=cPredictTra.zPred
            cEvaluationTra= \
                  mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), \
                                     ypred)
            xresult_train[iipar,irepeat,ifold]=cEvaluationTra.accuracy
            print('>>>>>>>>>>>\n',cEvaluationTra.confusion)
      ## ######################################     
      ## check the test accuracy
            t0=time.clock()
            cPredictTes= cMMR.mmr_test(cOptDual,itraindata=1)
      ## counts the proportion of the ones predicted correctly
            if cMMR.itestmode==2:
              ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                cPredictTes)
            else:
              ypred=cPredictTes.zPred
            ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
            cEvaluationTes= \
                  mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), \
                                     ypred)

            xtime[2]=time.clock()-t0
            xresult_test[iipar,irepeat,ifold]=cEvaluationTes.accuracy

            xpr[iipar,irepeat,ifold,0]=cEvaluationTes.precision
            xpr[iipar,irepeat,ifold,1]=cEvaluationTes.recall
            xpr[iipar,irepeat,ifold,2]=cEvaluationTes.f1
            xpr[iipar,irepeat,ifold,3]=cEvaluationTes.accuracy

            print(cEvaluationTes.confusion)
            print(cEvaluationTes.classconfusion)
            try:
              xclassconfusion+=cEvaluationTes.classconfusion
            except NameError:  ## xclassconfusion not yet allocated (first fold)
              (n,n)=cEvaluationTes.classconfusion.shape
              xclassconfusion=np.zeros((n,n))
              xclassconfusion+=cEvaluationTes.classconfusion

      ## ####################################
            print('Parameter:',iipar,'Repetition: ',irepeat, \
                  'Fold: ',ifold)
            mmr_report('Result on one fold',
                       xresult_train[iipar,irepeat,ifold], \
                       xresult_test[iipar,irepeat,ifold], \
                       xpr[iipar,irepeat,ifold,:])
            print(np.sum(xpr[iipar,irepeat,:ifold+1,:],0)/(ifold+1))

          mmr_report('Result on one repetition',
                     np.mean(xresult_train[iipar,irepeat,:]), \
                     np.mean(xresult_test[iipar,irepeat,:]), \
                     np.mean(xpr[iipar,irepeat,:,:],0))

        mmr_report('Result on all repetitions @@@@@@@',
                   np.mean(xresult_train[iipar,:,:].flatten()), \
                   np.mean(xresult_test[iipar,:,:].flatten()), \
                   np.mean(np.mean(xpr[iipar,:,:,:],0),0))



        print('Average best parameters')
      ##  sfield=dir(best_param)
        xlabels=('c','d','par1','par2')
        for i in range(nparam):
      ##    print(sfield[i])
          print(xlabels[i],': ',np.mean(xbest_param[:,:,i]), \
                '(',np.std(xbest_param[:,:,i]),')')

        print('xtime:',xtime)
        sys.stdout.flush()

        dresult[ifeature]=(cMMR.xbias,np.mean(np.mean(xpr[iipar,:,:,:],0),0))

    for sfeature_type,tresult in dresult.items():
      ## xhead=cMMR.xbias
      headkey=tfile_in[0][0]
      xhead=cdata_store.dirvar[headkey][0]+', '+cdata_store.dirvar[headkey][1]
      lresult.append((xhead,tresult))

    ## lresult.sort()
    ## for litem in lresult:
    ##   print(litem)

    print('\\begin{tabular}{l|rrr}')
    print('& \\multicolumn{3}{c}{'+'Objects'+'} \\\\')
    print('Feature type & Precision & Recall & F1 \\\\ \\hline')
    for litem in lresult:
      print(litem[0],' & ','%6.4f'%litem[1][1][0], \
            ' & ','%6.4f'%litem[1][1][1],' & ','%6.4f'%litem[1][1][2],' \\\\')
    print('\\end{tabular}')  

  print(xclassconfusion)

  print('Bye')
  
  return
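
Note: the random cross-validation branch in Examples No. 1 and No. 3 assigns each sample to a fold with np.floor(np.random.random(mdata)*nfold0) and then subtracts (xselector == nfold0). Since np.random.random() draws from [0, 1), the floor is already at most nfold0 - 1, so the subtraction is only a defensive clamp. A self-contained sketch of that fold assignment, with the function name chosen here for illustration:

import numpy as np

def random_fold_selector(mdata, nfold):
    """Assign each of mdata samples a fold index in 0..nfold-1, uniformly at random."""
    xselector = np.floor(np.random.random(mdata) * nfold)
    xselector = xselector - (xselector == nfold)   # defensive clamp of the edge value nfold
    return xselector

## samples with xselector == ifold presumably form the test set of fold ifold,
## which is what cMMR.split_train_test(xselector, ifold) consumes in the examples.
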
Example No. 4
def mmr_main(iworkmode):

  params=mmr_setparams.cls_params()
  np.set_printoptions(precision=4)
  
  dresult={}
## ---------------------------------------------
  nview=1
  nobject=1
  params.ninputview=nview

  lresult=[]

  for iobject in range(nobject):

    for ifeature in range(nview):

      cMMR=mmr_mmr_cls.cls_mmr(params.ninputview)
      nfold=cMMR.nfold
      nrepeat=cMMR.nrepeat
      ## cMMR.xbias=-0.06  ## 4 categories
      cMMR.xbias=0.0 
      ## cMMR.xbias=0.1-ifeature*0.01 
      print('Xbias:',cMMR.xbias)

      nscore=4
      nipar=1
      if cMMR.crossval_mode==0:   ## random
        nfold0=nfold
        xresult_test=np.zeros((nipar,nrepeat,nfold0))
        xresult_train=np.zeros((nipar,nrepeat,nfold0))
        xpr=np.zeros((nipar,nrepeat,nfold0,nscore))
      elif cMMR.crossval_mode==1:  ## predefined training and test
        nrepeat=1
        nfold0=1
        xresult_test=np.zeros((nipar,nrepeat,nfold0))
        xresult_train=np.zeros((nipar,nrepeat,nfold0))
        xpr=np.zeros((nipar,nrepeat,nfold0,nscore))

    ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@

      ## cMMR=mmr_mmr_cls.cls_mmr(params.ninputview)

      cdata_store=vision_load_data.cls_label_files()  
      cdata_store.load_mmr(cMMR)
      mdata=cMMR.mdata

      ## -----------------------------------------------
      print('Output kernel type: ',cMMR.YKernel.kernel_params.kernel_type)
      for i in range(params.ninputview):
        print(i,'Input kernel type: ',cMMR.XKernel[i].kernel_params.kernel_type)
      ## -------------------------------------------------

      xcross=np.zeros((mdata,mdata))

      xtime=np.zeros(5)
    ## ############################################################
      nparam=4    ## C,D,par1,par2
      xbest_param=np.zeros((nrepeat,nfold0,nparam))

      for iipar in range(nipar):
        
        print('===================================================')
        for irepeat in range(nrepeat):
        ## split data into training and test
          if cMMR.crossval_mode==0:  ## random selection
            xselector=np.zeros(mdata)
            ifold=0
            for i in range(mdata):
              xselector[i]=ifold
              ifold+=1
              if ifold>=nfold0:
                ifold=0
            np.random.shuffle(xselector)
            ## xselector=np.floor(np.random.random(mdata)*nfold0)
            ## xselector=xselector-(xselector==nfold0)
          elif cMMR.crossval_mode==1: ## predefined training and test
            xselector=np.zeros(mdata)
            xselector[cMMR.ifixtrain]=1

          for ifold in range(nfold0):
            cMMR.split_train_test(xselector,ifold)

            ## validation to choose the best parameters
            print('Validation')
            t0=time.clock()
            ## select the kernel to be validated
            cMMR.set_validation()

            cvalidation=mmr_validation_cls.cls_mmr_validation()
            cvalidation.validation_rkernel=cMMR.XKernel[0].title
            best_param=cvalidation.mmr_validation(cMMR)


            xtime[0]=time.clock()-t0

            print('Best parameters found by validation')
            print('c: ',best_param.c)
            print('d: ',best_param.d)
            print('par1: ',best_param.par1)
            print('par2: ',best_param.par2)
            xbest_param[irepeat,ifold,0]=best_param.c
            xbest_param[irepeat,ifold,1]=best_param.d
            xbest_param[irepeat,ifold,2]=best_param.par1
            xbest_param[irepeat,ifold,3]=best_param.par2

            cMMR.compute_kernels()
            cMMR.Y0=cMMR.YKernel.get_train(cMMR.itrain)   ## candidates

      ## training with the best parameters
            print('Training')

            print(cMMR.YKernel.kernel_params.kernel_type, \
                  cMMR.YKernel.kernel_params.ipar1, \
                  cMMR.YKernel.kernel_params.ipar2)
            for iview in range(cMMR.ninputview):
              print(cMMR.XKernel[iview].kernel_params.kernel_type, \
                    cMMR.XKernel[iview].kernel_params.ipar1, \
                    cMMR.XKernel[iview].kernel_params.ipar2)
              
            
            t0=time.clock()
            cOptDual=cMMR.mmr_train()
            xtime[1]=time.clock()-t0
      ## cls transfers the dual variables to the test procedure
      ## compute tests 
      ## check the train accuracy
            print('Test')
            cPredictTra=cMMR.mmr_test(cOptDual,itraindata=0)
      ## counts the proportion of the ones predicted correctly
      ## ######################################
            if cMMR.itestmode==2:
              print('Test knn')
              ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                cPredictTra)
            else:
              ypred=cPredictTra.zPred
            cEvaluationTra= \
                  mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), \
                                     ypred)
            xresult_train[iipar,irepeat,ifold]=cEvaluationTra.accuracy
            print('>>>>>>>>>>>\n',cEvaluationTra.confusion)
      ## ######################################     
      ## check the test accuracy
            t0=time.clock()
            cPredictTes= cMMR.mmr_test(cOptDual,itraindata=1)
      ## counts the proportion of the ones predicted correctly
            if cMMR.itestmode==2:
              ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                cPredictTes)
            else:
              ypred=cPredictTes.zPred
            ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
            cEvaluationTes= \
                  mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), \
                                     ypred)

            xtime[2]=time.clock()-t0
            xresult_test[iipar,irepeat,ifold]=cEvaluationTes.accuracy

            xpr[iipar,irepeat,ifold,0]=cEvaluationTes.precision
            xpr[iipar,irepeat,ifold,1]=cEvaluationTes.recall
            xpr[iipar,irepeat,ifold,2]=cEvaluationTes.f1
            xpr[iipar,irepeat,ifold,3]=cEvaluationTes.accuracy

            print(cEvaluationTes.confusion)
            print(cEvaluationTes.classconfusion)
            try:
              xclassconfusion+=cEvaluationTes.classconfusion
            except NameError:  ## xclassconfusion not yet allocated (first fold)
              (n,n)=cEvaluationTes.classconfusion.shape
              xclassconfusion=np.zeros((n,n))
              xclassconfusion+=cEvaluationTes.classconfusion
            ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)

      ## ####################################
            print('Parameter:',iipar,'Repetition: ',irepeat, \
                  'Fold: ',ifold)
            mmr_report('Result on one fold',
                       xresult_train[iipar,irepeat,ifold], \
                       xresult_test[iipar,irepeat,ifold], \
                       xpr[iipar,irepeat,ifold,:])
            print(np.sum(xpr[iipar,irepeat,:ifold+1,:],0)/(ifold+1))

          mmr_report('Result on one repetition',
                     np.mean(xresult_train[iipar,irepeat,:]), \
                     np.mean(xresult_test[iipar,irepeat,:]), \
                     np.mean(xpr[iipar,irepeat,:,:],0))

        mmr_report('Result on all repetitions @@@@@@@',
                   np.mean(xresult_train[iipar,:,:].flatten()), \
                   np.mean(xresult_test[iipar,:,:].flatten()), \
                   np.mean(np.mean(xpr[iipar,:,:,:],0),0))



        print('Average best parameters')
      ##  sfield=dir(best_param)
        xlabels=('c','d','par1','par2')
        for i in range(nparam):
      ##    print(sfield[i])
          print(xlabels[i],': ',np.mean(xbest_param[:,:,i]), \
                '(',np.std(xbest_param[:,:,i]),')')

        print('xtime:',xtime)
        sys.stdout.flush()

        dresult[ifeature]=(cMMR.xbias,np.mean(np.mean(xpr[iipar,:,:,:],0),0))

    for sfeature_type,tresult in dresult.items():
      ## xhead=cMMR.xbias
      xhead=''
      lresult.append((xhead,tresult))

    ## lresult.sort()
    ## for litem in lresult:
    ##   print(litem)

    print('\\begin{tabular}{l|rrr}')
    print('& \\multicolumn{3}{c}{'+'Objects'+'} \\\\')
    print('Feature type & Precision & Recall & F1 \\\\ \\hline')
    for litem in lresult:
      print(litem[0],' & ','%6.4f'%litem[1][1][0], \
            ' & ','%6.4f'%litem[1][1][1],' & ','%6.4f'%litem[1][1][2],' \\\\')
    print('\\end{tabular}')  

    ## print('\\begin{tabular}{l|rrr}')
    ## print('& \\multicolumn{3}{c}{'+'Objects'+'} \\\\')
    ## print('Feature & xbias & Precision & Recall & F1 \\\\ \\hline')
    ## for litem in lresult:
    ##   print(litem[0],' & ','%6.4f'%litem[1][0],' & ','%6.4f'%litem[1][1][0], \
    ##         ' & ','%6.4f'%litem[1][1][1],' & ','%6.4f'%litem[1][1][2],' \\\\')
    ## print('\\end{tabular}')  


  ## ##########################################################
  ## !!!! It saves the optimal dual variables, and optimal, crossvalidated,
  ##  kernel parameters into files given in vision_load_data.

  ## prepare full training with the best parameters

  ifold=0
  xselector=np.ones(mdata)
  cMMR.split_train_test(xselector,ifold)
  best_param=np.array([ np.mean(xbest_param[:,:,i]) for i in range(nparam)])
  cMMR.penalty.c=best_param[0]
  cMMR.penalty.d=best_param[1]
  cMMR.XKernel[0].kernel_params.ipar1=best_param[2]
  cMMR.XKernel[0].kernel_params.ipar2=best_param[3]

  cMMR.compute_kernels()
  cMMR.Y0=cMMR.YKernel.get_train(cMMR.itrain)   ## candidates
  ## training with the best parameters
  print('Full training')
  cOptDual=cMMR.mmr_train()

  np.savetxt(cdata_store.sbasedir+cdata_store.dual_params,cMMR.dual.alpha, \
             fmt='%9.4f')
  np.savetxt(cdata_store.sbasedir+cdata_store.kernel_params,best_param[2:], \
             fmt='%9.4f')

  print(xclassconfusion)

  print('Bye')
  
  return
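
Note: Example No. 4 ends by retraining on all data with the averaged best parameters and saving the dual variables and kernel parameters with np.savetxt. A minimal sketch of reading them back with np.loadtxt, assuming the same cdata_store attributes (sbasedir, dual_params, kernel_params) used above:

import numpy as np

def load_trained_model(cdata_store):
    """Reload the dual variables and kernel parameters saved by the full-training step."""
    alpha = np.loadtxt(cdata_store.sbasedir + cdata_store.dual_params)
    kernel_par = np.loadtxt(cdata_store.sbasedir + cdata_store.kernel_params)
    return alpha, kernel_par

## kernel_par corresponds to best_param[2:] above, i.e. ipar1 and ipar2
## of cMMR.XKernel[0].kernel_params.
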
Example No. 5
def mmr_main(iworkmode, trainingBase, evalFile, performcl):

  params=mmr_setparams.cls_params()
  
  ## np.set_printoptions(precision=4)
  
  dresult={}
  nview=1

  nobject=1

  ## !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
  ## ################################################
  lviews=[0,1] ### this list could contain a subset of 0,1,2
  ## ################################################

  lresult=[]
  for iobject in range(nobject):

    for ifeature in range(nview):

      params.ninputview=len(lviews)
      cMMR=mmr_mmr_cls.cls_mmr(params.ninputview)
      nfold=cMMR.nfold
      nrepeat=cMMR.nrepeat
      ## cMMR.xbias=-0.06  ## 4 categories
      cMMR.xbias=0.02-ifeature*0.01 
      ## cMMR.xbias=0.1-ifeature*0.01 

      nscore=4
      nipar=1
      
      cMMR.crossval_mode = 1
      if cMMR.crossval_mode==0:   ## random
        nfold0=nfold
        xresult_test=np.zeros((nipar,nrepeat,nfold0))
        xresult_train=np.zeros((nipar,nrepeat,nfold0))
        xpr=np.zeros((nipar,nrepeat,nfold0,nscore))
      elif cMMR.crossval_mode==1:  ## predefined training and test
        nrepeat=1
        nfold0=1
        xresult_test=np.zeros((nipar,nrepeat,nfold0))
        xresult_train=np.zeros((nipar,nrepeat,nfold0))
        xpr=np.zeros((nipar,nrepeat,nfold0,nscore))

      cdata_store = trajlab_load_data.cls_label_files(trainingBase, evalFile, performcl)  
      cdata_store.load_mmr(cMMR, lviews)
      mdata=cMMR.mdata

      xcross=np.zeros((mdata,mdata))

      ## !!!!!!!!!!!!!!!!!!
      ## params.validation.rkernel=cMMR.XKernel[0].title

      xtime=np.zeros(5)
    ## ############################################################
      nparam=4    ## C,D,par1,par2
      xbest_param=np.zeros((nrepeat,nfold0,nparam))

      for iipar in range(nipar):
        
        for irepeat in range(nrepeat):
        ## split data into training and test
          if cMMR.crossval_mode==0:  ## random selection
            xselector=np.zeros(mdata)
            ifold=0
            for i in range(mdata):
              xselector[i]=ifold
              ifold+=1
              if ifold>=nfold0:
                ifold=0
            np.random.shuffle(xselector)
          elif cMMR.crossval_mode==1: ## predefined training and test
      # (added by simon) train with all data but the last one (not elegant, but works)
            cMMR.ifixtrain = list(range(mdata - 1))
            xselector = np.zeros(mdata)
            xselector[cMMR.ifixtrain] = 1

          for ifold in range(nfold0):
            cMMR.split_train_test(xselector,ifold)

            ## validation to choose the best parameters
            t0 = time.clock()

            ## !!!!!!!!!!!!!!!!!!!!!!!!!
            cMMR.set_validation()
            cvalidation=mmr_validation_cls.cls_mmr_validation()
            ## !!!!!!!!!!!!!!!!!!!!!!!!! no parameter "params"
            best_param = cvalidation.mmr_validation(cMMR)
              
            xtime[0] = time.clock() - t0
            xbest_param[irepeat,ifold,0]=best_param.c
            xbest_param[irepeat,ifold,1]=best_param.d
            xbest_param[irepeat,ifold,2]=best_param.par1
            xbest_param[irepeat,ifold,3]=best_param.par2

            cMMR.compute_kernels()
            cMMR.Y0=cMMR.YKernel.get_train(cMMR.itrain)   ## candidates
            
            t0=time.clock()
            ## !!!!!!!!!!!!!!!!!!!!!!! np "params"
            cOptDual=cMMR.mmr_train()
            xtime[1]=time.clock()-t0
      ## cls transfers the dual variables to the test procedure
      ## compute tests 
      ## check the train accuracy
            ## !!!!!!!!!!!!!!!!!!!!!!! np "params"
            cPredictTra = cMMR.mmr_test(cOptDual,itraindata=0)
      ## counts the proportion of the ones predicted correctly
      ## ######################################
            if cMMR.itestmode==2:
              ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                cPredictTra)
            else:
              ypred=cPredictTra.zPred
            cEvaluationTra= \
                  mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), \
                                     ypred)
            xresult_train[iipar,irepeat,ifold]=cEvaluationTra.accuracy
      ## ######################################     
      ## check the test accuracy
            t0=time.clock()
            ## !!!!!!!!!!!!!!!!!!!!!!! np "params"
            cPredictTes = cMMR.mmr_test(cOptDual,itraindata=1)
      ## counts the proportion of the ones predicted correctly
            if cMMR.itestmode==2:
              ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                cPredictTes)
            else:
              ypred=cPredictTes.zPred
            ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
            cEvaluationTes= \
                  mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), \
                                     ypred)

            xtime[2] = time.clock() - t0
            xresult_test[iipar,irepeat,ifold] = cEvaluationTes.accuracy

            xpr[iipar,irepeat,ifold,0]=cEvaluationTes.precision
            xpr[iipar,irepeat,ifold,1]=cEvaluationTes.recall
            xpr[iipar,irepeat,ifold,2]=cEvaluationTes.f1
            xpr[iipar,irepeat,ifold,3]=cEvaluationTes.accuracy

            # (added by simon) for now I will just add the new data to
            # the dataset with a random label and check the confusion
            # matrix --> very ugly solution, but I can't figure it out
            # in a clean way
            # print(cEvaluationTes.classconfusion)
            evaluatedRes = [row[0] for row in cEvaluationTes.classconfusion]
            evaluatedRes.append(cvalidation.validationScore)
            #nonZeroIndexes = [i for i, e in enumerate(evaluatedRes) if e != 0]
            #print(evaluatedRes)
            #return nonZeroIndexes[0]
            return evaluatedRes
            try:
              xclassconfusion+=cEvaluationTes.classconfusion
            except NameError:  ## xclassconfusion not yet allocated (first fold)
              (n,n)=cEvaluationTes.classconfusion.shape
              xclassconfusion=np.zeros((n,n))
              xclassconfusion+=cEvaluationTes.classconfusion
            ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)
            mmr_report.mmr_report('Result on one fold',
                   xresult_train[iipar,irepeat,ifold], \
                   xresult_test[iipar,irepeat,ifold], \
                   xpr[iipar,irepeat,ifold,:])

        sys.stdout.flush()

        dresult[ifeature]=(cMMR.xbias,np.mean(np.mean(xpr[iipar,:,:,:],0),0))

    for sfeature_type,tresult in dresult.items():
      ## xhead=cMMR.xbias
      xhead=''
      lresult.append((xhead,tresult))
  
  return [-1]
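
Note: in Example No. 5 (as in Example No. 2) the predefined split trains on every sample except the last: cMMR.ifixtrain = list(range(mdata - 1)) marks indices 0..mdata-2 as training (xselector == 1) and leaves only the final sample as test. A small sketch of that selector construction, assuming 1 marks training and 0 marks test, as the surrounding code suggests:

import numpy as np

def hold_out_last_selector(mdata):
    """Selector for a predefined split: train on samples 0..mdata-2, test on the last one."""
    ifixtrain = list(range(mdata - 1))   # every index except the last
    xselector = np.zeros(mdata)
    xselector[ifixtrain] = 1             # 1 = training, 0 = test
    return ifixtrain, xselector

As the inline comments suggest, this is a pragmatic way to score a single new sample against the training base rather than a full evaluation protocol.
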
Example No. 6
def mmr_main(iworkmode):

  params=mmr_setparams.cls_params()



## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@

  list_features=["annot","DenseHue","DenseHueV3H1", \
                "DenseSift","DenseSiftV3H1","Gist", \
                "HarrisHue","HarrisHueV3H1","HarrisSift", \
                "HarrisSiftV3H1","Hsv","HsvV3H1","Lab", \
                "LabV3H1","Rgb","RgbV3H1"]

  ## data files in the corresponding directories
  datadirs=['corel5k','espgame','iaprtc12','mirflickr','pascal07']

  ## Example lfeatures=[4,8] means we selected the features:
  ##                                "DenseSiftV3H1" and  "HarrisSift"   
  lfeatures=[4]
  params.ninputview=len(lfeatures)
  idata=0       ## process corel5k data set

  xdatacls=mmr_mmr_cls.cls_mmr(params.ninputview)
  nfold=xdatacls.nfold
  nrepeat=xdatacls.nrepeat
  print('Xbias:',xdatacls.xbias)

  cdata_store=mmr_load_data.cls_data_load()  
  cdata_store.load_data(xdatacls,idata,lfeatures)
  mdata=xdatacls.mdata

  ## initializing the array collecting the results
  nscore=4
  nipar=1
  if xdatacls.crossval_mode==0:   ## random
    nfold0=nfold
    xresult_test=np.zeros((nipar,nrepeat,nfold0))
    xresult_train=np.zeros((nipar,nrepeat,nfold0))
    xpr=np.zeros((nipar,nrepeat,nfold0,nscore))
  elif xdatacls.crossval_mode==1:  ## predefined training and test
    nrepeat=1
    nfold0=1
    xresult_test=np.zeros((nipar,nrepeat,nfold0))
    xresult_train=np.zeros((nipar,nrepeat,nfold0))
    xpr=np.zeros((nipar,nrepeat,nfold0,nscore))


  ## -----------------------------------------------
  print('Output kernel type: ',xdatacls.YKernel.kernel_params.kernel_type)
  for i in range(params.ninputview):
    print(i,'Input kernel type: ',xdatacls.XKernel[i].kernel_params.kernel_type)
  ## -------------------------------------------------

  xcross=np.zeros((mdata,mdata))

  xtime=np.zeros(5)
## ############################################################
  nparam=4    ## C,D,par1,par2
  xbest_param=np.zeros((nrepeat,nfold0,nparam))

  for iipar in range(nipar):

    print('===================================================')
    for irepeat in range(nrepeat):

      xdatacls.prepare_repetition_training(nfold0)

      for ifold in range(nfold0):

        xdatacls.prepare_fold_training(ifold)

        ## validation to choose the best parameters
        print('Validation')
        t0=time.clock()
        xdatacls.set_validation()
        cvalidation=mmr_validation_cls.cls_mmr_validation()
        cvalidation.validation_rkernel=xdatacls.XKernel[0].title
        best_param=cvalidation.mmr_validation(xdatacls)

        xtime[0]=time.clock()-t0

        print('Best parameters found by validation')
        print('c: ',best_param.c)
        print('d: ',best_param.d)
        print('par1: ',best_param.par1)
        print('par2: ',best_param.par2)
        xbest_param[irepeat,ifold,0]=best_param.c
        xbest_param[irepeat,ifold,1]=best_param.d
        xbest_param[irepeat,ifold,2]=best_param.par1
        xbest_param[irepeat,ifold,3]=best_param.par2

        xdatacls.compute_kernels()
        xdatacls.Y0=xdatacls.YKernel.get_train(xdatacls.itrain)   ## candidates

  ## training with the best parameters
        print('Training')

        print(xdatacls.YKernel.kernel_params.kernel_type, \
              xdatacls.YKernel.kernel_params.ipar1, \
              xdatacls.YKernel.kernel_params.ipar2)
        for iview in range(xdatacls.ninputview):
          print(xdatacls.XKernel[iview].kernel_params.kernel_type, \
                xdatacls.XKernel[iview].kernel_params.ipar1, \
                xdatacls.XKernel[iview].kernel_params.ipar2)


        t0=time.clock()
        cOptDual=xdatacls.mmr_train()
        xtime[1]=time.clock()-t0
  ## cls transfers the dual variables to the test procedure
  ## compute tests 
  ## check the train accuracy
        print('Test')
        cPredictTra=xdatacls.mmr_test(cOptDual,itraindata=0)
  ## counts the proportion of the ones predicted correctly
  ## ######################################
        if xdatacls.itestmode==2:
          print('Test knn')
          ypred=inverse_knn(xdatacls.YKernel.get_Y0(xdatacls.itrain), \
                            cPredictTra)
        else:
          ypred=cPredictTra.zPred
        cEvaluationTra= \
              mmr_eval_binvector(xdatacls.YKernel.get_train(xdatacls.itrain), \
                                 ypred)
        xresult_train[iipar,irepeat,ifold]=cEvaluationTra.accuracy
        print('>>>>>>>>>>>\n',cEvaluationTra.confusion)
  ## ######################################     
  ## check the test accuracy
        t0=time.clock()
        cPredictTes= xdatacls.mmr_test(cOptDual,itraindata=1)
  ## counts the proportion of the ones predicted correctly
        if xdatacls.itestmode==2:
          ypred=inverse_knn(xdatacls.YKernel.get_Y0(xdatacls.itrain), \
                            cPredictTes)
        else:
          ypred=cPredictTes.zPred
        ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
        cEvaluationTes= \
              mmr_eval_binvector(xdatacls.YKernel.get_test(xdatacls.itest), \
                                 ypred)

        xtime[2]=time.clock()-t0
        xresult_test[iipar,irepeat,ifold]=cEvaluationTes.accuracy

        xpr[iipar,irepeat,ifold,0]=cEvaluationTes.precision
        xpr[iipar,irepeat,ifold,1]=cEvaluationTes.recall
        xpr[iipar,irepeat,ifold,2]=cEvaluationTes.f1
        xpr[iipar,irepeat,ifold,3]=cEvaluationTes.accuracy

        print(cEvaluationTes.confusion)
        print(cEvaluationTes.classconfusion)
        try:
          xclassconfusion+=cEvaluationTes.classconfusion
        except NameError:  ## xclassconfusion not yet allocated (first fold)
          (n,n)=cEvaluationTes.classconfusion.shape
          xclassconfusion=np.zeros((n,n))
          xclassconfusion+=cEvaluationTes.classconfusion
        ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)

  ## ####################################
        print('Parameter:',iipar,'Repetition: ',irepeat, \
              'Fold: ',ifold)
        mmr_report.mmr_report('Result on one fold',
                   xresult_train[iipar,irepeat,ifold], \
                   xresult_test[iipar,irepeat,ifold], \
                   xpr[iipar,irepeat,ifold,:])
        print(np.sum(xpr[iipar,irepeat,:ifold+1,:],0)/(ifold+1))

      mmr_report.mmr_report('Result on one repetition',
                 np.mean(xresult_train[iipar,irepeat,:]), \
                 np.mean(xresult_test[iipar,irepeat,:]), \
                 np.mean(xpr[iipar,irepeat,:,:],0))

    mmr_report.mmr_report('Result on all repetitions @@@@@@@',
               np.mean(xresult_train[iipar,:,:].flatten()), \
               np.mean(xresult_test[iipar,:,:].flatten()), \
               np.mean(np.mean(xpr[iipar,:,:,:],0),0))



    print('Average best parameters')
    xlabels=('c','d','par1','par2')
    for i in range(nparam):
      print(xlabels[i],': ',np.mean(xbest_param[:,:,i]), \
              '(',np.std(xbest_param[:,:,i]),')')

    print('xtime:',xtime)
    sys.stdout.flush()

  print('Bye')
  
  return
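
Note: after the parameter sweep, Examples No. 1, 3, 4 and 6 print the average of the cross-validated best parameters as mean (std) over all repetitions and folds. A compact sketch of that summary, assuming the same xbest_param array of shape (nrepeat, nfold0, 4) filled in the loops above:

import numpy as np

def report_best_params(xbest_param, xlabels=('c', 'd', 'par1', 'par2')):
    """Print mean and standard deviation of each tuned parameter over repetitions and folds."""
    for i, label in enumerate(xlabels):
        values = xbest_param[:, :, i]
        print(label, ': ', np.mean(values), '(', np.std(values), ')')
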
Example No. 7
def mmr_main(iworkmode):

    params = mmr_setparams.cls_params()
    np.set_printoptions(precision=4)

    dresult = {}
    ## ---------------------------------------------
    nview = 1
    nobject = 1
    params.ninputview = nview

    lresult = []

    for iobject in range(nobject):

        for ifeature in range(nview):

            cMMR = mmr_mmr_cls.cls_mmr(params.ninputview)
            nfold = cMMR.nfold
            nrepeat = cMMR.nrepeat
            ## cMMR.xbias=-0.06  ## 4 categories
            cMMR.xbias = 0.0
            ## cMMR.xbias=0.1-ifeature*0.01
            print('Xbias:', cMMR.xbias)

            nscore = 4
            nipar = 1
            if cMMR.crossval_mode == 0:  ## random
                nfold0 = nfold
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))
            elif cMMR.crossval_mode == 1:  ## predefined training and test
                nrepeat = 1
                nfold0 = 1
                xresult_test = np.zeros((nipar, nrepeat, nfold0))
                xresult_train = np.zeros((nipar, nrepeat, nfold0))
                xpr = np.zeros((nipar, nrepeat, nfold0, nscore))

        ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@

        ## cMMR=mmr_mmr_cls.cls_mmr(params.ninputview)

            cdata_store = vision_load_data.cls_label_files()
            cdata_store.load_mmr(cMMR)
            mdata = cMMR.mdata

            ## -----------------------------------------------
            print('Output kernel type: ',
                  cMMR.YKernel.kernel_params.kernel_type)
            for i in range(params.ninputview):
                print(i, 'Input kernel type: ',
                      cMMR.XKernel[i].kernel_params.kernel_type)
            ## -------------------------------------------------

            xcross = np.zeros((mdata, mdata))

            xtime = np.zeros(5)
            ## ############################################################
            nparam = 4  ## C,D,par1,par2
            xbest_param = np.zeros((nrepeat, nfold0, nparam))

            for iipar in range(nipar):

                print('===================================================')
                for irepeat in range(nrepeat):
                    ## split data into training and test
                    if cMMR.crossval_mode == 0:  ## random selection
                        xselector = np.zeros(mdata)
                        ifold = 0
                        for i in range(mdata):
                            xselector[i] = ifold
                            ifold += 1
                            if ifold >= nfold0:
                                ifold = 0
                        np.random.shuffle(xselector)
                        ## xselector=np.floor(np.random.random(mdata)*nfold0)
                        ## xselector=xselector-(xselector==nfold0)
                    elif cMMR.crossval_mode == 1:  ## predefined training and test
                        xselector = np.zeros(mdata)
                        xselector[cMMR.ifixtrain] = 1

                    for ifold in range(nfold0):
                        cMMR.split_train_test(xselector, ifold)

                        ## validation to choose the best parameters
                        print('Validation')
                        t0 = time.clock()
                        ## select the kernel to be validated
                        cMMR.set_validation()

                        cvalidation = mmr_validation_cls.cls_mmr_validation()
                        cvalidation.validation_rkernel = cMMR.XKernel[0].title
                        best_param = cvalidation.mmr_validation(cMMR)

                        xtime[0] = time.clock() - t0

                        print('Best parameters found by validation')
                        print('c: ', best_param.c)
                        print('d: ', best_param.d)
                        print('par1: ', best_param.par1)
                        print('par2: ', best_param.par2)
                        xbest_param[irepeat, ifold, 0] = best_param.c
                        xbest_param[irepeat, ifold, 1] = best_param.d
                        xbest_param[irepeat, ifold, 2] = best_param.par1
                        xbest_param[irepeat, ifold, 3] = best_param.par2

                        cMMR.compute_kernels()
                        cMMR.Y0 = cMMR.YKernel.get_train(
                            cMMR.itrain)  ## candidates

                        ## training with the best parameters
                        print('Training')

                        print(cMMR.YKernel.kernel_params.kernel_type, \
                              cMMR.YKernel.kernel_params.ipar1, \
                              cMMR.YKernel.kernel_params.ipar2)
                        for iview in range(cMMR.ninputview):
                            print(cMMR.XKernel[iview].kernel_params.kernel_type, \
                                  cMMR.XKernel[iview].kernel_params.ipar1, \
                                  cMMR.XKernel[iview].kernel_params.ipar2)

                        t0 = time.clock()
                        cOptDual = cMMR.mmr_train()
                        xtime[1] = time.clock() - t0
                        ## cls transfers the dual variables to the test procedure
                        ## compute tests
                        ## check the train accuracy
                        print('Test')
                        cPredictTra = cMMR.mmr_test(cOptDual, itraindata=0)
                        ## counts the proportion of the ones predicted correctly
                        ## ######################################
                        if cMMR.itestmode == 2:
                            print('Test knn')
                            ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                              cPredictTra)
                        else:
                            ypred = cPredictTra.zPred
                        cEvaluationTra= \
                              mmr_eval_binvector(cMMR.YKernel.get_train(cMMR.itrain), \
                                                 ypred)
                        xresult_train[iipar, irepeat,
                                      ifold] = cEvaluationTra.accuracy
                        print('>>>>>>>>>>>\n', cEvaluationTra.confusion)
                        ## ######################################
                        ## check the test accuracy
                        t0 = time.clock()
                        cPredictTes = cMMR.mmr_test(cOptDual, itraindata=1)
                        ## counts the proportion of labels predicted correctly
                        if cMMR.itestmode == 2:
                            ypred=inverse_knn(cMMR.YKernel.get_Y0(cMMR.itrain), \
                                              cPredictTes)
                        else:
                            ypred = cPredictTes.zPred
                        ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
                        cEvaluationTes= \
                              mmr_eval_binvector(cMMR.YKernel.get_test(cMMR.itest), \
                                                 ypred)

                        xtime[2] = time.clock() - t0
                        xresult_test[iipar, irepeat,
                                     ifold] = cEvaluationTes.accuracy

                        xpr[iipar, irepeat, ifold,
                            0] = cEvaluationTes.precision
                        xpr[iipar, irepeat, ifold, 1] = cEvaluationTes.recall
                        xpr[iipar, irepeat, ifold, 2] = cEvaluationTes.f1
                        xpr[iipar, irepeat, ifold, 3] = cEvaluationTes.accuracy

                        print(cEvaluationTes.confusion)
                        print(cEvaluationTes.classconfusion)
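                        ## accumulate the per-class confusion matrix over folds;
                        ## it is created lazily on the first fold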
                        try:
                            xclassconfusion += cEvaluationTes.classconfusion
                        except NameError:
                            (n, n) = cEvaluationTes.classconfusion.shape
                            xclassconfusion = np.zeros((n, n))
                            xclassconfusion += cEvaluationTes.classconfusion
                        ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)

            ## ####################################
                        print('Parameter:',iipar,'Repetition: ',irepeat, \
                              'Fold: ',ifold)
                        mmr_report('Result on one fold',
                                   xresult_train[iipar,irepeat,ifold], \
                                   xresult_test[iipar,irepeat,ifold], \
                                   xpr[iipar,irepeat,ifold,:])
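                        ## running mean of precision/recall/F1/accuracy over the
                        ## folds processed so far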
                        print(
                            np.sum(xpr[iipar, irepeat, :ifold + 1, :], 0) /
                            (ifold + 1))

                    mmr_report('Result on one repetition',
                               np.mean(xresult_train[iipar,irepeat,:]), \
                               np.mean(xresult_test[iipar,irepeat,:]), \
                               np.mean(xpr[iipar,irepeat,:,:],0))

                mmr_report('Result on all repetitions @@@@@@@',
                           np.mean(xresult_train[iipar,:,:].flatten()), \
                           np.mean(xresult_test[iipar,:,:].flatten()), \
                           np.mean(np.mean(xpr[iipar,:,:,:],0),0))

                print('Average best parameters')
                ##  sfield=dir(best_param)
                xlabels = ('c', 'd', 'par1', 'par2')
                for i in range(nparam):
                    ##    print(sfield[i])
                    print(xlabels[i],': ',np.mean(xbest_param[:,:,i]), \
                          '(',np.std(xbest_param[:,:,i]),')')

                print('xtime:', xtime)
                sys.stdout.flush()

                dresult[ifeature] = (cMMR.xbias,
                                     np.mean(np.mean(xpr[iipar, :, :, :], 0),
                                             0))
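                ## dresult now maps the view index to (xbias, averaged test
                ## scores) and feeds the summary table printed below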

        for sfeature_type, tresult in dresult.items():
            ## xhead=cMMR.xbias
            xhead = ''
            lresult.append((xhead, tresult))

        ## lresult.sort()
        ## for litem in lresult:
        ##   print(litem)

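        ## print a LaTeX tabular with precision, recall and F1 per feature type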
        print('\\begin{tabular}{l|rrr}')
        print('& \\multicolumn{3}{c}{' + 'Objects' + '} \\\\')
        print('Feature type & Precision & Recall & F1 \\\\ \\hline')
        for litem in lresult:
            print(litem[0],' & ','%6.4f'%litem[1][1][0], \
                  ' & ','%6.4f'%litem[1][1][1],' & ','%6.4f'%litem[1][1][2],' \\\\')
        print('\\end{tabular}')

        ## print('\\begin{tabular}{l|rrr}')
        ## print('& \\multicolumn{3}{c}{'+'Objects'+'} \\\\')
        ## print('Feature & xbias & Precision & Recall & F1 \\\\ \\hline')
        ## for litem in lresult:
        ##   print(litem[0],' & ','%6.4f'%litem[1][0],' & ','%6.4f'%litem[1][1][0], \
        ##         ' & ','%6.4f'%litem[1][1][1],' & ','%6.4f'%litem[1][1][2],' \\\\')
        ## print('\\end{tabular}')

    ## ##########################################################
    ## !!!! Saves the optimal dual variables and the optimal, cross-validated
    ##  kernel parameters into the files configured by cdata_store.

    ## prepare full training with the best parameters

    ifold = 0
    xselector = np.ones(mdata)
    cMMR.split_train_test(xselector, ifold)
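    ## an all-ones selector with ifold=0 should place every sample into the
    ## training part (cf. the crossval_mode == 1 branch above)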
    best_param = np.array(
        [np.mean(xbest_param[:, :, i]) for i in range(nparam)])
    cMMR.penalty.c = best_param[0]
    cMMR.penalty.d = best_param[1]
    cMMR.XKernel[0].kernel_params.ipar1 = best_param[2]
    cMMR.XKernel[0].kernel_params.ipar2 = best_param[3]
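    ## the averaged C and D go to the penalty term; par1/par2 overwrite only the
    ## first input kernel's parameters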

    cMMR.compute_kernels()
    cMMR.Y0 = cMMR.YKernel.get_train(cMMR.itrain)  ## candidates
    ## training with the best parameters
    print('Full training')
    cOptDual = cMMR.mmr_train()

    np.savetxt(cdata_store.sbasedir+cdata_store.dual_params,cMMR.dual.alpha, \
               fmt='%9.4f')
    np.savetxt(cdata_store.sbasedir+cdata_store.kernel_params,best_param[2:], \
               fmt='%9.4f')
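    ## the dual variables and the averaged kernel parameters are written next to
    ## the data, using the paths held by cdata_store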

    print(xclassconfusion)

    print('Bye')

    return
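
## ---------------------------------------------------------------------------
## A minimal sketch (ours, not part of the example above): the balanced fold
## assignment used in the crossval_mode == 0 branch deals fold ids out
## round-robin and then shuffles them, so fold sizes differ by at most one
## sample.  The helper name balanced_fold_selector is made up for illustration.
import numpy as np

def balanced_fold_selector(mdata, nfold, rng=np.random):
    xselector = (np.arange(mdata) % nfold).astype(float)  # 0,1,...,nfold-1,0,1,...
    rng.shuffle(xselector)  # randomise which sample ends up in which fold
    return xselector

## e.g. np.bincount(balanced_fold_selector(10, 4).astype(int)) gives array([3, 3, 2, 2])
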
Example no. 8
0
def mmr_main(iworkmode):

    params = mmr_setparams.cls_params()

    ## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@

    list_features=["annot","DenseHue","DenseHueV3H1", \
                  "DenseSift","DenseSiftV3H1","Gist", \
                  "HarrisHue","HarrisHueV3H1","HarrisSift", \
                  "HarrisSiftV3H1","Hsv","HsvV3H1","Lab", \
                  "LabV3H1","Rgb","RgbV3H1"]

    ## data files in the corresponding directories
    datadirs = ['corel5k', 'espgame', 'iaprtc12', 'mirflickr', 'pascal07']

    ## Example lfeatures=[4,8] means we selected the features:
    ##                                "DenseSiftV3H1" and  "HarrisSift"
    lfeatures = [4]
    params.ninputview = len(lfeatures)
    idata = 0  ## process corel5k data set
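    ## idata indexes datadirs, so 0 selects the corel5k directory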

    xdatacls = mmr_mmr_cls.cls_mmr(params.ninputview)
    nfold = xdatacls.nfold
    nrepeat = xdatacls.nrepeat
    print('Xbias:', xdatacls.xbias)

    cdata_store = mmr_load_data.cls_data_load()
    cdata_store.load_data(xdatacls, idata, lfeatures)
    mdata = xdatacls.mdata

    ## initializing the arrays collecting the results
    nscore = 4
    nipar = 1
    if xdatacls.crossval_mode == 0:  ## random
        nfold0 = nfold
        xresult_test = np.zeros((nipar, nrepeat, nfold0))
        xresult_train = np.zeros((nipar, nrepeat, nfold0))
        xpr = np.zeros((nipar, nrepeat, nfold0, nscore))
    elif xdatacls.crossval_mode == 1:  ## predefined training and test
        nrepeat = 1
        nfold0 = 1
        xresult_test = np.zeros((nipar, nrepeat, nfold0))
        xresult_train = np.zeros((nipar, nrepeat, nfold0))
        xpr = np.zeros((nipar, nrepeat, nfold0, nscore))

    ## -----------------------------------------------
    print('Output kernel type: ', xdatacls.YKernel.kernel_params.kernel_type)
    for i in range(params.ninputview):
        print(i, 'Input kernel type: ',
              xdatacls.XKernel[i].kernel_params.kernel_type)
    ## -------------------------------------------------

    xcross = np.zeros((mdata, mdata))
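    ## note: xcross is allocated here but not referenced again in this function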

    xtime = np.zeros(5)
    ## ############################################################
    nparam = 4  ## C,D,par1,par2
    xbest_param = np.zeros((nrepeat, nfold0, nparam))

    for iipar in range(nipar):

        print('===================================================')
        for irepeat in range(nrepeat):

            xdatacls.prepare_repetition_training(nfold0)
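            ## builds the train/test fold selector for this repetition; together
            ## with prepare_fold_training below it replaces the explicit xselector
            ## handling of the previous example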

            for ifold in range(nfold0):

                xdatacls.prepare_fold_training(ifold)

                ## validation to choose the best parameters
                print('Validation')
                t0 = time.clock()
                xdatacls.set_validation()
                cvalidation = mmr_validation_cls.cls_mmr_validation()
                cvalidation.validation_rkernel = xdatacls.XKernel[0].title
                best_param = cvalidation.mmr_validation(xdatacls)

                xtime[0] = time.clock() - t0

                print('Best parameters found by validation')
                print('c: ', best_param.c)
                print('d: ', best_param.d)
                print('par1: ', best_param.par1)
                print('par2: ', best_param.par2)
                xbest_param[irepeat, ifold, 0] = best_param.c
                xbest_param[irepeat, ifold, 1] = best_param.d
                xbest_param[irepeat, ifold, 2] = best_param.par1
                xbest_param[irepeat, ifold, 3] = best_param.par2

                xdatacls.compute_kernels()
                xdatacls.Y0 = xdatacls.YKernel.get_train(
                    xdatacls.itrain)  ## candidates

                ## training with the best parameters
                print('Training')

                print(xdatacls.YKernel.kernel_params.kernel_type, \
                      xdatacls.YKernel.kernel_params.ipar1, \
                      xdatacls.YKernel.kernel_params.ipar2)
                for iview in range(xdatacls.ninputview):
                    print(xdatacls.XKernel[iview].kernel_params.kernel_type, \
                          xdatacls.XKernel[iview].kernel_params.ipar1, \
                          xdatacls.XKernel[iview].kernel_params.ipar2)

                t0 = time.clock()
                cOptDual = xdatacls.mmr_train()
                xtime[1] = time.clock() - t0
                ## cls transfers the dual variables to the test procedure
                ## compute tests
                ## check the train accuracy
                print('Test')
                cPredictTra = xdatacls.mmr_test(cOptDual, itraindata=0)
                ## counts the proportion of labels predicted correctly
                ## ######################################
                if xdatacls.itestmode == 2:
                    print('Test knn')
                    ypred=inverse_knn(xdatacls.YKernel.get_Y0(xdatacls.itrain), \
                                      cPredictTra)
                else:
                    ypred = cPredictTra.zPred
                cEvaluationTra= \
                      mmr_eval_binvector(xdatacls.YKernel.get_train(xdatacls.itrain), \
                                         ypred)
                xresult_train[iipar, irepeat, ifold] = cEvaluationTra.accuracy
                print('>>>>>>>>>>>\n', cEvaluationTra.confusion)
                ## ######################################
                ## check the test accuracy
                t0 = time.clock()
                cPredictTes = xdatacls.mmr_test(cOptDual, itraindata=1)
                ## counts the proportion of labels predicted correctly
                if xdatacls.itestmode == 2:
                    ypred=inverse_knn(xdatacls.YKernel.get_Y0(xdatacls.itrain), \
                                      cPredictTes)
                else:
                    ypred = cPredictTes.zPred
                ## cEvaluationTes=mmr_eval_binvector(cData.YTest,cPredictTes.zPred)
                cEvaluationTes= \
                      mmr_eval_binvector(xdatacls.YKernel.get_test(xdatacls.itest), \
                                         ypred)

                xtime[2] = time.clock() - t0
                xresult_test[iipar, irepeat, ifold] = cEvaluationTes.accuracy

                xpr[iipar, irepeat, ifold, 0] = cEvaluationTes.precision
                xpr[iipar, irepeat, ifold, 1] = cEvaluationTes.recall
                xpr[iipar, irepeat, ifold, 2] = cEvaluationTes.f1
                xpr[iipar, irepeat, ifold, 3] = cEvaluationTes.accuracy

                print(cEvaluationTes.confusion)
                print(cEvaluationTes.classconfusion)
                try:
                    xclassconfusion += cEvaluationTes.classconfusion
                except NameError:
                    (n, n) = cEvaluationTes.classconfusion.shape
                    xclassconfusion = np.zeros((n, n))
                    xclassconfusion += cEvaluationTes.classconfusion
                ## mmr_eval_label(ZW,iPre,YTesN,Y0,kit_data,itest,params)

    ## ####################################
                print('Parameter:',iipar,'Repetition: ',irepeat, \
                      'Fold: ',ifold)
                mmr_report.mmr_report('Result on one fold',
                           xresult_train[iipar,irepeat,ifold], \
                           xresult_test[iipar,irepeat,ifold], \
                           xpr[iipar,irepeat,ifold,:])
                print(
                    np.sum(xpr[iipar, irepeat, :ifold + 1, :], 0) /
                    (ifold + 1))

            mmr_report.mmr_report('Result on one repetition',
                       np.mean(xresult_train[iipar,irepeat,:]), \
                       np.mean(xresult_test[iipar,irepeat,:]), \
                       np.mean(xpr[iipar,irepeat,:,:],0))

        mmr_report.mmr_report('Result on all repetitions @@@@@@@',
                   np.mean(xresult_train[iipar,:,:].flatten()), \
                   np.mean(xresult_test[iipar,:,:].flatten()), \
                   np.mean(np.mean(xpr[iipar,:,:,:],0),0))

        print('Average best parameters')
        xlabels = ('c', 'd', 'par1', 'par2')
        for i in range(nparam):
            print(xlabels[i],': ',np.mean(xbest_param[:,:,i]), \
                    '(',np.std(xbest_param[:,:,i]),')')

        print('xtime:', xtime)
        sys.stdout.flush()

    print('Bye')

    return
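
## ---------------------------------------------------------------------------
## A minimal sketch (ours, not from the example above) of how the score tensor
## xpr is reduced for the reports: averaging over the fold axis gives the
## per-repetition numbers, averaging over repetitions and folds gives the
## overall precision/recall/F1/accuracy.  Shapes below are illustrative only.
import numpy as np

nipar, nrepeat, nfold0, nscore = 1, 2, 5, 4
xpr = np.random.rand(nipar, nrepeat, nfold0, nscore)    # [param, repeat, fold, score]

iipar, irepeat = 0, 1
per_repetition = np.mean(xpr[iipar, irepeat, :, :], 0)  # mean over folds -> shape (4,)
overall = np.mean(np.mean(xpr[iipar, :, :, :], 0), 0)   # mean over repeats, then folds
print(per_repetition, overall)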