def copy(self, new_obj):
    """Populate ``new_obj`` with a training-restricted copy of this object.

    The relational data components are reduced to the items selected by
    ``self.itrain``; the kernel objects are duplicated through their own
    ``copy`` methods, and the scalar configuration fields are carried
    over by plain assignment (shared references, not deep copies).
    """
    nkernel = len(self.XKernel)

    ## keep only the training slice of each relational data component
    ## NOTE(review): assumes self.xdata_rel[i] supports indexing by the
    ## index collection self.itrain (e.g. numpy fancy indexing) -- confirm
    ## against the data loader.
    new_obj.xdata_rel = [component[self.itrain] for component in self.xdata_rel]

    ## duplicate the kernel objects
    for ikernel in range(nkernel):
        new_obj.XKernel[ikernel] = self.XKernel[ikernel].copy()
    new_obj.YKernel = self.YKernel.copy()

    new_obj.set_validation()

    ## fresh penalty object carrying over the C, D regularization constants
    new_obj.penalty = base.cls_penalty()
    new_obj.penalty.c = self.penalty.c
    new_obj.penalty.d = self.penalty.d
    new_obj.penalty.crossval = self.penalty.crossval

    ## scalar configuration fields, transferred by assignment
    new_obj.glm_model = self.glm_model
    new_obj.nrow = self.nrow
    new_obj.ncol = self.ncol
    new_obj.itestmode = self.itestmode
    new_obj.kmode = self.kmode
    new_obj.xbias = self.xbias
    new_obj.ieval_type = self.ieval_type
    new_obj.ibinary = self.ibinary
    new_obj.categorymax = self.categorymax
    new_obj.Y0 = self.Y0
    new_obj.rowcol = self.rowcol
def copy(self, new_obj):
    """Copy the training-relevant state of this object into ``new_obj``.

    Relational data is restricted to the rows indexed by ``self.itrain``,
    kernels are duplicated, and scalar settings are assigned over (the
    non-kernel attributes are shared by reference).
    """
    ## reduce every relational data component to the training items
    ## NOTE(review): assumes each component accepts indexing by
    ## self.itrain (e.g. a numpy index array) -- verify with callers.
    ndata = len(self.xdata_rel)
    new_obj.xdata_rel = [None] * ndata
    for idata in range(ndata):
        new_obj.xdata_rel[idata] = self.xdata_rel[idata][self.itrain]

    ## duplicate the kernel objects
    for ikernel in range(len(self.XKernel)):
        new_obj.XKernel[ikernel] = self.XKernel[ikernel].copy()
    new_obj.YKernel = self.YKernel.copy()

    new_obj.set_validation()

    ## rebuild the penalty object and transfer the C, D constants
    penalty = base.cls_penalty()
    penalty.c = self.penalty.c
    penalty.d = self.penalty.d
    penalty.crossval = self.penalty.crossval
    new_obj.penalty = penalty

    ## plain attribute transfer of the remaining configuration fields
    for field in ("glm_model", "nrow", "ncol", "itestmode", "kmode",
                  "xbias", "ieval_type", "ibinary", "categorymax",
                  "Y0", "rowcol"):
        setattr(new_obj, field, getattr(self, field))
def __init__(self, ninputview):
    """Initialize the learner state for ``ninputview`` input views.

    Sets up the kernel containers, the (C, D) penalty parameters, the
    perceptron sub-configuration and the cross-validation defaults.
    """
    base.cls_data.__init__(self, ninputview)

    ## kernel objects: one input kernel per view, plus the output kernel
    self.XKernel = [None] * ninputview
    self.YKernel = None
    self.KX = None
    self.KXCross = None
    self.KY = None
    self.d1x = None
    self.d2x = None
    ## NOTE(review): the original assigned "self.d1x = None" a second time
    ## here; that dead duplicate was removed -- it may have been a typo for
    ## a different attribute, confirm against the rest of the class.

    ## setting the C, D penalty term parameters
    self.penalty = base.cls_penalty()
    self.penalty.set_crossval()

    ## perceptron
    self.iperceptron = 0
    self.perceptron = base.cls_perceptron_param()

    ## other classes
    self.dual = None

    self.xbias = 0.0  ## (a commented-out alternative in the original: -0.6)
    self.kmode = 1    ## =0 additive (feature concatenation)
                      ## =1 multiplicative (feature tensor product)

    self.ifixtrain = None
    self.ifixtest = None
    self.crossval_mode = 0  ## =0 random cross folds, =1 fix training

    ## itestmode can be 2 if YKernel is linear
    self.itestmode = 0  ## 2: against the training with knn, 10: vectorwise,
                        ## 20: Y0 is available

    self.nrepeat = 10000  ## number of repetitions of the folding
    self.nfold = 5        ## number of folds

    self.testknn = 5
    self.ieval_type = 0
    self.mdata = 0
def __init__(self, ninputview):
    """Set up kernels, penalty and cross-validation defaults for the learner.

    ``ninputview`` gives the number of input views; one input kernel slot
    is created per view.
    """
    base.cls_data.__init__(self, ninputview)

    ## kernel containers
    self.XKernel = [None] * ninputview
    self.YKernel = None
    self.KX = None
    self.KXCross = None
    self.KY = None
    self.d1x = None
    self.d2x = None
    ## NOTE(review): a redundant second "self.d1x = None" appeared here in
    ## the original; it was dropped as dead code (possibly a typo for
    ## another attribute -- verify).

    ## C, D penalty term parameters
    self.penalty = base.cls_penalty()
    self.penalty.set_crossval()

    ## perceptron configuration
    self.iperceptron = 0
    self.perceptron = base.cls_perceptron_param()

    ## other classes
    self.dual = None

    ## projective bias penalty (a commented-out -0.6 existed in the original)
    self.xbias = 0.0
    ## =0 additive (feature concatenation),
    ## =1 multiplicative (feature tensor product)
    self.kmode = 1

    self.ifixtrain = None
    self.ifixtest = None
    ## =0 random cross folds, =1 fix training
    self.crossval_mode = 0

    ## itestmode can be 2 if YKernel is linear;
    ## 2: against the training with knn, 10: vectorwise, 20: Y0 is available
    self.itestmode = 0

    ## folding: repetitions and number of folds
    self.nrepeat = 5000
    self.nfold = 5

    self.testknn = 5
    self.ieval_type = 0
    self.mdata = 0
def copy(self, new_obj, itrain):
    """Copy the training-restricted state of this object into ``new_obj``.

    The kernels are duplicated on their training slice; the penalty
    parameters and the scalar configuration fields are carried over by
    assignment.

    NOTE(review): the kernels are sliced with the attribute
    ``self.itrain`` while ``new_obj.mdata`` is taken from the ``itrain``
    parameter -- confirm these are meant to be the same index set.
    """
    ## duplicate every input kernel on its training slice
    for ikernel, xkernel in enumerate(self.XKernel):
        train_slice = xkernel.get_train(self.itrain)
        new_obj.XKernel[ikernel] = xkernel.copy(train_slice)

    ## duplicate the output kernel the same way
    train_slice = self.YKernel.get_train(self.itrain)
    new_obj.YKernel = self.YKernel.copy(train_slice)

    new_obj.set_validation()

    ## fresh penalty object with the same (C, D) regularization constants
    new_obj.penalty = base.cls_penalty()
    new_obj.penalty.c = self.penalty.c
    new_obj.penalty.d = self.penalty.d
    new_obj.penalty.crossval = self.penalty.crossval

    new_obj.mdata = len(itrain)
    new_obj.itestmode = self.itestmode
    new_obj.testknn = self.testknn
    new_obj.kmode = self.kmode
    new_obj.xbias = self.xbias
def copy(self, new_obj, itrain):
    """Transfer a training-restricted copy of this object into ``new_obj``.

    Input and output kernels are rebuilt from their training portions;
    penalty constants and scalar settings are assigned over.

    NOTE(review): slicing uses the attribute ``self.itrain`` whereas
    ``mdata`` is derived from the ``itrain`` argument -- verify both refer
    to the same training index set.
    """
    nkernel = len(self.XKernel)
    for ikernel in range(nkernel):
        kernel = self.XKernel[ikernel]
        new_obj.XKernel[ikernel] = kernel.copy(kernel.get_train(self.itrain))
    new_obj.YKernel = self.YKernel.copy(self.YKernel.get_train(self.itrain))

    new_obj.set_validation()

    ## rebuild the penalty object, then carry the C, D constants over
    new_penalty = base.cls_penalty()
    new_penalty.c = self.penalty.c
    new_penalty.d = self.penalty.d
    new_penalty.crossval = self.penalty.crossval
    new_obj.penalty = new_penalty

    ## scalar configuration
    new_obj.mdata = len(itrain)
    new_obj.itestmode = self.itestmode
    new_obj.testknn = self.testknn
    new_obj.kmode = self.kmode
    new_obj.xbias = self.xbias
def __init__(self, ninputview=1):
    """Initialize the MVM (relation-table) learner state.

    ``ninputview`` is the number of input views; one input kernel slot is
    allocated per view.
    """
    base.cls_data.__init__(self, ninputview)

    self.XKernel = [None] * ninputview  ## list of input kernel objects
    self.YKernel = None  ## output kernel object
    self.KX = None       ## compound input kernel
    self.KY = None       ## output kernel

    ## mvm specific
    self.xdata_rel = None  ## all data tuples (irow, icol, value)
    self.xdata_tra = None  ## training tuples
    self.xdata_tes = None  ## test tuples
    self.xranges_rel = None       ## training ranges: start position and
                                  ## length of the known items of each row
                                  ## in the sparse representation
    self.xranges_rel_test = None  ## the same ranges for the test
    self.KXvar = None
    self.glm_model = None      ## parameters (means) of the GLM model:
                               ## total mean, row means, column means
    self.largest_class = None  ## row-wise largest classes when the cell
                               ## values are class indexes

    self.penalty = base.cls_penalty()  ## (C, D) penalty term parameters

    ## other classes
    self.dual = None  ## vector of dual variables computed by the solver

    self.xbias = 0  ## penalty term for projective bias; it can be =0
    self.kmode = 0  ## =0 additive (feature concatenation)
                    ## =1 multiplicative (feature tensor product)

    self.ifixtrain = None   ## xdata_rel relative indexes of the fixed training
    self.ifixtest = None    ## xdata_rel relative indexes of the fixed test
    self.crossval_mode = 0  ## =0 random cross folds, =1 fixed training
    self.itestmode = 3      ## =0 active learning, =1,2 random subsets,
                            ## =3 fixed training/test
    self.ibootstrap = 2     ## =0 random, =1 worst case, =2 best case,
                            ## =3 alternate between worst case and random

    self.nrepeat = 1   ## number of repetitions of the folding
    self.nfold = 2     ## number of folds
    self.nrepeat0 = 1  ## number of effective repetitions of the folding
    self.nfold0 = 2    ## number of effective folds

    self.ieval_type = 0  ## =0 category, =1 RMSE, =2 MAE
    self.ibinary = 1     ## =1 Y0=[-1,+1], =0 [0,1,...,categorymax-1]

    ## mvm specific
    self.category = 0  ## =0 rank cells, =1 category cells, =2 {-1,0,+1}^n,
                       ## =3 joint table on all categories
    self.categorymax = 0
    self.ndata = 0  ## number of non-missing examples in the relation table
    self.ncol = 0   ## number of columns in the relation table
    self.nrow = 0   ## number of rows in the relation table

    ## test
    self.verbose = 0

    ## row-column exchange
    self.rowcol = 0  ## =0 row-col order, =1 col-row order

    ## for n-fold cross validation
    self.xselector = None  ## 1d array randomly loaded with the fold
                           ## indexes used to select the training and
                           ## test sets in the cross-validation

    ## active learning pointers
    self.icandidate_w = -1  ## test-relative index of the worst case in
                            ## prediction by confidence
    self.icandidate_b = -1  ## test-relative index of the best case in
                            ## prediction by confidence

    self.testontrain = 0  ## =0 test on test, =1 test on train
    self.knowntest = 1    ## =0 test items are unknown, =1 known

    self.confidence_scale = 2  ## scale parameter (e.g. standard deviation)
                               ## of the distribution used in the
                               ## confidence estimation
    self.confidence_local = 0  ## localization parameter, e.g. mean
def __init__(self, ninputview=1):
    """Build the default state of the MVM relation-table learner.

    One input kernel slot is created for each of the ``ninputview`` views.
    """
    base.cls_data.__init__(self, ninputview)

    ## kernel containers: per-view input kernels and the output kernel
    self.XKernel = [None] * ninputview
    self.YKernel = None
    ## compound input kernel and output kernel
    self.KX = None
    self.KY = None

    ## --- mvm specific data holders ---
    ## all data tuples (irow, icol, value), then training / test tuples
    self.xdata_rel = None
    self.xdata_tra = None
    self.xdata_tes = None
    ## training ranges: start position and length of the known items of
    ## each row in the sparse representation; and the same for the test
    self.xranges_rel = None
    self.xranges_rel_test = None
    self.KXvar = None
    ## parameters (means) of the GLM model: total mean, row and column means
    self.glm_model = None
    ## row-wise largest classes when the cell values are class indexes
    self.largest_class = None

    ## (C, D) penalty term parameters
    self.penalty = base.cls_penalty()

    ## other classes: the vector of dual variables computed by the solver
    self.dual = None

    ## penalty term for the projective bias; may be 0
    self.xbias = 0
    ## =0 additive (feature concatenation),
    ## =1 multiplicative (feature tensor product)
    self.kmode = 0

    ## xdata_rel relative indexes of the fixed training and test sets
    self.ifixtrain = None
    self.ifixtest = None
    ## =0 random cross folds, =1 fixed training
    self.crossval_mode = 0
    ## =0 active learning, =1,2 random subsets, =3 fixed training/test
    self.itestmode = 3
    ## =0 random, =1 worst case, =2 best case,
    ## =3 alternate between worst case and random
    self.ibootstrap = 2

    ## requested and effective folding parameters
    self.nrepeat = 1
    self.nfold = 2
    self.nrepeat0 = 1
    self.nfold0 = 2

    ## =0 category, =1 RMSE, =2 MAE
    self.ieval_type = 0
    ## =1 Y0=[-1,+1], =0 [0,1,...,categorymax-1]
    self.ibinary = 1

    ## --- mvm specific cell typing ---
    ## =0 rank cells, =1 category cells, =2 {-1,0,+1}^n,
    ## =3 joint table on all categories
    self.category = 0
    self.categorymax = 0
    ## number of non-missing examples, columns and rows of the relation table
    self.ndata = 0
    self.ncol = 0
    self.nrow = 0

    ## test
    self.verbose = 0

    ## row-column exchange: =0 row-col order, =1 col-row order
    self.rowcol = 0

    ## n-fold cross validation: 1d array randomly loaded with the fold
    ## indexes selecting the training and test sets
    self.xselector = None

    ## active learning pointers: test-relative indexes of the worst and
    ## best cases in prediction by confidence
    self.icandidate_w = -1
    self.icandidate_b = -1

    ## =0 test on test, =1 test on train
    self.testontrain = 0
    ## =0 test items are unknown, =1 known
    self.knowntest = 1

    ## scale parameter (e.g. standard deviation) of the distribution used
    ## in the confidence estimation
    self.confidence_scale = 2
    ## localization parameter, e.g. mean
    self.confidence_local = 0