def _validate_arguments(self):
    """Validate and sanitize the term's parameters.

    Parameters
    ----------
    None

    Returns
    -------
    self
    """
    # dtype
    if self.dtype not in ['numerical', 'categorical']:
        raise ValueError("dtype must be in ['numerical','categorical'], "
                         "but found dtype = {}".format(self.dtype))

    # fit_linear XOR fit_splines
    if self.fit_linear == self.fit_splines:
        raise ValueError('term must have fit_linear XOR fit_splines, but found: '
                         'fit_linear = {}, fit_splines = {}'
                         .format(self.fit_linear, self.fit_splines))

    # penalties
    if not isiterable(self.penalties):
        self.penalties = [self.penalties]

    for i, p in enumerate(self.penalties):
        if not (hasattr(p, '__call__') or (p in PENALTIES) or (p is None)):
            raise ValueError("penalties must be callable or in "
                             "{}, but found {} for {}th penalty"
                             .format(list(PENALTIES.keys()), p, i))

    # check lams and distribute to penalties
    if not isiterable(self.lam):
        self.lam = [self.lam]

    for lam in self.lam:
        check_param(lam, param_name='lam', dtype='float', constraint='>= 0')

    if len(self.lam) == 1:
        self.lam = self.lam * len(self.penalties)

    if len(self.lam) != len(self.penalties):
        raise ValueError('expected 1 lam per penalty, but found '
                         'lam = {}, penalties = {}'.format(self.lam, self.penalties))

    # constraints
    if not isiterable(self.constraints):
        self.constraints = [self.constraints]

    for i, c in enumerate(self.constraints):
        if not (hasattr(c, '__call__') or (c in CONSTRAINTS) or (c is None)):
            raise ValueError("constraints must be callable or in "
                             "{}, but found {} for {}th constraint"
                             .format(list(CONSTRAINTS.keys()), c, i))

    return self
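# Usage sketch (illustration, not library code): one way to trip the dtype
# check above. Assumes pyGAM's public SplineTerm; _validate_arguments is a
# private method, so calling it directly here is purely for demonstration.
#
# from pygam.terms import SplineTerm
#
# term = SplineTerm(0)        # a valid spline term on feature 0
# term.dtype = 'ordinal'      # not in ['numerical', 'categorical']
# try:
#     term._validate_arguments()
# except ValueError as err:
#     print(err)              # dtype must be in ['numerical','categorical'], ...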
def __setattr__(self, name, value):
    """Distribute assignments to plural attributes across all sub-terms."""
    if self._has_terms() and name in self._super_get('_plural'):
        # get the total number of arguments
        size = np.atleast_1d(flatten(getattr(self, name))).size

        # check shapes
        if isiterable(value):
            value = flatten(value)
            if len(value) != size:
                raise ValueError('Expected {} to have length {}, but found {} = {}'
                                 .format(name, size, name, value))
        else:
            value = [value] * size

        # now set each term's sequence of arguments
        for term in self._get_terms()[::-1]:
            # skip intercept
            if term.isintercept:
                continue

            # how many values does this term get?
            n = np.atleast_1d(getattr(term, name)).size

            # pop the next n values off the end and set them on this term
            vals = [value.pop() for _ in range(n)][::-1]
            setattr(term, name, vals[0] if n == 1 else vals)

            term._validate_arguments()

        # the values now live on the sub-terms, so do not store them here
        return
    super(MetaTermMixin, self).__setattr__(name, value)
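# Usage sketch (illustration, not library code): this broadcasting is what
# lets a single scalar fan out across sub-terms. TensorTerm mixes in
# MetaTermMixin and 'lam' is one of its plural attributes, so a scalar
# assignment is copied to every marginal term; the exact nesting of the
# printed value is an assumption here.
#
# from pygam import te
#
# tensor = te(0, 1)      # tensor term with two marginal spline terms
# tensor.lam = 0.25      # scalar is broadcast: one lam per marginal penalty
# print(tensor.lam)      # roughly [[0.25], [0.25]] -- one list per marginal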
def _parse_terms(self, args, **kwargs):
    m = len(args)
    if m < 2:
        raise ValueError('TensorTerm requires at least 2 marginal terms')

    # broadcast scalar kwargs to one value per marginal term
    for k, v in kwargs.items():
        if isiterable(v):
            if len(v) != m:
                raise ValueError('Expected {} to have length {}, but found {} = {}'
                                 .format(k, m, k, v))
        else:
            kwargs[k] = [v] * m

    terms = []
    for i, arg in enumerate(np.atleast_1d(args)):
        if isinstance(arg, TensorTerm):
            raise ValueError('TensorTerm does not accept other TensorTerms. '
                             'Please build a flat TensorTerm instead of a nested one.')

        if isinstance(arg, Term):
            if self.verbose and kwargs:
                warnings.warn('kwargs are skipped when Term instances are '
                              'passed to TensorTerm constructor')
            terms.append(arg)
            continue

        # otherwise treat the argument as a feature index
        # and build a SplineTerm for it
        kwargs_ = {'n_splines': self._N_SPLINES}
        kwargs_.update({k: v[i] for k, v in kwargs.items()})

        terms.append(SplineTerm(arg, **kwargs_))

    return terms
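# Usage sketch (illustration, not library code): _parse_terms broadcasts
# scalar kwargs to every marginal, while list-valued kwargs must supply one
# entry per marginal. Assumes pyGAM's public te() builder, which constructs
# a TensorTerm.
#
# from pygam import te
#
# both = te(0, 1, n_splines=10)        # both marginals get n_splines=10
# each = te(0, 1, n_splines=[10, 5])   # one n_splines per marginal
# # te(0, 1, n_splines=[10, 5, 3])     # ValueError: expected length 2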
def _has_terms(self):
    """bool, whether the instance has any sub-terms
    """
    loc = self._super_get('_term_location')
    return (self._super_has(loc)
            and isiterable(self._super_get(loc))
            and len(self._super_get(loc)) > 0
            and all(isinstance(term, Term) for term in self._super_get(loc)))
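# Usage sketch (illustration, not library code): _has_terms is the guard
# that decides whether an attribute assignment should be broadcast to
# sub-terms (see __setattr__ above). It is private, so calling it directly
# is for demonstration only.
#
# from pygam import te
#
# print(te(0, 1)._has_terms())   # True: it holds two marginal SplineTerms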