def __init__(
    self,
    endog,
    exog,
    window=None,
    *,
    weights=None,
    min_nobs=None,
    missing="drop",
    expanding=False,
):
    # Call Model.__init__ twice: the first pass runs constant detection
    # (with missing-value handling), the second pass avoids dropping rows
    missing = string_like(
        missing, "missing", options=("drop", "raise", "skip")
    )
    temp_msng = "drop" if missing != "raise" else "raise"
    Model.__init__(self, endog, exog, missing=temp_msng, hasconst=None)
    k_const = self.k_constant
    const_idx = self.data.const_idx
    Model.__init__(self, endog, exog, missing="none", hasconst=False)
    self.k_constant = k_const
    self.data.const_idx = const_idx
    self._y = array_like(endog, "endog")
    nobs = self._y.shape[0]
    self._x = array_like(exog, "exog", ndim=2, shape=(nobs, None))
    window = int_like(window, "window", optional=True)
    weights = array_like(weights, "weights", optional=True, shape=(nobs,))
    self._window = window if window is not None else self._y.shape[0]
    self._weighted = weights is not None
    self._weights = np.ones(nobs) if weights is None else weights
    w12 = np.sqrt(self._weights)
    self._wy = w12 * self._y
    self._wx = w12[:, None] * self._x

    min_nobs = int_like(min_nobs, "min_nobs", optional=True)
    self._min_nobs = min_nobs if min_nobs is not None else self._x.shape[1]
    if self._min_nobs < self._x.shape[1] or self._min_nobs > self._window:
        raise ValueError(
            "min_nobs must be at least the number of regressors in the "
            "model and no larger than window"
        )

    self._expanding = expanding

    self._is_nan = np.zeros_like(self._y, dtype=bool)
    self._has_nan = self._find_nans()
    self.const_idx = self.data.const_idx
    self._skip_missing = missing == "skip"
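# A minimal usage sketch (an assumption, not part of this module: this
# constructor is statsmodels' RollingWLS from statsmodels.regression.rolling,
# and the data below is synthetic):
#
#     import numpy as np
#     from statsmodels.regression.rolling import RollingWLS
#
#     rng = np.random.default_rng(0)
#     exog = np.column_stack([np.ones(200), rng.standard_normal(200)])
#     endog = exog @ np.array([1.0, 0.5]) + rng.standard_normal(200)
#     res = RollingWLS(endog, exog, window=60, min_nobs=30).fit()
#
# Passing min_nobs=1 here would trip the validation above, since the model
# has two regressors and min_nobs must be at least exog.shape[1].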
def _handle_data(self, endog, exog, missing, hasconst, **kwargs):
    return Model._handle_data(
        self, endog, exog, missing, hasconst, **kwargs
    )
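# Numerical sketch of the sqrt-weight transform used in __init__ above (an
# illustration with synthetic data, not part of the original module):
# premultiplying by sqrt(w) reduces weighted least squares to ordinary least
# squares, which is presumably why __init__ stores the transformed arrays
# self._wy and self._wx for the downstream fit.
if __name__ == "__main__":
    import numpy as np

    rng = np.random.default_rng(0)
    x = rng.standard_normal((50, 2))
    y = x @ np.array([1.0, -2.0]) + rng.standard_normal(50)
    w = rng.uniform(0.5, 2.0, size=50)

    # OLS on the transformed data (sqrt(w) * y, sqrt(w)[:, None] * x) ...
    w12 = np.sqrt(w)
    beta_ols = np.linalg.lstsq(w12[:, None] * x, w12 * y, rcond=None)[0]
    # ... matches the closed-form WLS solution (x'Wx)^{-1} x'Wy.
    beta_wls = np.linalg.solve(x.T @ (w[:, None] * x), x.T @ (w * y))
    assert np.allclose(beta_ols, beta_wls)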