def test_studentst(self):
        dist = StudentsT()
        v = 4.0
        ll1 = dist.loglikelihood(np.array([v]), self.resids, self.sigma2)
        # Direct calculation of PDF, then log
        constant = np.exp(gammaln(0.5 * (v + 1)) - gammaln(0.5 * v))
        pdf = constant / np.sqrt(np.pi * (v - 2) * self.sigma2)
        pdf *= (1 + self.resids ** 2.0 / (self.sigma2 * (v - 2))) ** (
            -(v + 1) / 2)
        ll2 = np.log(pdf).sum()
        assert_almost_equal(ll1, ll2)

        assert_equal(dist.num_params, 1)

        bounds = dist.bounds(self.resids)
        assert_equal(len(bounds), 1)

        A, b = dist.constraints()
        assert_equal(A.shape, (2, 1))

        k = stats.kurtosis(self.resids, fisher=False)
        sv = max((4.0 * k - 6.0) / (k - 3.0) if k > 3.75 else 12.0, 4.0)
        assert_array_equal(dist.starting_values(self.resids), np.array([sv]))

        assert_raises(ValueError, dist.simulate, np.array([1.5]))
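In equation form, the density computed by hand above (and checked against StudentsT.loglikelihood) is the standardized Student's t, with e_t the residual, \sigma_t^2 the conditional variance and \nu the degrees of freedom:

    f(e_t \mid \nu, \sigma_t^2) = \frac{\Gamma\left(\frac{\nu+1}{2}\right)}{\Gamma\left(\frac{\nu}{2}\right)\sqrt{\pi(\nu-2)\sigma_t^2}} \left(1 + \frac{e_t^2}{\sigma_t^2(\nu-2)}\right)^{-\frac{\nu+1}{2}}

The starting value checked a few lines down is the method-of-moments estimate obtained by solving the kurtosis relation \kappa = 3(\nu-2)/(\nu-4) for \nu, i.e. \nu_0 = \max\left(\frac{4\kappa - 6}{\kappa - 3}, 4\right) when the sample kurtosis \kappa exceeds 3.75, and 12 otherwise.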
Example #2
    def test_warnings_nonstationary(self):
        garch = GARCH()
        # alpha + beta = 0.2 + 0.8 = 1.0, so the GARCH process is not
        # covariance stationary and simulate() warns
        parameters = np.array([0.1, 0.2, 0.8, 4.0])
        studt = StudentsT()
        warnings.simplefilter('always', UserWarning)
        with pytest.warns(InitialValueWarning):
            garch.simulate(parameters, 1000, studt.simulate([4.0]))
Example #3
    def test_warnings(self):
        garch = GARCH()
        parameters = np.array([0.1, 0.2, 0.8, 4.0])
        studt = StudentsT()
        with warnings.catch_warnings(record=True) as w:
            garch.simulate(parameters, 1000, studt.simulate([4.0]))
            assert_equal(len(w), 1)

        harch = HARCH(lags=[1, 5, 22])
        parameters = np.array([0.1, 0.2, 0.4, 0.5])
        with warnings.catch_warnings(record=True) as w:
            harch.simulate(parameters, 1000, studt.simulate([4.0]))
            assert_equal(len(w), 1)
Example #5
    def test_studentst(self):
        dist = StudentsT()
        v = 4.0
        ll1 = dist.loglikelihood(np.array([v]), self.resids, self.sigma2)
        # Direct calculation of PDF, then log
        constant = np.exp(gammaln(0.5 * (v + 1)) - gammaln(0.5 * v))
        pdf = constant / np.sqrt(np.pi * (v - 2) * self.sigma2)
        pdf *= (1 + self.resids**2.0 / (self.sigma2 * (v - 2)))**(-(v + 1) / 2)
        ll2 = np.log(pdf).sum()
        assert_almost_equal(ll1, ll2)

        assert_equal(dist.num_params, 1)

        bounds = dist.bounds(self.resids)
        assert_equal(len(bounds), 1)

        a, b = dist.constraints()
        assert_equal(a.shape, (2, 1))

        k = stats.kurtosis(self.resids, fisher=False)
        sv = max((4.0 * k - 6.0) / (k - 3.0) if k > 3.75 else 12.0, 4.0)
        assert_array_equal(dist.starting_values(self.resids), np.array([sv]))

        with pytest.raises(ValueError):
            dist.simulate(np.array([1.5]))
Example #6
    def test_egarch(self):
        cm = ConstantMean(self.y)
        cm.volatility = EGARCH()
        res = cm.fit(update_freq=0, disp=DISPLAY)
        summ = res.summary()
        assert "Df Model:                            1" in str(summ)
        cm.distribution = StudentsT()
        cm.fit(update_freq=0, disp=DISPLAY)
Example #7
def test_invalid_params():
    pits = np.arange(1, 100.0) / 100.0
    dist = Normal()
    with pytest.raises(ValueError):
        dist.ppf(pits, [1.0])
    dist = StudentsT()
    with pytest.raises(ValueError):
        dist.ppf(pits, [1.0])
Example #8
    def test_ar_plot(self):
        ar = ARX(self.y, lags=1, volatility=GARCH(), distribution=StudentsT())
        res = ar.fit(disp=DISPLAY, update_freq=5, cov_type='mle')
        res.plot()
        res.plot(annualize='D')
        res.plot(annualize='W')
        res.plot(annualize='M')
        with pytest.raises(ValueError):
            res.plot(annualize='unknown')

        res.plot(scale=360)
        res.hedgehog_plot(start=500)
        res.hedgehog_plot(start=500, type='mean')
        res.hedgehog_plot(type='volatility')
        res.hedgehog_plot(start=500, method='simulation', simulations=100)
Example #9
    def test_ar_plot(self):
        ar = ARX(self.y, lags=1, volatility=GARCH(), distribution=StudentsT())
        res = ar.fit(disp=DISPLAY, update_freq=5, cov_type="mle")
        res.plot()
        res.plot(annualize="D")
        res.plot(annualize="W")
        res.plot(annualize="M")
        with pytest.raises(ValueError):
            res.plot(annualize="unknown")

        res.plot(scale=360)
        res.hedgehog_plot(start=500)
        res.hedgehog_plot(start=500, plot_type="mean")
        res.hedgehog_plot(plot_type="volatility")
        res.hedgehog_plot(start=500, method="simulation", simulations=100)
        res.hedgehog_plot(plot_type="volatility", method="bootstrap")
Example #10
def arch_model(
    y: Optional[ArrayLike],
    x: Optional[ArrayLike] = None,
    mean: str = "Constant",
    lags: Optional[Union[int, List[int], NDArray]] = 0,
    vol: str = "Garch",
    p: Union[int, List[int]] = 1,
    o: int = 0,
    q: int = 1,
    power: float = 2.0,
    dist: str = "Normal",
    hold_back: Optional[int] = None,
    rescale: Optional[bool] = None,
) -> HARX:
    """
    Initialization of common ARCH model specifications

    Parameters
    ----------
    y : {ndarray, Series, None}
        The dependent variable
    x : {np.array, DataFrame}, optional
        Exogenous regressors.  Ignored if model does not permit exogenous
        regressors.
    mean : str, optional
        Name of the mean model.  Currently supported options are: 'Constant',
        'Zero', 'LS', 'AR', 'ARX', 'HAR' and  'HARX'
    lags : int or list (int), optional
        Either a scalar integer value indicating lag length or a list of
        integers specifying lag locations.
    vol : str, optional
        Name of the volatility model.  Currently supported options are:
        'GARCH' (default), 'ARCH', 'EGARCH', 'FIGARCH' and 'HARCH'
    p : int, optional
        Lag order of the symmetric innovation
    o : int, optional
        Lag order of the asymmetric innovation
    q : int, optional
        Lag order of lagged volatility or equivalent
    power : float, optional
        Power to use with GARCH and related models
    dist : str, optional
        Name of the error distribution.  Currently supported options are:

            * Normal: 'normal', 'gaussian' (default)
            * Student's t: 't', 'studentst'
            * Skewed Student's t: 'skewstudent', 'skewt'
            * Generalized Error Distribution: 'ged', 'generalized error'

    hold_back : int
        Number of observations at the start of the sample to exclude when
        estimating model parameters.  Used when comparing models with different
        lag lengths to estimate on the common sample.
    rescale : bool
        Flag indicating whether to automatically rescale data if the scale
        of the data is likely to produce convergence issues when estimating
        model parameters. If False, the model is estimated on the data without
        transformation.  If True, then y is rescaled and the new scale is
        reported in the estimation results.

    Returns
    -------
    model : ARCHModel
        Configured ARCH model

    Examples
    --------
    >>> import datetime as dt
    >>> import pandas_datareader.data as web
    >>> djia = web.get_data_fred('DJIA')
    >>> returns = 100 * djia['DJIA'].pct_change().dropna()

    A basic GARCH(1,1) with a constant mean can be constructed using only
    the return data

    >>> from arch.univariate import arch_model
    >>> am = arch_model(returns)

    Alternative mean and volatility processes can be directly specified

    >>> am = arch_model(returns, mean='AR', lags=2, vol='harch', p=[1, 5, 22])

    This example demonstrates the construction of a zero mean process
    with a TARCH volatility process and Student t error distribution

    >>> am = arch_model(returns, mean='zero', p=1, o=1, q=1,
    ...                 power=1.0, dist='StudentsT')

    Notes
    -----
    Inputs that are not relevant for a particular specification, such as `lags`
    when `mean='zero'`, are silently ignored.
    """
    known_mean = ("zero", "constant", "harx", "har", "ar", "arx", "ls")
    known_vol = ("arch", "figarch", "garch", "harch", "constant", "egarch")
    known_dist = (
        "normal",
        "gaussian",
        "studentst",
        "t",
        "skewstudent",
        "skewt",
        "ged",
        "generalized error",
    )
    mean = mean.lower()
    vol = vol.lower()
    dist = dist.lower()
    if mean not in known_mean:
        raise ValueError("Unknown model type in mean")
    if vol.lower() not in known_vol:
        raise ValueError("Unknown model type in vol")
    if dist.lower() not in known_dist:
        raise ValueError("Unknown model type in dist")

    if mean == "harx":
        am = HARX(y, x, lags, hold_back=hold_back, rescale=rescale)
    elif mean == "har":
        am = HARX(y, None, lags, hold_back=hold_back, rescale=rescale)
    elif mean == "arx":
        am = ARX(y, x, lags, hold_back=hold_back, rescale=rescale)
    elif mean == "ar":
        am = ARX(y, None, lags, hold_back=hold_back, rescale=rescale)
    elif mean == "ls":
        am = LS(y, x, hold_back=hold_back, rescale=rescale)
    elif mean == "constant":
        am = ConstantMean(y, hold_back=hold_back, rescale=rescale)
    else:  # mean == "zero"
        am = ZeroMean(y, hold_back=hold_back, rescale=rescale)

    if vol in ("arch", "garch", "figarch",
               "egarch") and not isinstance(p, int):
        raise TypeError(
            "p must be a scalar int for all volatility processes except HARCH."
        )

    if vol == "constant":
        v: VolatilityProcess = ConstantVariance()
    elif vol == "arch":
        assert isinstance(p, int)
        v = ARCH(p=p)
    elif vol == "figarch":
        assert isinstance(p, int)
        v = FIGARCH(p=p, q=q)
    elif vol == "garch":
        assert isinstance(p, int)
        v = GARCH(p=p, o=o, q=q, power=power)
    elif vol == "egarch":
        assert isinstance(p, int)
        v = EGARCH(p=p, o=o, q=q)
    else:  # vol == 'harch'
        v = HARCH(lags=p)

    if dist in ("skewstudent", "skewt"):
        d: Distribution = SkewStudent()
    elif dist in ("studentst", "t"):
        d = StudentsT()
    elif dist in ("ged", "generalized error"):
        d = GeneralizedError()
    else:  # ('gaussian', 'normal')
        d = Normal()

    am.volatility = v
    am.distribution = d

    return am
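A minimal usage sketch of the arch_model function above, assuming simulated returns as a stand-in for real data (the variable names here are illustrative, not taken from the examples):

import numpy as np

# stand-in return series; any ndarray or Series of returns would do
rng = np.random.default_rng(12345)
simulated_returns = rng.standard_normal(1000)

# GJR-GARCH(1,1,1) with a constant mean and skewed Student's t errors
am = arch_model(simulated_returns, mean="constant", vol="garch",
                p=1, o=1, q=1, dist="skewt")
res = am.fit(update_freq=0, disp="off")
print(res.summary())

Setting o=1 adds the asymmetric (GJR) term to the GARCH recursion, and dist="skewt" swaps the default Normal errors for SkewStudent, mirroring the branches at the end of the function.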
Example #11
    def test_warnings_nonstationary_harch(self):
        studt = StudentsT()
        harch = HARCH(lags=[1, 5, 22])
        # the HARCH coefficients 0.2 + 0.4 + 0.5 sum to 1.1 > 1, so the
        # process is not covariance stationary and simulate() warns
        parameters = np.array([0.1, 0.2, 0.4, 0.5])
        with pytest.warns(InitialValueWarning):
            harch.simulate(parameters, 1000, studt.simulate([4.0]))
Example #12
    def __init__(self, random_state=None):
        DistributionMixin.__init__(self)
        T.__init__(self, random_state)
Example #13
from arch.univariate.distribution import (
    GeneralizedError,
    Normal,
    SkewStudent,
    StudentsT,
)

DISTRIBUTIONS = [
    (SkewStudent(), [6, -0.1]),
    (SkewStudent(), [6, -0.5]),
    (SkewStudent(), [6, 0.1]),
    (SkewStudent(), [6, 0.5]),
    (GeneralizedError(), [1.5]),
    (GeneralizedError(), [2.1]),
    (StudentsT(), [6]),
    (StudentsT(), [7]),
    (Normal(), None),
]


@pytest.mark.parametrize("dist, params", DISTRIBUTIONS)
def test_moment(dist, params):
    """
    Ensures that Distribution.moment and .partial_moment agree
    with numeric integrals for order n=0,...,5 and z=+/-1,...,+/-5

    Parameters
    ----------
    dist : distribution.Distribution
        The distribution whose moments are being checked
Example #14
    def test_ar(self):
        ar = ARX(self.y, lags=3)
        params = np.array([1.0, 0.4, 0.3, 0.2, 1.0])
        data = ar.simulate(params, self.T)
        assert_equal(self.y, ar.y)

        bounds = ar.bounds()
        for b in bounds:
            assert_equal(b[0], -np.inf)
            assert_equal(b[1], np.inf)
        assert_equal(len(bounds), 4)

        assert_equal(ar.num_params, 4)
        assert_true(ar.constant)
        a, b = ar.constraints()
        assert_equal(a, np.empty((0, 4)))
        assert_equal(b, np.empty(0))
        res = ar.fit()

        nobs = 1000 - 3
        rhs = np.ones((nobs, 4))
        y = self.y
        lhs = y[3:1000]
        for i in range(3, 1000):
            rhs[i - 3, 1] = y[i - 1]
            rhs[i - 3, 2] = y[i - 2]
            rhs[i - 3, 3] = y[i - 3]
        params = np.linalg.pinv(rhs).dot(lhs)
        assert_almost_equal(params, res.params[:-1])

        forecasts = res.forecast(horizon=5)
        direct = pd.DataFrame(index=np.arange(y.shape[0]),
                              columns=['h.' + str(i + 1) for i in range(5)],
                              dtype=np.float64)
        params = res.params.iloc[:-1]
        for i in range(2, y.shape[0]):
            fcast = np.zeros(y.shape[0] + 5)
            fcast[:y.shape[0]] = y.copy()
            for h in range(1, 6):
                reg = np.array([
                    1.0, fcast[i + h - 1], fcast[i + h - 2], fcast[i + h - 3]
                ])
                fcast[i + h] = reg.dot(params)
            direct.iloc[i, :] = fcast[i + 1:i + 6]
        assert_frame_equal(direct, forecasts)

        assert_equal(ar.first_obs, 3)
        assert_equal(ar.last_obs, 1000)
        assert_equal(ar.hold_back, None)
        assert_equal(ar.lags, np.array([[0, 1, 2], [1, 2, 3]]))
        assert_equal(ar.nobs, 997)
        assert_equal(ar.name, 'AR')
        assert_equal(ar.use_rotated, False)
        ar.__repr__()
        ar._repr_html_()

        ar = ARX(self.y_df, lags=5)
        ar.__repr__()
        ar = ARX(self.y_series, lags=5)
        ar.__repr__()
        res = ar.fit()
        assert_true(isinstance(res.resid, pd.Series))
        assert_true(isinstance(res.conditional_volatility, pd.Series))
        # Smoke tests
        summ = ar.fit().summary()
        ar = ARX(self.y, lags=1, volatility=GARCH(), distribution=StudentsT())
        res = ar.fit(update_freq=5, cov_type='mle')
        res.param_cov
        res.plot()
        res.plot(annualize='D')
        res.plot(annualize='W')
        res.plot(annualize='M')
        assert_raises(ValueError, res.plot, annualize='unknown')
        res.plot(scale=360)
        res.hedgehog_plot()
Example #15
    def test_ar(self):
        ar = ARX(self.y, lags=3)
        params = np.array([1.0, 0.4, 0.3, 0.2, 1.0])
        data = ar.simulate(params, self.T)
        assert len(data) == self.T
        assert_equal(self.y, ar.y)

        bounds = ar.bounds()
        for b in bounds:
            assert_equal(b[0], -np.inf)
            assert_equal(b[1], np.inf)
        assert_equal(len(bounds), 4)

        assert_equal(ar.num_params, 4)
        assert ar.constant
        a, b = ar.constraints()
        assert_equal(a, np.empty((0, 4)))
        assert_equal(b, np.empty(0))
        res = ar.fit(disp=DISPLAY)

        nobs = 1000 - 3
        rhs = np.ones((nobs, 4))
        y = self.y
        lhs = y[3:1000]
        for i in range(3, 1000):
            rhs[i - 3, 1] = y[i - 1]
            rhs[i - 3, 2] = y[i - 2]
            rhs[i - 3, 3] = y[i - 3]
        params = np.linalg.pinv(rhs).dot(lhs)
        assert_almost_equal(params, res.params[:-1])

        forecasts = res.forecast(horizon=5)
        direct = pd.DataFrame(index=np.arange(y.shape[0]),
                              columns=['h.' + str(i + 1) for i in range(5)],
                              dtype=np.float64)
        params = res.params.iloc[:-1]
        for i in range(2, y.shape[0]):
            fcast = np.zeros(y.shape[0] + 5)
            fcast[:y.shape[0]] = y.copy()
            for h in range(1, 6):
                reg = np.array([1.0, fcast[i + h - 1],
                                fcast[i + h - 2], fcast[i + h - 3]])
                fcast[i + h] = reg.dot(params)
            direct.iloc[i, :] = fcast[i + 1:i + 6]
        assert isinstance(forecasts, ARCHModelForecast)
        # TODO
        # assert_frame_equal(direct, forecasts)

        assert_equal(ar.hold_back, None)
        assert_equal(ar.lags, np.array([[0, 1, 2], [1, 2, 3]]))
        assert_equal(ar.name, 'AR')
        assert_equal(ar.use_rotated, False)
        ar.__repr__()
        ar._repr_html_()

        ar = ARX(self.y_df, lags=5)
        ar.__repr__()
        ar = ARX(self.y_series, lags=5)
        ar.__repr__()
        res = ar.fit(disp=DISPLAY)
        assert isinstance(res.resid, pd.Series)
        assert isinstance(res.conditional_volatility, pd.Series)
        # Smoke bootstrap
        summ = ar.fit(disp=DISPLAY).summary()
        assert 'Constant Variance' in str(summ)
        ar = ARX(self.y, lags=1, volatility=GARCH(), distribution=StudentsT())
        res = ar.fit(disp=DISPLAY, update_freq=5, cov_type='mle')
        res.param_cov
        sims = res.forecast(horizon=5, method='simulation')
        assert isinstance(sims.simulations.residual_variances, np.ndarray)
        assert isinstance(sims.simulations.residuals, np.ndarray)
        assert isinstance(sims.simulations.values, np.ndarray)
        assert isinstance(sims.simulations.variances, np.ndarray)
Example #16
def arch_model(y,
               x=None,
               mean='Constant',
               lags=0,
               vol='Garch',
               p=1,
               o=0,
               q=1,
               power=2.0,
               dist='Normal',
               hold_back=None):
    """
    Convenience function to simplify initialization of ARCH models

    Parameters
    ----------
    y : {ndarray, Series, None}
        The dependent variable
    x : {np.array, DataFrame}, optional
        Exogenous regressors.  Ignored if model does not permit exogenous
        regressors.
    mean : str, optional
        Name of the mean model.  Currently supported options are: 'Constant',
        'Zero', 'LS', 'AR', 'ARX', 'HAR' and 'HARX'
    lags : int or list (int), optional
        Either a scalar integer value indicating lag length or a list of
        integers specifying lag locations.
    vol : str, optional
        Name of the volatility model.  Currently supported options are:
        'GARCH' (default), 'EGARCH', 'ARCH' and 'HARCH'
    p : int, optional
        Lag order of the symmetric innovation
    o : int, optional
        Lag order of the asymmetric innovation
    q : int, optional
        Lag order of lagged volatility or equivalent
    power : float, optional
        Power to use with GARCH and related models
    dist : str, optional
        Name of the error distribution.  Currently supported options are:

            * Normal: 'normal', 'gaussian' (default)
            * Student's t: 't', 'studentst'
            * Skewed Student's t: 'skewstudent', 'skewt'
            * Generalized Error Distribution: 'ged', 'generalized error'

    hold_back : int
        Number of observations at the start of the sample to exclude when
        estimating model parameters.  Used when comparing models with different
        lag lengths to estimate on the common sample.

    Returns
    -------
    model : ARCHModel
        Configured ARCH model

    Examples
    --------
    >>> import datetime as dt
    >>> import pandas_datareader.data as web
    >>> djia = web.get_data_fred('DJIA')
    >>> returns = 100 * djia['DJIA'].pct_change().dropna()

    A basic GARCH(1,1) with a constant mean can be constructed using only
    the return data

    >>> from arch.univariate import arch_model
    >>> am = arch_model(returns)

    Alternative mean and volatility processes can be directly specified

    >>> am = arch_model(returns, mean='AR', lags=2, vol='harch', p=[1, 5, 22])

    This example demonstrates the construction of a zero mean process
    with a TARCH volatility process and Student t error distribution

    >>> am = arch_model(returns, mean='zero', p=1, o=1, q=1,
    ...                 power=1.0, dist='StudentsT')

    Notes
    -----
    Inputs that are not relevant for a particular specification, such as `lags`
    when `mean='zero'`, are silently ignored.
    """
    known_mean = ('zero', 'constant', 'harx', 'har', 'ar', 'arx', 'ls')
    known_vol = ('arch', 'garch', 'harch', 'constant', 'egarch')
    known_dist = ('normal', 'gaussian', 'studentst', 't', 'skewstudent',
                  'skewt', 'ged', 'generalized error')
    mean = mean.lower()
    vol = vol.lower()
    dist = dist.lower()
    if mean not in known_mean:
        raise ValueError('Unknown model type in mean')
    if vol.lower() not in known_vol:
        raise ValueError('Unknown model type in vol')
    if dist.lower() not in known_dist:
        raise ValueError('Unknown model type in dist')

    if mean == 'zero':
        am = ZeroMean(y, hold_back=hold_back)
    elif mean == 'constant':
        am = ConstantMean(y, hold_back=hold_back)
    elif mean == 'harx':
        am = HARX(y, x, lags, hold_back=hold_back)
    elif mean == 'har':
        am = HARX(y, None, lags, hold_back=hold_back)
    elif mean == 'arx':
        am = ARX(y, x, lags, hold_back=hold_back)
    elif mean == 'ar':
        am = ARX(y, None, lags, hold_back=hold_back)
    else:
        am = LS(y, x, hold_back=hold_back)

    if vol == 'constant':
        v = ConstantVariance()
    elif vol == 'arch':
        v = ARCH(p=p)
    elif vol == 'garch':
        v = GARCH(p=p, o=o, q=q, power=power)
    elif vol == 'egarch':
        v = EGARCH(p=p, o=o, q=q)
    else:  # vol == 'harch'
        v = HARCH(lags=p)

    if dist in ('skewstudent', 'skewt'):
        d = SkewStudent()
    elif dist in ('studentst', 't'):
        d = StudentsT()
    elif dist in ('ged', 'generalized error'):
        d = GeneralizedError()
    else:  # ('gaussian', 'normal')
        d = Normal()

    am.volatility = v
    am.distribution = d

    return am
Example #17
    def test_egarch(self):
        cm = ConstantMean(self.y)
        cm.volatility = EGARCH()
        cm.fit(update_freq=0, disp=DISPLAY)
        cm.distribution = StudentsT()
        cm.fit(update_freq=0, disp=DISPLAY)
Example #18
import pytest
from arch.univariate.distribution import (SkewStudent, Normal, StudentsT,
                                          GeneralizedError)
from numpy import exp, inf, log, nan, pi
from numpy.testing import assert_equal, assert_almost_equal
from scipy.integrate import quad
from scipy.special import gammaln

DISTRIBUTIONS = [(SkewStudent(), [6, -0.1]), (SkewStudent(), [6, -0.5]),
                 (SkewStudent(), [6, 0.1]), (SkewStudent(), [6, 0.5]),
                 (GeneralizedError(), [1.5]), (GeneralizedError(), [2.1]),
                 (StudentsT(), [6]), (StudentsT(), [7]), (Normal(), None)]


@pytest.mark.parametrize('dist, params', DISTRIBUTIONS)
def test_moment(dist, params):
    """
    Ensures that Distribution.moment and .partial_moment agree
    with numeric integrals for order n=0,...,5 and z=+/-1,...,+/-5

    Parameters
    ----------
    dist : distribution.Distribution
        The distribution whose moments are being checked
    params : List
        List of parameters
    """

    assert_equal(dist.moment(-1, params), nan)
    assert_equal(dist.partial_moment(-1, 0., params), nan)
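The remainder of the check compares analytic and numeric moments. A minimal sketch of that comparison for StudentsT, assuming the scipy.stats density of a Student's t rescaled to unit variance as a stand-in for the distribution's own pdf (the original test integrates against the model's log-likelihood instead):

import numpy as np
from scipy import stats
from scipy.integrate import quad
from numpy.testing import assert_almost_equal
from arch.univariate.distribution import StudentsT

nu = 6.0
c = np.sqrt(nu / (nu - 2.0))  # maps the unit-variance t back to a standard t


def std_t_pdf(x):
    # density of a Student's t standardized to have unit variance
    return c * stats.t.pdf(c * x, nu)


dist = StudentsT()
for n in range(5):  # orders 0 through 4 for brevity
    analytic = dist.moment(n, [nu])
    numeric, _ = quad(lambda x: x ** n * std_t_pdf(x), -np.inf, np.inf)
    assert_almost_equal(analytic, numeric, decimal=4)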