def test_to_ndarray(self):
    """
    Tests :func:`colour.algebra.common.to_ndarray` definition.
    """

    # *TestCase.assertEqual* relies on the truthiness of the ndarray
    # comparison, which raises *ValueError* for arrays with more than one
    # element; *np.testing.assert_array_equal* compares element-wise and
    # produces informative failure messages.
    np.testing.assert_array_equal(to_ndarray(1), np.array([1]))
    np.testing.assert_array_equal(to_ndarray([1]), np.array([1]))
    np.testing.assert_array_equal(to_ndarray((1,)), np.array((1,)))
    np.testing.assert_array_equal(to_ndarray(np.array([1])), np.array([1]))
def test_to_ndarray(self):
    """
    Tests :func:`colour.algebra.common.to_ndarray` definition.
    """

    # *TestCase.assertEqual* relies on the truthiness of the ndarray
    # comparison, which raises *ValueError* for arrays with more than one
    # element; *np.testing.assert_array_equal* compares element-wise and
    # produces informative failure messages.
    np.testing.assert_array_equal(to_ndarray(1), np.array([1]))
    np.testing.assert_array_equal(to_ndarray([1]), np.array([1]))
    np.testing.assert_array_equal(to_ndarray((1,)), np.array((1,)))
    np.testing.assert_array_equal(to_ndarray(np.array([1])), np.array([1]))
def x(self, value):
    """
    Setter for **self.__x** private attribute.

    Also precomputes **self.__xp**, the :math:`x` variable padded with two
    extrapolated points on each side, as required by the Sprague (1880)
    interpolation method.

    Parameters
    ----------
    value : array_like
        Attribute value.
    """

    if value is not None:
        value = to_ndarray(value)

        assert value.ndim == 1, (
            '"x" independent variable must have exactly one dimension!')
        assert is_uniform(value), (
            '"x" independent variable is not uniform!')

        if not issubclass(value.dtype.type, np.inexact):
            # *np.float_* was removed in NumPy 2.0; *np.float64* is the
            # portable equivalent spelling.
            value = value.astype(np.float64)

        value_steps = steps(value)[0]

        # Two extra uniform samples on each boundary.
        xp1 = value[0] - value_steps * 2
        xp2 = value[0] - value_steps
        xp3 = value[-1] + value_steps
        xp4 = value[-1] + value_steps * 2

        self.__xp = np.concatenate(((xp1, xp2), value, (xp3, xp4)))

    self.__x = value
def x(self, value):
    """
    Setter for **self.__x** private attribute.

    Also precomputes **self.__xp**, the :math:`x` variable padded with two
    extrapolated points on each side, as required by the Sprague (1880)
    interpolation method.

    Parameters
    ----------
    value : array_like
        Attribute value.
    """

    if value is not None:
        value = to_ndarray(value)

        assert value.ndim == 1, (
            '"x" independent variable must have exactly one dimension!')
        assert is_uniform(value), (
            '"x" independent variable is not uniform!')

        if not issubclass(value.dtype.type, np.inexact):
            # *np.float_* was removed in NumPy 2.0; *np.float64* is the
            # portable equivalent spelling.
            value = value.astype(np.float64)

        value_steps = steps(value)[0]

        # Two extra uniform samples on each boundary.
        xp1 = value[0] - value_steps * 2
        xp2 = value[0] - value_steps
        xp3 = value[-1] + value_steps
        xp4 = value[-1] + value_steps * 2

        self.__xp = np.concatenate(((xp1, xp2), value, (xp3, xp4)))

    self.__x = value
def to_RGB(self, value):
    """
    Setter for **self.__to_RGB** private attribute.

    Parameters
    ----------
    value : array_like
        Attribute value.
    """

    # *None* is stored verbatim; anything else is coerced to an ndarray.
    self.__to_RGB = None if value is None else to_ndarray(value)
def primaries(self, value):
    """
    Setter for **self.__primaries** private attribute.

    Parameters
    ----------
    value : array_like, (3, 2)
        Attribute value.
    """

    # *None* is stored verbatim; anything else is coerced to an ndarray.
    self.__primaries = None if value is None else to_ndarray(value)
def to_RGB(self, value):
    """
    Setter for **self.__to_RGB** private attribute.

    Parameters
    ----------
    value : array_like
        Attribute value.
    """

    # *None* is stored verbatim; anything else is coerced to an ndarray.
    self.__to_RGB = None if value is None else to_ndarray(value)
def primaries(self, value):
    """
    Setter for **self.__primaries** private attribute.

    Parameters
    ----------
    value : array_like, (3, 2)
        Attribute value.
    """

    # *None* is stored verbatim; anything else is coerced to an ndarray.
    self.__primaries = None if value is None else to_ndarray(value)
def x(self, value):
    """
    Setter for **self.__x** private attribute.

    Parameters
    ----------
    value : array_like
        Attribute value.
    """

    if value is not None:
        value = to_ndarray(value)

        assert value.ndim == 1, (
            '"x" independent variable must have exactly one dimension!')

        if not issubclass(value.dtype.type, np.inexact):
            # *np.float_* was removed in NumPy 2.0; *np.float64* is the
            # portable equivalent spelling.
            value = value.astype(np.float64)

    self.__x = value
def x(self, value):
    """
    Setter for **self.__x** private attribute.

    Parameters
    ----------
    value : array_like
        Attribute value.
    """

    if value is not None:
        value = to_ndarray(value)

        assert value.ndim == 1, (
            '"x" independent variable must have exactly one dimension!')

        if not issubclass(value.dtype.type, np.inexact):
            # *np.float_* was removed in NumPy 2.0; *np.float64* is the
            # portable equivalent spelling.
            value = value.astype(np.float64)

    self.__x = value
def __call__(self, x):
    """
    Evaluates the Extrapolator1d at given point(s).

    Parameters
    ----------
    x : numeric or array_like
        Point(s) to evaluate the Extrapolator1d at.

    Returns
    -------
    float or ndarray
        Extrapolated points value(s).
    """

    extrapolated = self.__evaluate(to_ndarray(x))

    # Scalar in, scalar out: collapse the single-element result.
    return float(extrapolated) if is_numeric(x) else extrapolated
def __call__(self, x):
    """
    Evaluates the interpolating polynomial at given point(s).

    Parameters
    ----------
    x : numeric or array_like
        Point(s) to evaluate the interpolant at.

    Returns
    -------
    float or ndarray
        Interpolated value(s).
    """

    interpolated = self.__evaluate(to_ndarray(x))

    # Scalar in, scalar out: collapse the single-element result.
    return float(interpolated) if is_numeric(x) else interpolated
def __call__(self, x):
    """
    Evaluates the Extrapolator1d at given point(s).

    Parameters
    ----------
    x : numeric or array_like
        Point(s) to evaluate the Extrapolator1d at.

    Returns
    -------
    float or ndarray
        Extrapolated points value(s).
    """

    extrapolated = self.__evaluate(to_ndarray(x))

    # Scalar in, scalar out: collapse the single-element result.
    return float(extrapolated) if is_numeric(x) else extrapolated
def __call__(self, x):
    """
    Evaluates the interpolating polynomial at given point(s).

    Parameters
    ----------
    x : numeric or array_like
        Point(s) to evaluate the interpolant at.

    Returns
    -------
    float or ndarray
        Interpolated value(s).
    """

    interpolated = self.__evaluate(to_ndarray(x))

    # Scalar in, scalar out: collapse the single-element result.
    return float(interpolated) if is_numeric(x) else interpolated
def y(self, value):
    """
    Setter for **self.__y** private attribute.

    Also precomputes **self.__yp**, the :math:`y` variable padded with two
    extrapolated points on each side, computed from the Sprague (1880)
    boundary coefficients.

    Parameters
    ----------
    value : array_like
        Attribute value.
    """

    if value is not None:
        value = to_ndarray(value)

        assert value.ndim == 1, (
            '"y" dependent variable must have exactly one dimension!')
        assert len(value) >= 6, (
            '"y" dependent variable values count must be in domain [6:]!')

        if not issubclass(value.dtype.type, np.inexact):
            # *np.float_* was removed in NumPy 2.0; *np.float64* is the
            # portable equivalent spelling.
            value = value.astype(np.float64)

        # Boundary points from the first / last six samples, scaled by the
        # 1 / 209 factor of the Sprague coefficients tables.
        yp1 = np.ravel((np.dot(self.SPRAGUE_C_COEFFICIENTS[0],
                               np.array(value[0:6]).reshape(
                                   (6, 1)))) / 209)[0]
        yp2 = np.ravel((np.dot(self.SPRAGUE_C_COEFFICIENTS[1],
                               np.array(value[0:6]).reshape(
                                   (6, 1)))) / 209)[0]
        yp3 = np.ravel((np.dot(self.SPRAGUE_C_COEFFICIENTS[2],
                               np.array(value[-6:]).reshape(
                                   (6, 1)))) / 209)[0]
        yp4 = np.ravel((np.dot(self.SPRAGUE_C_COEFFICIENTS[3],
                               np.array(value[-6:]).reshape(
                                   (6, 1)))) / 209)[0]

        self.__yp = np.concatenate(((yp1, yp2), value, (yp3, yp4)))

    self.__y = value
def y(self, value):
    """
    Setter for **self.__y** private attribute.

    Also precomputes **self.__yp**, the :math:`y` variable padded with two
    extrapolated points on each side, computed from the Sprague (1880)
    boundary coefficients.

    Parameters
    ----------
    value : array_like
        Attribute value.
    """

    if value is not None:
        value = to_ndarray(value)

        assert value.ndim == 1, (
            '"y" dependent variable must have exactly one dimension!')
        assert len(value) >= 6, (
            '"y" dependent variable values count must be in domain [6:]!')

        if not issubclass(value.dtype.type, np.inexact):
            # *np.float_* was removed in NumPy 2.0; *np.float64* is the
            # portable equivalent spelling.
            value = value.astype(np.float64)

        # Boundary points from the first / last six samples, scaled by the
        # 1 / 209 factor of the Sprague coefficients tables.
        yp1 = np.ravel((np.dot(
            self.SPRAGUE_C_COEFFICIENTS[0],
            np.array(value[0:6]).reshape((6, 1)))) / 209)[0]
        yp2 = np.ravel((np.dot(
            self.SPRAGUE_C_COEFFICIENTS[1],
            np.array(value[0:6]).reshape((6, 1)))) / 209)[0]
        yp3 = np.ravel((np.dot(
            self.SPRAGUE_C_COEFFICIENTS[2],
            np.array(value[-6:]).reshape((6, 1)))) / 209)[0]
        yp4 = np.ravel((np.dot(
            self.SPRAGUE_C_COEFFICIENTS[3],
            np.array(value[-6:]).reshape((6, 1)))) / 209)[0]

        self.__yp = np.concatenate(((yp1, yp2), value, (yp3, yp4)))

    self.__y = value
def linear_regression(y, x=None, additional_statistics=False):
    """
    Performs the statistics computation about the ideal trend line from given
    data using the *least-squares* method.

    The equation of the line is :math:`y=b+mx` or
    :math:`y=b+m1x1+m1x2+...+mnxn` where the dependent variable :math:`y`
    value is a function of the independent variable :math:`x` values.

    Parameters
    ----------
    y : array_like
        Dependent and already known :math:`y` variable values used to curve
        fit an ideal trend line.
    x : array_like, optional
        Independent :math:`x` variable(s) values corresponding with :math:`y`
        variable.
    additional_statistics : ndarray
        Output additional regression statistics, by default only the
        :math:`b` variable and :math:`m` coefficients are returned.

    Returns
    -------
    ndarray, ({{mn, mn-1, ..., b}, {sum_of_squares_residual}})
        Regression statistics.

    Raises
    ------
    ValueError
        If :math:`y` and :math:`x` variables have incompatible dimensions.

    References
    ----------
    .. [2]  http://en.wikipedia.org/wiki/Simple_linear_regression
            (Last accessed 24 May 2014)

    Examples
    --------
    Linear regression with the dependent and already known :math:`y`
    variable:

    >>> y = np.array([1, 2, 1, 3, 2, 3, 3, 4, 4, 3])
    >>> linear_regression(y)  # doctest: +ELLIPSIS
    array([ 0.2909090...,  1.        ])

    Linear regression with the dependent :math:`y` variable and independent
    :math:`x` variable:

    >>> x1 = np.array([40, 45, 38, 50, 48, 55, 53, 55, 58, 40])
    >>> linear_regression(y, x1)  # doctest: +ELLIPSIS
    array([ 0.1225194..., -3.3054357...])

    Multiple linear regression with the dependent :math:`y` variable and
    multiple independent :math:`x_i` variables:

    >>> x2 = np.array([25, 20, 30, 30, 28, 30, 34, 36, 32, 34])
    >>> linear_regression(y, tuple(zip(x1, x2)))  # doctest: +ELLIPSIS
    array([ 0.0998002...,  0.0876257..., -4.8303807...])

    Multiple linear regression with additional statistics:

    >>> linear_regression(y, tuple(zip(x1, x2)), True)  # doctest: +ELLIPSIS
    (array([ 0.0998002...,  0.0876257..., -4.8303807...]), array([ 2.1376249...]))
    """

    y = to_ndarray(y)

    if x is None:
        # Default to a simple 1, 2, ..., n abscissa.
        x = np.arange(1, len(y) + 1)
    else:
        x = to_ndarray(x)
        if len(x) != len(y):
            raise ValueError(
                '"y" and "x" variables have incompatible dimensions!')

    # Augment the design matrix with a column of ones for the intercept term.
    x = np.vstack([np.array(x).T, np.ones(len(x))]).T
    # *rcond=None* opts in to the machine-precision-scaled singular value
    # cut-off and silences the *FutureWarning* that bare *np.linalg.lstsq*
    # raises on modern NumPy.
    result = np.linalg.lstsq(x, y, rcond=None)

    if additional_statistics:
        return result[0:2]
    else:
        return result[0]
def linear_regression(y, x=None, additional_statistics=False):
    """
    Performs the statistics computation about the ideal trend line from given
    data using the *least-squares* method.

    The equation of the line is :math:`y=b+mx` or
    :math:`y=b+m1x1+m1x2+...+mnxn` where the dependent variable :math:`y`
    value is a function of the independent variable :math:`x` values.

    Parameters
    ----------
    y : array_like
        Dependent and already known :math:`y` variable values used to curve
        fit an ideal trend line.
    x : array_like, optional
        Independent :math:`x` variable(s) values corresponding with :math:`y`
        variable.
    additional_statistics : ndarray
        Output additional regression statistics, by default only the
        :math:`b` variable and :math:`m` coefficients are returned.

    Returns
    -------
    ndarray, ({{mn, mn-1, ..., b}, {sum_of_squares_residual}})
        Regression statistics.

    Raises
    ------
    ValueError
        If :math:`y` and :math:`x` variables have incompatible dimensions.

    References
    ----------
    .. [2]  http://en.wikipedia.org/wiki/Simple_linear_regression
            (Last accessed 24 May 2014)

    Examples
    --------
    Linear regression with the dependent and already known :math:`y`
    variable:

    >>> y = np.array([1, 2, 1, 3, 2, 3, 3, 4, 4, 3])
    >>> linear_regression(y)  # doctest: +ELLIPSIS
    array([ 0.2909090...,  1.        ])

    Linear regression with the dependent :math:`y` variable and independent
    :math:`x` variable:

    >>> x1 = np.array([40, 45, 38, 50, 48, 55, 53, 55, 58, 40])
    >>> linear_regression(y, x1)  # doctest: +ELLIPSIS
    array([ 0.1225194..., -3.3054357...])

    Multiple linear regression with the dependent :math:`y` variable and
    multiple independent :math:`x_i` variables:

    >>> x2 = np.array([25, 20, 30, 30, 28, 30, 34, 36, 32, 34])
    >>> linear_regression(y, tuple(zip(x1, x2)))  # doctest: +ELLIPSIS
    array([ 0.0998002...,  0.0876257..., -4.8303807...])

    Multiple linear regression with additional statistics:

    >>> linear_regression(y, tuple(zip(x1, x2)), True)  # doctest: +ELLIPSIS
    (array([ 0.0998002...,  0.0876257..., -4.8303807...]), array([ 2.1376249...]))
    """

    y = to_ndarray(y)

    if x is None:
        # Default to a simple 1, 2, ..., n abscissa.
        x = np.arange(1, len(y) + 1)
    else:
        x = to_ndarray(x)
        if len(x) != len(y):
            raise ValueError(
                '"y" and "x" variables have incompatible dimensions!')

    # Augment the design matrix with a column of ones for the intercept term.
    x = np.vstack([np.array(x).T, np.ones(len(x))]).T
    # *rcond=None* opts in to the machine-precision-scaled singular value
    # cut-off and silences the *FutureWarning* that bare *np.linalg.lstsq*
    # raises on modern NumPy.
    result = np.linalg.lstsq(x, y, rcond=None)

    if additional_statistics:
        return result[0:2]
    else:
        return result[0]