Example #1
    def test_from_range_degrees(self):
        """Test :func:`colour.utilities.common.from_range_degrees` definition."""

        with domain_range_scale("Reference"):
            self.assertEqual(from_range_degrees(1), 1)

        with domain_range_scale("1"):
            self.assertEqual(from_range_degrees(1), 1 / 360)

        with domain_range_scale("100"):
            self.assertEqual(from_range_degrees(1), 1 / 3.6)

        with domain_range_scale("100"):
            self.assertEqual(from_range_degrees(1, np.pi), 1 / (np.pi / 100))
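A minimal standalone sketch of the behaviour this test exercises (assuming only the colour.utilities definitions used above, domain_range_scale and from_range_degrees): the active domain-range scale decides whether an angle in degrees is returned unchanged, normalised by 360, or normalised by 3.6.

import numpy as np
from colour.utilities import domain_range_scale, from_range_degrees

hue = 90.0  # degrees

with domain_range_scale("Reference"):
    print(from_range_degrees(hue))  # 90.0, returned unchanged

with domain_range_scale("1"):
    print(from_range_degrees(hue))  # 0.25, i.e. 90 / 360

with domain_range_scale("100"):
    print(from_range_degrees(hue))  # 25.0, i.e. 90 / 3.6

with domain_range_scale("100"):
    # An alternative scale factor can be given in place of the default 360.
    print(from_range_degrees(hue, np.pi))  # 90 / (np.pi / 100)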
Example #2
    def test_from_range_degrees(self):
        """
        Tests :func:`colour.utilities.common.from_range_degrees` definition.
        """

        with domain_range_scale('Reference'):
            self.assertEqual(from_range_degrees(1), 1)

        with domain_range_scale('1'):
            self.assertEqual(from_range_degrees(1), 1 / 360)

        with domain_range_scale('100'):
            self.assertEqual(from_range_degrees(1), 1 / 3.6)

        with domain_range_scale('100'):
            self.assertEqual(from_range_degrees(1, np.pi), 1 / (np.pi / 100))
Example #3
def Lab_to_LCHab(Lab):
    """
    Converts from *CIE L\\*a\\*b\\** colourspace to *CIE L\\*C\\*Hab*
    colourspace.

    Parameters
    ----------
    Lab : array_like
        *CIE L\\*a\\*b\\** colourspace array.

    Returns
    -------
    ndarray
        *CIE L\\*C\\*Hab* colourspace array.

    Notes
    -----

    +------------+-----------------------+-----------------+
    | **Domain** | **Scale - Reference** | **Scale - 1**   |
    +============+=======================+=================+
    | ``Lab``    | ``L`` : [0, 100]      | ``L`` : [0, 1]  |
    |            |                       |                 |
    |            | ``a`` : [-100, 100]   | ``a`` : [-1, 1] |
    |            |                       |                 |
    |            | ``b`` : [-100, 100]   | ``b`` : [-1, 1] |
    +------------+-----------------------+-----------------+

    +------------+-----------------------+-----------------+
    | **Range**  | **Scale - Reference** | **Scale - 1**   |
    +============+=======================+=================+
    | ``LCHab``  | ``L``  : [0, 100]     | ``L``  : [0, 1] |
    |            |                       |                 |
    |            | ``C``  : [0, 100]     | ``C``  : [0, 1] |
    |            |                       |                 |
    |            | ``ab`` : [0, 360]     | ``ab`` : [0, 1] |
    +------------+-----------------------+-----------------+

    References
    ----------
    :cite:`CIETC1-482004m`

    Examples
    --------
    >>> Lab = np.array([41.52787529, 52.63858304, 26.92317922])
    >>> Lab_to_LCHab(Lab)  # doctest: +ELLIPSIS
    array([ 41.5278752...,  59.1242590...,  27.0884878...])
    """

    L, a, b = tsplit(Lab)

    C, H = tsplit(cartesian_to_polar(tstack([a, b])))

    LCHab = tstack([L, C, from_range_degrees(np.degrees(H) % 360)])

    return LCHab
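A short usage sketch (assuming the top-level colour package re-exports Lab_to_LCHab, as colour-science does): under the default Reference scale the hue is returned in degrees, while under the '1' scale both the input array and the returned hue are normalised.

import numpy as np
import colour
from colour.utilities import domain_range_scale

Lab = np.array([41.52787529, 52.63858304, 26.92317922])

# Reference scale: L and C roughly in [0, 100], hue in [0, 360] degrees.
print(colour.Lab_to_LCHab(Lab))  # approximately [41.528, 59.124, 27.088]

# Scale 1: inputs are expected in [0, 1] / [-1, 1] and the hue comes back as h / 360.
with domain_range_scale("1"):
    print(colour.Lab_to_LCHab(Lab / 100))  # approximately [0.4153, 0.5912, 0.0752]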
Example #5
File: cie_luv.py  Project: vidakDK/colour
def Luv_to_LCHuv(Luv):
    """
    Converts from *CIE L\\*u\\*v\\** colourspace to *CIE L\\*C\\*Huv*
    colourspace.

    Parameters
    ----------
    Luv : array_like
        *CIE L\\*u\\*v\\** colourspace array.

    Returns
    -------
    ndarray
        *CIE L\\*C\\*Huv* colourspace array.

    Notes
    -----

    +------------+-----------------------+-----------------+
    | **Domain** | **Scale - Reference** | **Scale - 1**   |
    +============+=======================+=================+
    | ``Luv``    | ``L`` : [0, 100]      | ``L`` : [0, 1]  |
    |            |                       |                 |
    |            | ``u`` : [-100, 100]   | ``u`` : [-1, 1] |
    |            |                       |                 |
    |            | ``v`` : [-100, 100]   | ``v`` : [-1, 1] |
    +------------+-----------------------+-----------------+

    +------------+-----------------------+-----------------+
    | **Range**  | **Scale - Reference** | **Scale - 1**   |
    +============+=======================+=================+
    | ``LCHuv``  | ``L``  : [0, 100]     | ``L``  : [0, 1] |
    |            |                       |                 |
    |            | ``C``  : [0, 100]     | ``C``  : [0, 1] |
    |            |                       |                 |
    |            | ``uv`` : [0, 360]     | ``uv`` : [0, 1] |
    +------------+-----------------------+-----------------+

    References
    ----------
    :cite:`CIETC1-482004m`

    Examples
    --------
    >>> Luv = np.array([41.52787529, 96.83626054, 17.75210149])
    >>> Luv_to_LCHuv(Luv)  # doctest: +ELLIPSIS
    array([ 41.5278752...,  98.4499795...,  10.3881634...])
    """

    L, u, v = tsplit(Luv)

    C, H = tsplit(cartesian_to_polar(tstack([u, v])))

    LCHuv = tstack([L, C, from_range_degrees(np.degrees(H) % 360)])

    return LCHuv
Example #6
def IPT_hue_angle(IPT):
    """
    Computes the hue angle in degrees from *IPT* colourspace.

    Parameters
    ----------
    IPT : array_like
        *IPT* colourspace array.

    Returns
    -------
    numeric or ndarray
        Hue angle in degrees.

    Notes
    -----

    +------------+-----------------------+-----------------+
    | **Domain** | **Scale - Reference** | **Scale - 1**   |
    +============+=======================+=================+
    | ``IPT``    | ``I`` : [0, 1]        | ``I`` : [0, 1]  |
    |            |                       |                 |
    |            | ``P`` : [-1, 1]       | ``P`` : [-1, 1] |
    |            |                       |                 |
    |            | ``T`` : [-1, 1]       | ``T`` : [-1, 1] |
    +------------+-----------------------+-----------------+

    +------------+-----------------------+-----------------+
    | **Range**  | **Scale - Reference** | **Scale - 1**   |
    +============+=======================+=================+
    | ``hue``    | [0, 360]              | [0, 1]          |
    +------------+-----------------------+-----------------+

    References
    ----------
    :cite:`Fairchild2013y`

    Examples
    --------
    >>> IPT = np.array([0.96907232, 1, 1.12179215])
    >>> IPT_hue_angle(IPT)  # doctest: +ELLIPSIS
    48.2852074...
    """

    _I, P, T = tsplit(to_domain_1(IPT))

    hue = np.degrees(np.arctan2(T, P)) % 360

    return from_range_degrees(hue)
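A brief numeric check of the hue-angle arithmetic used above, in plain NumPy so it carries no extra API assumptions: arctan2 of the opponent dimensions, converted to degrees and wrapped to [0, 360).

import numpy as np

P, T = 1.0, 1.12179215  # opponent dimensions from the docstring example
hue = np.degrees(np.arctan2(T, P)) % 360
print(hue)  # ~48.285, matching the doctest above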
Example #8
def XYZ_to_RLAB(XYZ,
                XYZ_n,
                Y_n,
                sigma=VIEWING_CONDITIONS_RLAB['Average'],
                D=D_FACTOR_RLAB['Hard Copy Images']):
    """
    Computes the *RLAB* model colour appearance correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_n : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_n : numeric or array_like
        Absolute adapting luminance in :math:`cd/m^2`.
    sigma : numeric or array_like, optional
        Relative luminance of the surround, see
        :attr:`colour.VIEWING_CONDITIONS_RLAB` for reference.
    D : numeric or array_like, optional
        *Discounting-the-Illuminant* factor normalised to domain [0, 1].

    Returns
    -------
    CAM_Specification_RLAB
        *RLAB* colour appearance model specification.

    Notes
    -----

    +--------------------------+-----------------------+---------------+
    | **Domain**               | **Scale - Reference** | **Scale - 1** |
    +==========================+=======================+===============+
    | ``XYZ``                  | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+
    | ``XYZ_n``                | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+

    +------------------------------+-----------------------\
+---------------+
    | **Range**                    | **Scale - Reference** \
| **Scale - 1** |
    +==============================+=======================\
+===============+
    | ``CAM_Specification_RLAB.h`` | [0, 360]              \
| [0, 1]        |
    +------------------------------+-----------------------\
+---------------+

    References
    ----------
    :cite:`Fairchild1996a`, :cite:`Fairchild2013w`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_n = np.array([109.85, 100, 35.58])
    >>> Y_n = 31.83
    >>> sigma = VIEWING_CONDITIONS_RLAB['Average']
    >>> D = D_FACTOR_RLAB['Hard Copy Images']
    >>> XYZ_to_RLAB(XYZ, XYZ_n, Y_n, sigma, D)  # doctest: +ELLIPSIS
    CAM_Specification_RLAB(J=49.8347069..., C=54.8700585..., \
h=286.4860208..., s=1.1010410..., HC=None, a=15.5711021..., b=-52.6142956...)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_n = to_domain_100(XYZ_n)
    Y_n = as_float_array(Y_n)
    D = as_float_array(D)
    sigma = as_float_array(sigma)

    # Converting to cone responses.
    LMS_n = XYZ_to_rgb(XYZ_n)

    # Computing the :math:`A` matrix.
    LMS_l_E = (3 * LMS_n) / (LMS_n[0] + LMS_n[1] + LMS_n[2])
    LMS_p_L = ((1 + spow(Y_n[..., np.newaxis], 1 / 3) + LMS_l_E) /
               (1 + spow(Y_n[..., np.newaxis], 1 / 3) + (1 / LMS_l_E)))
    LMS_a_L = (LMS_p_L + D[..., np.newaxis] * (1 - LMS_p_L)) / LMS_n

    aR = row_as_diagonal(LMS_a_L)
    M = matrix_dot(matrix_dot(MATRIX_R, aR), MATRIX_XYZ_TO_HPE)
    XYZ_ref = vector_dot(M, XYZ)

    X_ref, Y_ref, Z_ref = tsplit(XYZ_ref)

    # Computing the correlate of *Lightness* :math:`L^R`.
    LR = 100 * spow(Y_ref, sigma)

    # Computing opponent colour dimensions :math:`a^R` and :math:`b^R`.
    aR = 430 * (spow(X_ref, sigma) - spow(Y_ref, sigma))
    bR = 170 * (spow(Y_ref, sigma) - spow(Z_ref, sigma))

    # Computing the *hue* angle :math:`h^R`.
    hR = np.degrees(np.arctan2(bR, aR)) % 360
    # TODO: Implement hue composition computation.

    # Computing the correlate of *chroma* :math:`C^R`.
    CR = np.hypot(aR, bR)

    # Computing the correlate of *saturation* :math:`s^R`.
    sR = CR / LR

    return CAM_Specification_RLAB(LR, CR, from_range_degrees(hR), sR, None, aR,
                                  bR)
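A usage sketch for the hue correlate scaling (assuming colour.XYZ_to_RLAB is exported at the package level and that sigma and D default to the values shown in the signature above): the same call returns h in degrees under the Reference scale and h / 360 under the '1' scale.

import numpy as np
import colour
from colour.utilities import domain_range_scale

XYZ = np.array([19.01, 20.00, 21.78])
XYZ_n = np.array([109.85, 100, 35.58])
Y_n = 31.83

print(colour.XYZ_to_RLAB(XYZ, XYZ_n, Y_n).h)  # ~286.486 degrees

with domain_range_scale("1"):
    # XYZ and XYZ_n are scaled to [0, 1]; Y_n is an absolute luminance and is not.
    print(colour.XYZ_to_RLAB(XYZ / 100, XYZ_n / 100, Y_n).h)  # ~0.7958, i.e. 286.486 / 360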
Example #9
def XYZ_to_CIECAM02(
    XYZ: ArrayLike,
    XYZ_w: ArrayLike,
    L_A: FloatingOrArrayLike,
    Y_b: FloatingOrArrayLike,
    surround: InductionFactors_CIECAM02 = VIEWING_CONDITIONS_CIECAM02[
        "Average"],
    discount_illuminant: Boolean = False,
) -> CAM_Specification_CIECAM02:
    """
    Compute the *CIECAM02* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    Parameters
    ----------
    XYZ
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w
        *CIE XYZ* tristimulus values of reference white.
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b
        Luminous factor of background :math:`Y_b` such that
        :math:`Y_b = 100 * L_b / L_w` where :math:`L_w` is the luminance of the
        light source and :math:`L_b` is the luminance of the background. For
        viewing images, :math:`Y_b` can be the average :math:`Y` value for the
        pixels in the entire image, or frequently, a :math:`Y` value of 20,
        approximating an :math:`L^*` of 50, is used.
    surround
        Surround viewing conditions induction factors.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    :class:`colour.CAM_Specification_CIECAM02`
        *CIECAM02* colour appearance model specification.

    Notes
    -----
    +------------+-----------------------+---------------+
    | **Domain** | **Scale - Reference** | **Scale - 1** |
    +============+=======================+===============+
    | ``XYZ``    | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+
    | ``XYZ_w``  | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+

    +----------------------------------+-----------------------\
+---------------+
    | **Range**                        | **Scale - Reference** \
| **Scale - 1** |
    +==================================+=======================\
+===============+
    | ``CAM_Specification_CIECAM02.J`` | [0, 100]              \
| [0, 1]        |
    +----------------------------------+-----------------------\
+---------------+
    | ``CAM_Specification_CIECAM02.C`` | [0, 100]              \
| [0, 1]        |
    +----------------------------------+-----------------------\
+---------------+
    | ``CAM_Specification_CIECAM02.h`` | [0, 360]              \
| [0, 1]        |
    +----------------------------------+-----------------------\
+---------------+
    | ``CAM_Specification_CIECAM02.s`` | [0, 100]              \
| [0, 1]        |
    +----------------------------------+-----------------------\
+---------------+
    | ``CAM_Specification_CIECAM02.Q`` | [0, 100]              \
| [0, 1]        |
    +----------------------------------+-----------------------\
+---------------+
    | ``CAM_Specification_CIECAM02.M`` | [0, 100]              \
| [0, 1]        |
    +----------------------------------+-----------------------\
+---------------+
    | ``CAM_Specification_CIECAM02.H`` | [0, 400]              \
| [0, 1]        |
    +----------------------------------+-----------------------\
+---------------+

    References
    ----------
    :cite:`Fairchild2004c`, :cite:`Luo2013`, :cite:`Moroneya`,
    :cite:`Wikipedia2007a`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = VIEWING_CONDITIONS_CIECAM02['Average']
    >>> XYZ_to_CIECAM02(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM_Specification_CIECAM02(J=41.7310911..., C=0.1047077..., \
h=219.0484326..., s=2.3603053..., Q=195.3713259..., M=0.1088421..., \
H=278.0607358..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    n, F_L, N_bb, N_cb, z = viewing_condition_dependent_parameters(
        Y_b, Y_w, L_A)

    # Converting *CIE XYZ* tristimulus values to *CMCCAT2000* transform
    # sharpened *RGB* values.
    RGB = vector_dot(CAT_CAT02, XYZ)
    RGB_w = vector_dot(CAT_CAT02, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    D = (degree_of_adaptation(surround.F, L_A)
         if not discount_illuminant else ones(L_A.shape))

    # Computing full chromatic adaptation.
    RGB_c = full_chromatic_adaptation_forward(RGB, RGB_w, Y_w, D)
    RGB_wc = full_chromatic_adaptation_forward(RGB_w, RGB_w, Y_w, D)

    # Converting to *Hunt-Pointer-Estevez* colourspace.
    RGB_p = RGB_to_rgb(RGB_c)
    RGB_pw = RGB_to_rgb(RGB_wc)

    # Applying forward post-adaptation non-linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_p, F_L)
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_pw, F_L)

    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing achromatic responses for the stimulus and the whitepoint.
    A = achromatic_response_forward(RGB_a, N_bb)
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CAM_Specification_CIECAM02(
        as_float(from_range_100(J)),
        as_float(from_range_100(C)),
        as_float(from_range_degrees(h)),
        as_float(from_range_100(s)),
        as_float(from_range_100(Q)),
        as_float(from_range_100(M)),
        as_float(from_range_degrees(H, 400)),
        None,
    )
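A small sketch of how the two hue-related correlates scale (input values taken from the doctest above; assumes colour.XYZ_to_CIECAM02 is exported at the package level): under the '1' scale the hue angle h is divided by 360 whereas the hue quadrature H is divided by 400, matching the from_range_degrees(H, 400) call in the return statement.

import numpy as np
import colour
from colour.utilities import domain_range_scale

XYZ = np.array([19.01, 20.00, 21.78])
XYZ_w = np.array([95.05, 100.00, 108.88])
L_A, Y_b = 318.31, 20.0

with domain_range_scale("1"):
    specification = colour.XYZ_to_CIECAM02(XYZ / 100, XYZ_w / 100, L_A, Y_b)
    print(specification.h)  # ~0.6085, i.e. 219.048 / 360
    print(specification.H)  # ~0.6952, i.e. 278.061 / 400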
Example #10
File: hunt.py  Project: Munins-eye/colour
def XYZ_to_Hunt(XYZ,
                XYZ_w,
                XYZ_b,
                L_A,
                surround=HUNT_VIEWING_CONDITIONS['Normal Scenes'],
                L_AS=None,
                CCT_w=None,
                XYZ_p=None,
                p=None,
                S=None,
                S_w=None,
                helson_judd_effect=False,
                discount_illuminant=True):
    """
    Computes the *Hunt* colour appearance model correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    XYZ_b : array_like
        *CIE XYZ* tristimulus values of background.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.
    surround : Hunt_InductionFactors, optional
         Surround viewing conditions induction factors.
    L_AS : numeric or array_like, optional
        Scotopic luminance :math:`L_{AS}` of the illuminant, approximated if
        not specified.
    CCT_w : numeric or array_like, optional
        Correlated colour temperature :math:`T_{cp}` of the illuminant, needed
        to approximate :math:`L_{AS}`.
    XYZ_p : array_like, optional
        *CIE XYZ* tristimulus values of proximal field, assumed to be equal to
        background if not specified.
    p : numeric or array_like, optional
        Simultaneous contrast / assimilation factor :math:`p` with value
        normalised to domain [-1, 0] when simultaneous contrast occurs and
        normalised to domain [0, 1] when assimilation occurs.
    S : numeric or array_like, optional
        Scotopic response :math:`S` to the stimulus, approximated using
        tristimulus values :math:`Y` of the stimulus if not specified.
    S_w : numeric or array_like, optional
        Scotopic response :math:`S_w` for the reference white, approximated
        using the tristimulus values :math:`Y_w` of the reference white if not
        specified.
    helson_judd_effect : bool, optional
        Truth value indicating whether the *Helson-Judd* effect should be
        accounted for.
    discount_illuminant : bool, optional
       Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    Hunt_Specification
        *Hunt* colour appearance model specification.

    Raises
    ------
    ValueError
        If an illegal arguments combination is specified.

    Notes
    -----

    +--------------------------+-----------------------+---------------+
    | **Domain**               | **Scale - Reference** | **Scale - 1** |
    +==========================+=======================+===============+
    | ``XYZ``                  | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+
    | ``XYZ_w``                | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+
    | ``XYZ_b``                | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+
    | ``XYZ_p``                | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+

    +--------------------------+-----------------------+---------------+
    | **Range**                | **Scale - Reference** | **Scale - 1** |
    +==========================+=======================+===============+
    | ``Hunt_Specification.h`` | [0, 360]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild2013u`, :cite:`Hunt2004b`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> XYZ_b = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> surround = HUNT_VIEWING_CONDITIONS['Normal Scenes']
    >>> CCT_w = 6504.0
    >>> XYZ_to_Hunt(XYZ, XYZ_w, XYZ_b, L_A, surround, CCT_w=CCT_w)
    ... # doctest: +ELLIPSIS
    Hunt_Specification(J=30.0462678..., C=0.1210508..., h=269.2737594..., \
s=0.0199093..., Q=22.2097654..., M=0.1238964..., H=None, HC=None)
    """
    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    XYZ_b = to_domain_100(XYZ_b)
    _X, Y, _Z = tsplit(XYZ)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    X_b, Y_b, _Z_b = tsplit(XYZ_b)

    # Arguments handling.
    if XYZ_p is not None:
        X_p, Y_p, Z_p = tsplit(to_domain_100(XYZ_p))
    else:
        X_p = X_b
        Y_p = Y_b
        Z_p = Y_b
        usage_warning('Unspecified proximal field "XYZ_p" argument, using '
                      'background "XYZ_b" as approximation!')

    if surround.N_cb is None:
        N_cb = 0.725 * spow(Y_w / Y_b, 0.2)
        usage_warning('Unspecified "N_cb" argument, using approximation: '
                      '"{0}"'.format(N_cb))
    if surround.N_bb is None:
        N_bb = 0.725 * spow(Y_w / Y_b, 0.2)
        usage_warning('Unspecified "N_bb" argument, using approximation: '
                      '"{0}"'.format(N_bb))

    if L_AS is None and CCT_w is None:
        raise ValueError('Either the scotopic luminance "L_AS" of the '
                         'illuminant or its correlated colour temperature '
                         '"CCT_w" must be specified!')
    if L_AS is None:
        L_AS = illuminant_scotopic_luminance(L_A, CCT_w)
        usage_warning(
            'Unspecified "L_AS" argument, using approximation from "CCT": '
            '"{0}"'.format(L_AS))

    if (S is None and S_w is not None) or (S is not None and S_w is None):
        raise ValueError('Either both stimulus scotopic response "S" and '
                         'reference white scotopic response "S_w" arguments '
                         'need to be specified or none of them!')
    elif S is None and S_w is None:
        S = Y
        S_w = Y_w
        usage_warning(
            'Unspecified stimulus scotopic response "S" and reference '
            'white scotopic response "S_w" arguments, using '
            'approximation: "{0}", "{1}"'.format(S, S_w))

    if p is None:
        usage_warning(
            'Unspecified simultaneous contrast / assimilation "p" '
            'argument, model will not account for simultaneous chromatic '
            'contrast!')

    XYZ_p = tstack([X_p, Y_p, Z_p])

    # Computing luminance level adaptation factor :math:`F_L`.
    F_L = luminance_level_adaptation_factor(L_A)

    # Computing test sample chromatic adaptation.
    rgb_a = chromatic_adaptation(XYZ, XYZ_w, XYZ_b, L_A, F_L, XYZ_p, p,
                                 helson_judd_effect, discount_illuminant)

    # Computing reference white chromatic adaptation.
    rgb_aw = chromatic_adaptation(XYZ_w, XYZ_w, XYZ_b, L_A, F_L, XYZ_p, p,
                                  helson_judd_effect, discount_illuminant)

    # Computing opponent colour dimensions.
    # Computing achromatic post adaptation signals.
    A_a = achromatic_post_adaptation_signal(rgb_a)
    A_aw = achromatic_post_adaptation_signal(rgb_aw)

    # Computing colour difference signals.
    C = colour_difference_signals(rgb_a)
    C_w = colour_difference_signals(rgb_aw)

    # -------------------------------------------------------------------------
    # Computing the *hue* angle :math:`h_s`.
    # -------------------------------------------------------------------------
    h = hue_angle(C)
    # hue_w = hue_angle(C_w)
    # TODO: Implement hue quadrature & composition computation.

    # -------------------------------------------------------------------------
    # Computing the correlate of *saturation* :math:`s`.
    # -------------------------------------------------------------------------
    # Computing eccentricity factors.
    e_s = eccentricity_factor(h)

    # Computing low luminance tritanopia factor :math:`F_t`.
    F_t = low_luminance_tritanopia_factor(L_A)

    M_yb = yellowness_blueness_response(C, e_s, surround.N_c, N_cb, F_t)
    M_rg = redness_greenness_response(C, e_s, surround.N_c, N_cb)
    M_yb_w = yellowness_blueness_response(C_w, e_s, surround.N_c, N_cb, F_t)
    M_rg_w = redness_greenness_response(C_w, e_s, surround.N_c, N_cb)

    # Computing overall chromatic response.
    M = overall_chromatic_response(M_yb, M_rg)
    M_w = overall_chromatic_response(M_yb_w, M_rg_w)

    s = saturation_correlate(M, rgb_a)

    # -------------------------------------------------------------------------
    # Computing the correlate of *brightness* :math:`Q`.
    # -------------------------------------------------------------------------
    # Computing achromatic signal :math:`A`.
    A = achromatic_signal(L_AS, S, S_w, N_bb, A_a)
    A_w = achromatic_signal(L_AS, S_w, S_w, N_bb, A_aw)

    Q = brightness_correlate(A, A_w, M, surround.N_b)
    brightness_w = brightness_correlate(A_w, A_w, M_w, surround.N_b)
    # TODO: Implement whiteness-blackness :math:`Q_{wb}` computation.

    # -------------------------------------------------------------------------
    # Computing the correlate of *Lightness* :math:`J`.
    # -------------------------------------------------------------------------
    J = lightness_correlate(Y_b, Y_w, Q, brightness_w)

    # -------------------------------------------------------------------------
    # Computing the correlate of *chroma* :math:`C_{94}`.
    # -------------------------------------------------------------------------
    C_94 = chroma_correlate(s, Y_b, Y_w, Q, brightness_w)

    # -------------------------------------------------------------------------
    # Computing the correlate of *colourfulness* :math:`M_{94}`.
    # -------------------------------------------------------------------------
    M_94 = colourfulness_correlate(F_L, C_94)

    return Hunt_Specification(J, C_94, from_range_degrees(h), s, Q, M_94, None,
                              None)
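A short sketch of the argument validation visible above (assuming XYZ_to_Hunt is importable from colour.appearance): calling it without either the scotopic luminance L_AS or the correlated colour temperature CCT_w raises a ValueError.

import numpy as np
from colour.appearance import XYZ_to_Hunt

XYZ = np.array([19.01, 20.00, 21.78])
XYZ_w = np.array([95.05, 100.00, 108.88])
XYZ_b = np.array([95.05, 100.00, 108.88])
L_A = 318.31

try:
    XYZ_to_Hunt(XYZ, XYZ_w, XYZ_b, L_A)  # neither L_AS nor CCT_w specified
except ValueError as error:
    print(error)  # explains that "L_AS" or "CCT_w" must be specified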
Example #11
File: cam16.py  Project: yixw/colour
def XYZ_to_CAM16(XYZ,
                 XYZ_w,
                 L_A,
                 Y_b,
                 surround=CAM16_VIEWING_CONDITIONS['Average'],
                 discount_illuminant=False):
    """
    Computes the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b`.
    surround : CAM16_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CAM16_Specification
        *CAM16* colour appearance model specification.

    Notes
    -----

    +---------------------------+-----------------------+---------------+
    | **Domain**                | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``XYZ``                   | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``XYZ_w``                 | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    +---------------------------+-----------------------+---------------+
    | **Range**                 | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``CAM16_specification.J`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.C`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.h`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.s`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.Q`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.M`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.H`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CAM16_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM16_Specification(J=41.7312079..., C=0.1033557..., h=217.0679597..., \
s=2.3450150..., Q=195.3717089..., M=0.1074367..., H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB_w = dot_vector(M_16, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    D = (np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)
         if not discount_illuminant else np.ones(L_A.shape))

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    D_RGB = (D[..., np.newaxis] * Y_w[..., np.newaxis] / RGB_w + 1 -
             D[..., np.newaxis])
    RGB_wc = D_RGB * RGB_w

    # Applying forward post-adaptation non linear response compression.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)

    # Computing achromatic responses for the whitepoint.
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Step 1
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB = dot_vector(M_16, XYZ)

    # Step 2
    RGB_c = D_RGB * RGB

    # Step 3
    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_c, F_L)

    # Step 4
    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Step 5
    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6
    # Computing achromatic responses for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Step 7
    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Step 8
    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Step 9
    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CAM16_Specification(from_range_100(J), from_range_100(C),
                               from_range_degrees(h), from_range_100(s),
                               from_range_100(Q), from_range_100(M),
                               from_range_degrees(H), None)
Example #12
def XYZ_to_CAM16(
    XYZ: ArrayLike,
    XYZ_w: ArrayLike,
    L_A: FloatingOrArrayLike,
    Y_b: FloatingOrArrayLike,
    surround: Union[
        InductionFactors_CIECAM02,
        InductionFactors_CAM16] = VIEWING_CONDITIONS_CAM16["Average"],
    discount_illuminant: Boolean = False,
) -> CAM_Specification_CAM16:
    """
    Compute the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    Parameters
    ----------
    XYZ
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w
        *CIE XYZ* tristimulus values of reference white.
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b
        Luminous factor of background :math:`Y_b` such that
        :math:`Y_b = 100 * L_b / L_w` where :math:`L_w` is the luminance of the
        light source and :math:`L_b` is the luminance of the background. For
        viewing images, :math:`Y_b` can be the average :math:`Y` value for the
        pixels in the entire image, or frequently, a :math:`Y` value of 20,
        approximating an :math:`L^*` of 50, is used.
    surround
        Surround viewing conditions induction factors.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    :class:`colour.CAM_Specification_CAM16`
        *CAM16* colour appearance model specification.

    Notes
    -----
    +------------+-----------------------+---------------+
    | **Domain** | **Scale - Reference** | **Scale - 1** |
    +============+=======================+===============+
    | ``XYZ``    | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+
    | ``XYZ_w``  | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+

    +-------------------------------+-----------------------+---------------+
    | **Range**                     | **Scale - Reference** | **Scale - 1** |
    +===============================+=======================+===============+
    | ``CAM_Specification_CAM16.J`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.C`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.h`` | [0, 360]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.s`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.Q`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.M`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.H`` | [0, 400]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = VIEWING_CONDITIONS_CAM16['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM_Specification_CAM16(J=41.7312079..., C=0.1033557..., \
h=217.0679597..., s=2.3450150..., Q=195.3717089..., M=0.1074367..., \
H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB_w = vector_dot(MATRIX_16, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    D = (np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)
         if not discount_illuminant else ones(L_A.shape))

    n, F_L, N_bb, N_cb, z = viewing_condition_dependent_parameters(
        Y_b, Y_w, L_A)

    D_RGB = (D[..., np.newaxis] * Y_w[..., np.newaxis] / RGB_w + 1 -
             D[..., np.newaxis])
    RGB_wc = D_RGB * RGB_w

    # Applying forward post-adaptation non-linear response compression.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)

    # Computing achromatic responses for the whitepoint.
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Step 1
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB = vector_dot(MATRIX_16, XYZ)

    # Step 2
    RGB_c = D_RGB * RGB

    # Step 3
    # Applying forward post-adaptation non-linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_c, F_L)

    # Step 4
    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Step 5
    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6
    # Computing achromatic responses for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Step 7
    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Step 8
    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Step 9
    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CAM_Specification_CAM16(
        as_float(from_range_100(J)),
        as_float(from_range_100(C)),
        as_float(from_range_degrees(h)),
        as_float(from_range_100(s)),
        as_float(from_range_100(Q)),
        as_float(from_range_100(M)),
        as_float(from_range_degrees(H, 400)),
        None,
    )
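A brief sketch showing that the definition is vectorised (assuming colour.XYZ_to_CAM16 is exported at the package level): stacking several XYZ samples against a single reference white yields one correlate per sample.

import numpy as np
import colour

XYZ = np.array([[19.01, 20.00, 21.78],
                [57.06, 43.06, 31.96]])
XYZ_w = np.array([95.05, 100.00, 108.88])
L_A, Y_b = 318.31, 20.0

specification = colour.XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b)
print(specification.J)  # one lightness value per row of XYZ
print(specification.h)  # one hue angle per row of XYZ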
Example #13
def XYZ_to_Kim2009(
    XYZ: ArrayLike,
    XYZ_w: ArrayLike,
    L_A: FloatingOrArrayLike,
    media: MediaParameters_Kim2009 = MEDIA_PARAMETERS_KIM2009["CRT Displays"],
    surround: InductionFactors_Kim2009 = VIEWING_CONDITIONS_KIM2009["Average"],
    discount_illuminant: Boolean = False,
    n_c: Floating = 0.57,
) -> CAM_Specification_Kim2009:
    """
    Compute the *Kim, Weyrich and Kautz (2009)* colour appearance model
    correlates from given *CIE XYZ* tristimulus values.

    Parameters
    ----------
    XYZ
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w
        *CIE XYZ* tristimulus values of reference white.
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    media
        Media parameters.
    surround
        Surround viewing conditions induction factors.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.
    n_c
        Cone response sigmoidal curve modulating factor :math:`n_c`.

    Returns
    -------
    :class:`colour.CAM_Specification_Kim2009`
       *Kim, Weyrich and Kautz (2009)* colour appearance model specification.

    Notes
    -----
    +------------+-----------------------+---------------+
    | **Domain** | **Scale - Reference** | **Scale - 1** |
    +============+=======================+===============+
    | ``XYZ``    | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+
    | ``XYZ_w``  | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+

    +---------------------------------+-----------------------+---------------+
    | **Range**                       | **Scale - Reference** | **Scale - 1** |
    +=================================+=======================+===============+
    | ``CAM_Specification_Kim2009.J`` | [0, 100]              | [0, 1]        |
    +---------------------------------+-----------------------+---------------+
    | ``CAM_Specification_Kim2009.C`` | [0, 100]              | [0, 1]        |
    +---------------------------------+-----------------------+---------------+
    | ``CAM_Specification_Kim2009.h`` | [0, 360]              | [0, 1]        |
    +---------------------------------+-----------------------+---------------+
    | ``CAM_Specification_Kim2009.s`` | [0, 100]              | [0, 1]        |
    +---------------------------------+-----------------------+---------------+
    | ``CAM_Specification_Kim2009.Q`` | [0, 100]              | [0, 1]        |
    +---------------------------------+-----------------------+---------------+
    | ``CAM_Specification_Kim2009.M`` | [0, 100]              | [0, 1]        |
    +---------------------------------+-----------------------+---------------+
    | ``CAM_Specification_Kim2009.H`` | [0, 400]              | [0, 1]        |
    +---------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Kim2009`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> media = MEDIA_PARAMETERS_KIM2009['CRT Displays']
    >>> surround = VIEWING_CONDITIONS_KIM2009['Average']
    >>> XYZ_to_Kim2009(XYZ, XYZ_w, L_A, media, surround)
    ... # doctest: +ELLIPSIS
    CAM_Specification_Kim2009(J=28.8619089..., C=0.5592455..., \
h=219.0480667..., s=9.3837797..., Q=52.7138883..., M=0.4641738..., \
H=278.0602824..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)

    # Converting *CIE XYZ* tristimulus values to *CMCCAT2000* transform
    # sharpened *RGB* values.
    RGB = vector_dot(CAT_CAT02, XYZ)
    RGB_w = vector_dot(CAT_CAT02, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    D = (
        degree_of_adaptation(surround.F, L_A)
        if not discount_illuminant
        else ones(L_A.shape)
    )

    # Computing full chromatic adaptation.
    XYZ_c = full_chromatic_adaptation_forward(RGB, RGB_w, Y_w, D)
    XYZ_wc = full_chromatic_adaptation_forward(RGB_w, RGB_w, Y_w, D)

    # Converting to *Hunt-Pointer-Estevez* colourspace.
    LMS = RGB_to_rgb(XYZ_c)
    LMS_w = RGB_to_rgb(XYZ_wc)

    # Cones absolute response.
    LMS_n_c = spow(LMS, n_c)
    LMS_w_n_c = spow(LMS_w, n_c)
    L_A_n_c = spow(L_A, n_c)
    LMS_p = LMS_n_c / (LMS_n_c + L_A_n_c)
    LMS_wp = LMS_w_n_c / (LMS_w_n_c + L_A_n_c)

    # Achromatic signal :math:`A` and :math:`A_w`.
    v_A = np.array([40, 20, 1])
    A = np.sum(v_A * LMS_p, axis=-1) / 61
    A_w = np.sum(v_A * LMS_wp, axis=-1) / 61

    # Perceived *Lightness* :math:`J_p`.
    a_j, b_j, o_j, n_j = 0.89, 0.24, 0.65, 3.65
    A_A_w = A / A_w
    J_p = spow(
        (-(A_A_w - b_j) * spow(o_j, n_j)) / (A_A_w - b_j - a_j), 1 / n_j
    )

    # Computing the media dependent *Lightness* :math:`J`.
    J = 100 * (media.E * (J_p - 1) + 1)

    # Computing the correlate of *brightness* :math:`Q`.
    n_q = 0.1308
    Q = J * spow(Y_w, n_q)

    # Opponent signals :math:`a` and :math:`b`.
    a = (1 / 11) * np.sum(np.array([11, -12, 1]) * LMS_p, axis=-1)
    b = (1 / 9) * np.sum(np.array([1, 1, -2]) * LMS_p, axis=-1)

    # Computing the correlate of *chroma* :math:`C`.
    a_k, n_k = 456.5, 0.62
    C = a_k * spow(np.sqrt(a**2 + b**2), n_k)

    # Computing the correlate of *colourfulness* :math:`M`.
    a_m, b_m = 0.11, 0.61
    M = C * (a_m * np.log10(Y_w) + b_m)

    # Computing the correlate of *saturation* :math:`s`.
    s = 100 * np.sqrt(M / Q)

    # Computing the *hue* angle :math:`h`.
    h = np.degrees(np.arctan2(b, a)) % 360

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)

    return CAM_Specification_Kim2009(
        as_float(from_range_100(J)),
        as_float(from_range_100(C)),
        as_float(from_range_degrees(h)),
        as_float(from_range_100(s)),
        as_float(from_range_100(Q)),
        as_float(from_range_100(M)),
        as_float(from_range_degrees(H, 400)),
        None,
    )
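A quick numeric cross-check of the saturation step above, using only the correlates printed in the doctest: s = 100 * sqrt(M / Q).

import numpy as np

M, Q = 0.4641738, 52.7138883  # values from the doctest above
print(100 * np.sqrt(M / Q))   # ~9.38378, matching s in the doctest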
Example #14
def Jab_to_JCh(Jab: ArrayLike) -> NDArray:
    """
    Convert from *Jab* colour representation to *JCh* colour representation.

    This definition is used to perform conversion from *CIE L\\*a\\*b\\**
    colourspace to *CIE L\\*C\\*Hab* colourspace and for other similar
    conversions. It implements a generic transformation from *Lightness*
    :math:`J`, :math:`a` and :math:`b` opponent colour dimensions to the
    correlates of *Lightness* :math:`J`, chroma :math:`C` and hue angle
    :math:`h`.

    Parameters
    ----------
    Jab
        *Jab* colour representation array.

    Returns
    -------
    :class:`numpy.ndarray`
        *JCh* colour representation array.

    Notes
    -----
    +------------+-----------------------+-----------------+
    | **Domain** | **Scale - Reference** | **Scale - 1**   |
    +============+=======================+=================+
    | ``Jab``    | ``J`` : [0, 100]      | ``J`` : [0, 1]  |
    |            |                       |                 |
    |            | ``a`` : [-100, 100]   | ``a`` : [-1, 1] |
    |            |                       |                 |
    |            | ``b`` : [-100, 100]   | ``b`` : [-1, 1] |
    +------------+-----------------------+-----------------+

    +------------+-----------------------+-----------------+
    | **Range**  | **Scale - Reference** | **Scale - 1**   |
    +============+=======================+=================+
    | ``JCh``    | ``J``  : [0, 100]     | ``J`` : [0, 1]  |
    |            |                       |                 |
    |            | ``C``  : [0, 100]     | ``C`` : [0, 1]  |
    |            |                       |                 |
    |            | ``h`` : [0, 360]      | ``h`` : [0, 1]  |
    +------------+-----------------------+-----------------+

    References
    ----------
    :cite:`CIETC1-482004m`

    Examples
    --------
    >>> Jab = np.array([41.52787529, 52.63858304, 26.92317922])
    >>> Jab_to_JCh(Jab)  # doctest: +ELLIPSIS
    array([ 41.5278752...,  59.1242590...,  27.0884878...])
    """

    L, a, b = tsplit(Jab)

    C, H = tsplit(cartesian_to_polar(tstack([a, b])))

    JCh = tstack([L, C, from_range_degrees(np.degrees(H) % 360)])

    return JCh
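A round-trip sketch (assuming the companion definition JCh_to_Jab is exported alongside Jab_to_JCh, as in colour-science): converting to JCh and back recovers the original Jab values.

import numpy as np
import colour

Jab = np.array([41.52787529, 52.63858304, 26.92317922])

JCh = colour.Jab_to_JCh(Jab)
print(JCh)                     # approximately [41.528, 59.124, 27.088]
print(colour.JCh_to_Jab(JCh))  # approximately [41.528, 52.639, 26.923], the original Jab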
Example #15
def XYZ_to_CIECAM02(XYZ,
                    XYZ_w,
                    L_A,
                    Y_b,
                    surround=CIECAM02_VIEWING_CONDITIONS['Average'],
                    discount_illuminant=False):
    """
    Computes the *CIECAM02* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b`.
    surround : CIECAM02_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CIECAM02_Specification
        *CIECAM02* colour appearance model specification.

    Notes
    -----

    +------------------------------+-----------------------+---------------+
    | **Domain**                   | **Scale - Reference** | **Scale - 1** |
    +==============================+=======================+===============+
    | ``XYZ``                      | [0, 100]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+
    | ``XYZ_w``                    | [0, 100]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+

    +------------------------------+-----------------------+---------------+
    | **Range**                    | **Scale - Reference** | **Scale - 1** |
    +==============================+=======================+===============+
    | ``CIECAM02_specification.h`` | [0, 360]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+
    | ``CIECAM02_specification.H`` | [0, 360]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild2004c`, :cite:`Luo2013`, :cite:`Moroneya`,
    :cite:`Wikipedia2007a`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CIECAM02_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CIECAM02(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CIECAM02_Specification(J=41.7310911..., C=0.1047077..., h=219.0484326..., \
s=2.3603053..., Q=195.3713259..., M=0.1088421..., H=278.0607358..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    # Converting *CIE XYZ* tristimulus values to *CMCCAT2000* transform
    # sharpened *RGB* values.
    RGB = dot_vector(CAT02_CAT, XYZ)
    RGB_w = dot_vector(CAT02_CAT, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    D = (degree_of_adaptation(surround.F, L_A)
         if not discount_illuminant else np.ones(L_A.shape))

    # Computing full chromatic adaptation.
    RGB_c = full_chromatic_adaptation_forward(RGB, RGB_w, Y_w, D)
    RGB_wc = full_chromatic_adaptation_forward(RGB_w, RGB_w, Y_w, D)

    # Converting to *Hunt-Pointer-Estevez* colourspace.
    RGB_p = RGB_to_rgb(RGB_c)
    RGB_pw = RGB_to_rgb(RGB_wc)

    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_p, F_L)
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_pw, F_L)

    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing achromatic responses for the stimulus and the whitepoint.
    A = achromatic_response_forward(RGB_a, N_bb)
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CIECAM02_Specification(J, C, from_range_degrees(h), s, Q, M,
                                  from_range_degrees(H), None)
Example #16
def XYZ_to_ZCAM(
    XYZ: ArrayLike,
    XYZ_w: ArrayLike,
    L_A: FloatingOrArrayLike,
    Y_b: FloatingOrArrayLike,
    surround: InductionFactors_ZCAM = VIEWING_CONDITIONS_ZCAM["Average"],
    discount_illuminant: Boolean = False,
) -> CAM_Specification_ZCAM:
    """
    Compute the *ZCAM* colour appearance model correlates from given *CIE XYZ*
    tristimulus values.

    Parameters
    ----------
    XYZ
        Absolute *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w
        Absolute *CIE XYZ* tristimulus values of the white under reference
        illuminant.
    L_A
        Test adapting field *luminance* :math:`L_A` in :math:`cd/m^2` such as
        :math:`L_A = L_w * Y_b / 100` (where :math:`L_w` is the luminance of the
        reference white and :math:`Y_b` is the background luminance factor).
    Y_b
        Luminous factor of background :math:`Y_b` such as
        :math:`Y_b = 100 * L_b / L_w` where :math:`L_w` is the luminance of the
        light source and :math:`L_b` is the luminance of the background. For
        viewing images, :math:`Y_b` can be the average :math:`Y` value for the
        pixels in the entire image or, frequently, a :math:`Y` value of 20,
        which approximates an :math:`L^*` of 50, is used.
    surround
        Surround viewing conditions induction factors.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    :class:`colour.CAM_Specification_ZCAM`
       *ZCAM* colour appearance model specification.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function.

    Notes
    -----
    -   *Safdar, Hardeberg and Luo (2021)* does not specify how the chromatic
        adaptation to *CIE Standard Illuminant D65* in *Step 0* should be
        performed. A one-step *Von Kries* chromatic adaptation transform is not
        symmetrical or transitive when a degree of adaptation is involved.
        *Safdar, Hardeberg and Luo (2018)* uses the *Zhai and Luo (2018)*
        two-step chromatic adaptation transform, thus it seems sensible to
        adopt this transform for the *ZCAM* colour appearance model until more
        information is available. It is worth noting that a one-step
        *Von Kries* chromatic adaptation transform with support for degree of
        adaptation produces values closer to the supplemental document than the
        *Zhai and Luo (2018)* two-step transform does, but the *ZCAM* colour
        appearance model then does not round-trip properly.
    -   The underlying *SMPTE ST 2084:2014* transfer function is an absolute
        transfer function, thus the domain and range values for the *Reference*
        and *1* scales are only indicative that the data is not affected by
        scale transformations.

    +------------+-----------------------+---------------+
    | **Domain** | **Scale - Reference** | **Scale - 1** |
    +============+=======================+===============+
    | ``XYZ``    | [UN]                  | [UN]          |
    +------------+-----------------------+---------------+
    | ``XYZ_w``  | [UN]                  | [UN]          |
    +------------+-----------------------+---------------+

    +-------------------------------+-----------------------+---------------+
    | **Range**                     | **Scale - Reference** | **Scale - 1** |
    +===============================+=======================+===============+
    | ``CAM_Specification_ZCAM.J``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.C``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.h``  | [0, 360]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.s``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.Q``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.M``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.H``  | [0, 400]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.HC`` | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.V``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.K``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.W``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Safdar2018`, :cite:`Safdar2021`, :cite:`Zhai2018`

    Examples
    --------
    >>> XYZ = np.array([185, 206, 163])
    >>> XYZ_w = np.array([256, 264, 202])
    >>> L_A = 264
    >>> Y_b = 100
    >>> surround = VIEWING_CONDITIONS_ZCAM['Average']
    >>> XYZ_to_ZCAM(XYZ, XYZ_w, L_A, Y_b, surround)
    ... # doctest: +ELLIPSIS
    CAM_Specification_ZCAM(J=92.2504437..., C=3.0216926..., h=196.3245737..., \
s=19.1319556..., Q=321.3408463..., M=10.5256217..., H=237.6114442..., \
HC=None, V=34.7006776..., K=25.8835968..., W=91.6821728...)
    """

    XYZ = to_domain_1(XYZ)
    XYZ_w = to_domain_1(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    F_s, _F, _c, _N_c = surround

    # Step 0 (Forward) - Chromatic adaptation from reference illuminant to
    # "CIE Standard Illuminant D65" illuminant using "CAT02".
    # Computing degree of adaptation :math:`D`.
    D = (degree_of_adaptation(surround.F, L_A)
         if not discount_illuminant else ones(L_A.shape))

    XYZ_D65 = chromatic_adaptation_Zhai2018(XYZ,
                                            XYZ_w,
                                            TVS_D65,
                                            D,
                                            D,
                                            transform="CAT02")

    # Step 1 (Forward) - Computing factors related with viewing conditions and
    # independent of the test stimulus.
    # Background factor :math:`F_b`
    F_b = np.sqrt(Y_b / Y_w)
    # Luminance level adaptation factor :math:`F_L`
    F_L = 0.171 * spow(L_A, 1 / 3) * (1 - np.exp(-48 / 9 * L_A))

    # Step 2 (Forward) - Computing achromatic response (:math:`I_z` and
    # :math:`I_{z,w}`), redness-greenness (:math:`a_z` and :math:`a_{z,w}`),
    # and yellowness-blueness (:math:`b_z`, :math:`b_{z,w}`).
    with domain_range_scale("ignore"):
        I_z, a_z, b_z = tsplit(XYZ_to_Izazbz(XYZ_D65, method="Safdar 2021"))
        I_z_w, _a_z_w, b_z_w = tsplit(
            XYZ_to_Izazbz(XYZ_w, method="Safdar 2021"))

    # Step 3 (Forward) - Computing hue angle :math:`h_z`
    h_z = hue_angle(a_z, b_z)

    # Step 4 (Forward) - Computing hue quadrature :math:`H`.
    H = hue_quadrature(h_z)

    # Computing eccentricity factor :math:`e_z`.
    e_z = 1.015 + np.cos(np.radians(89.038 + h_z % 360))

    # Step 5 (Forward) - Computing brightness :math:`Q_z`,
    # lightness :math:`J_z`, colourfulness :math:`M_z`, and chroma :math:`C_z`.
    Q_z_p = (1.6 * F_s) / F_b**0.12
    Q_z_m = F_s**2.2 * F_b**0.5 * spow(F_L, 0.2)
    Q_z = 2700 * spow(I_z, Q_z_p) * Q_z_m
    Q_z_w = 2700 * spow(I_z_w, Q_z_p) * Q_z_m

    J_z = 100 * (Q_z / Q_z_w)

    M_z = (100 * (a_z**2 + b_z**2)**0.37 *
           ((spow(e_z, 0.068) * spow(F_L, 0.2)) /
            (F_b**0.1 * spow(I_z_w, 0.78))))

    C_z = 100 * (M_z / Q_z_w)

    # Step 6 (Forward) - Computing saturation :math:`S_z`,
    # vividness :math:`V_z`, blackness :math:`K_z`, and whiteness :math:`W_z`.
    S_z = 100 * spow(F_L, 0.6) * np.sqrt(M_z / Q_z)

    V_z = np.sqrt((J_z - 58)**2 + 3.4 * C_z**2)

    K_z = 100 - 0.8 * np.sqrt(J_z**2 + 8 * C_z**2)

    W_z = 100 - np.sqrt((100 - J_z)**2 + C_z**2)

    return CAM_Specification_ZCAM(
        as_float(from_range_1(J_z)),
        as_float(from_range_1(C_z)),
        as_float(from_range_degrees(h_z)),
        as_float(from_range_1(S_z)),
        as_float(from_range_1(Q_z)),
        as_float(from_range_1(M_z)),
        as_float(from_range_degrees(H, 400)),
        None,
        as_float(from_range_1(V_z)),
        as_float(from_range_1(K_z)),
        as_float(from_range_1(W_z)),
    )
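A small check of the adapting-luminance relation stated for ``L_A`` in the docstring above, using plain NumPy. ``L_w`` is assumed to be the Y component (264) of the absolute white point used in the doctest; with ``Y_b = 100`` this reproduces the ``L_A = 264`` of the example.

import numpy as np

# L_A = L_w * Y_b / 100, as stated in the L_A parameter description above.
L_w = 264.0  # cd/m^2, assumed to be Y of the absolute XYZ_w in the doctest
Y_b = 100.0
L_A = L_w * Y_b / 100
print(np.isclose(L_A, 264.0))  # expected: True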
Example #17
0
def XYZ_to_RLAB(XYZ,
                XYZ_n,
                Y_n,
                sigma=RLAB_VIEWING_CONDITIONS['Average'],
                D=RLAB_D_FACTOR['Hard Copy Images']):
    """
    Computes the *RLAB* model color appearance correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_n : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_n : numeric or array_like
        Absolute adapting luminance in :math:`cd/m^2`.
    sigma : numeric or array_like, optional
        Relative luminance of the surround, see
        :attr:`colour.RLAB_VIEWING_CONDITIONS` for reference.
    D : numeric or array_like, optional
        *Discounting-the-Illuminant* factor normalised to domain [0, 1].

    Returns
    -------
    RLAB_Specification
        *RLAB* colour appearance model specification.

    Notes
    -----

    +--------------------------+-----------------------+---------------+
    | **Domain**               | **Scale - Reference** | **Scale - 1** |
    +==========================+=======================+===============+
    | ``XYZ``                  | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+
    | ``XYZ_n``                | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+

    +--------------------------+-----------------------+---------------+
    | **Range**                | **Scale - Reference** | **Scale - 1** |
    +==========================+=======================+===============+
    | ``RLAB_Specification.h`` | [0, 360]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild1996a`, :cite:`Fairchild2013w`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_n = np.array([109.85, 100, 35.58])
    >>> Y_n = 31.83
    >>> sigma = RLAB_VIEWING_CONDITIONS['Average']
    >>> D = RLAB_D_FACTOR['Hard Copy Images']
    >>> XYZ_to_RLAB(XYZ, XYZ_n, Y_n, sigma, D)  # doctest: +ELLIPSIS
    RLAB_Specification(J=49.8347069..., C=54.8700585..., h=286.4860208..., \
s=1.1010410..., HC=None, a=15.5711021..., b=-52.6142956...)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_n = to_domain_100(XYZ_n)
    Y_n = as_float_array(Y_n)
    D = as_float_array(D)
    sigma = as_float_array(sigma)

    # Converting to cone responses.
    LMS_n = XYZ_to_rgb(XYZ_n)

    # Computing the :math:`A` matrix.
    LMS_l_E = (3 * LMS_n) / (LMS_n[0] + LMS_n[1] + LMS_n[2])
    LMS_p_L = ((1 + spow(Y_n[..., np.newaxis], 1 / 3) + LMS_l_E) /
               (1 + spow(Y_n[..., np.newaxis], 1 / 3) + (1 / LMS_l_E)))
    LMS_a_L = (LMS_p_L + D[..., np.newaxis] * (1 - LMS_p_L)) / LMS_n

    aR = row_as_diagonal(LMS_a_L)
    M = dot_matrix(dot_matrix(R_MATRIX, aR), XYZ_TO_HPE_MATRIX)
    XYZ_ref = dot_vector(M, XYZ)

    X_ref, Y_ref, Z_ref = tsplit(XYZ_ref)

    # Computing the correlate of *Lightness* :math:`L^R`.
    LR = 100 * spow(Y_ref, sigma)

    # Computing opponent colour dimensions :math:`a^R` and :math:`b^R`.
    aR = 430 * (spow(X_ref, sigma) - spow(Y_ref, sigma))
    bR = 170 * (spow(Y_ref, sigma) - spow(Z_ref, sigma))

    # Computing the *hue* angle :math:`h^R`.
    hR = np.degrees(np.arctan2(bR, aR)) % 360
    # TODO: Implement hue composition computation.

    # Computing the correlate of *chroma* :math:`C^R`.
    CR = np.hypot(aR, bR)

    # Computing the correlate of *saturation* :math:`s^R`.
    sR = CR / LR

    return RLAB_Specification(LR, CR, from_range_degrees(hR), sR, None, aR, bR)
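The saturation correlate above is simply chroma over lightness (``sR = CR / LR``); a minimal sanity check against the doctest values, plain NumPy only:

import numpy as np

# Recomputing s^R = C^R / L^R from the correlates reported in the doctest.
J = 49.8347069   # L^R, Lightness correlate
C = 54.8700585   # C^R, chroma correlate
s = C / J
print(np.isclose(s, 1.1010410))  # expected: True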
Example #18
0
def XYZ_to_Nayatani95(XYZ, XYZ_n, Y_o, E_o, E_or, n=1):
    """
    Computes the *Nayatani (1995)* colour appearance model correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_n : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_o : numeric or array_like
        Luminance factor :math:`Y_o` of achromatic background as percentage
        normalised to domain [0.18, 1.0] in **'Reference'** domain-range scale.
    E_o : numeric or array_like
        Illuminance :math:`E_o` of the viewing field in lux.
    E_or : numeric or array_like
        Normalising illuminance :math:`E_{or}` in lux usually normalised to
        domain [1000, 3000].
    n : numeric or array_like, optional
        Noise term used in the non linear chromatic adaptation model.

    Returns
    -------
    Nayatani95_Specification
        *Nayatani (1995)* colour appearance model specification.

    Notes
    -----

    +--------------------------------+-----------------------+---------------+
    | **Domain**                     | **Scale - Reference** | **Scale - 1** |
    +================================+=======================+===============+
    | ``XYZ``                        | [0, 100]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+
    | ``XYZ_n``                      | [0, 100]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+

    +--------------------------------+-----------------------+---------------+
    | **Range**                      | **Scale - Reference** | **Scale - 1** |
    +================================+=======================+===============+
    | ``Nayatani95_Specification.h`` | [0, 360]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild2013ba`, :cite:`Nayatani1995a`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_n = np.array([95.05, 100.00, 108.88])
    >>> Y_o = 20.0
    >>> E_o = 5000.0
    >>> E_or = 1000.0
    >>> XYZ_to_Nayatani95(XYZ, XYZ_n, Y_o, E_o, E_or)  # doctest: +ELLIPSIS
    Nayatani95_Specification(L_star_P=49.9998829..., C=0.0133550..., \
h=257.5232268..., s=0.0133550..., Q=62.6266734..., M=0.0167262..., H=None, \
HC=None, L_star_N=50.0039154...)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_n = to_domain_100(XYZ_n)
    Y_o = as_float_array(Y_o)
    E_o = as_float_array(E_o)
    E_or = as_float_array(E_or)

    # Computing adapting luminance :math:`L_o` and normalising luminance
    # :math:`L_{or}` in :math:`cd/m^2`.
    # L_o = illuminance_to_luminance(E_o, Y_o)
    L_or = illuminance_to_luminance(E_or, Y_o)

    # Computing :math:`\\xi` :math:`\\eta`, :math:`\\zeta` values.
    xez = intermediate_values(XYZ_to_xy(XYZ_n / 100))
    xi, eta, _zeta = tsplit(xez)

    # Computing adapting field cone responses.
    RGB_o = ((
        (Y_o[..., np.newaxis] * E_o[..., np.newaxis]) / (100 * np.pi)) * xez)

    # Computing stimulus cone responses.
    RGB = XYZ_to_RGB_Nayatani95(XYZ)
    R, G, _B = tsplit(RGB)

    # Computing exponential factors of the chromatic adaptation.
    bRGB_o = exponential_factors(RGB_o)
    bL_or = beta_1(L_or)

    # Computing scaling coefficients :math:`e(R)` and :math:`e(G)`
    eR = scaling_coefficient(R, xi)
    eG = scaling_coefficient(G, eta)

    # Computing opponent colour dimensions.
    # Computing achromatic response :math:`Q`:
    Q_response = achromatic_response(RGB, bRGB_o, xez, bL_or, eR, eG, n)

    # Computing tritanopic response :math:`t`:
    t_response = tritanopic_response(RGB, bRGB_o, xez, n)

    # Computing protanopic response :math:`p`:
    p_response = protanopic_response(RGB, bRGB_o, xez, n)

    # Computing the correlate of *brightness* :math:`B_r`.
    B_r = brightness_correlate(bRGB_o, bL_or, Q_response)

    # Computing *brightness* :math:`B_{rw}` of ideal white.
    brightness_ideal_white = ideal_white_brightness_correlate(
        bRGB_o, xez, bL_or, n)

    # Computing the correlate of achromatic *Lightness* :math:`L_p^\\star`.
    L_star_P = (achromatic_lightness_correlate(Q_response))

    # Computing the correlate of normalised achromatic *Lightness*
    # :math:`L_n^\\star`.
    L_star_N = (normalised_achromatic_lightness_correlate(
        B_r, brightness_ideal_white))

    # Computing the *hue* angle :math:`\\theta`.
    theta = hue_angle(p_response, t_response)
    # TODO: Implement hue quadrature & composition computation.

    # Computing the correlate of *saturation* :math:`S`.
    S_RG, S_YB = tsplit(
        saturation_components(theta, bL_or, t_response, p_response))
    S = saturation_correlate(S_RG, S_YB)

    # Computing the correlate of *chroma* :math:`C`.
    # C_RG, C_YB = tsplit(chroma_components(L_star_P, S_RG, S_YB))
    C = chroma_correlate(L_star_P, S)

    # Computing the correlate of *colourfulness* :math:`M`.
    # TODO: Investigate components usage.
    # M_RG, M_YB = tsplit(colourfulness_components(C_RG, C_YB,
    # brightness_ideal_white))
    M = colourfulness_correlate(C, brightness_ideal_white)

    return Nayatani95_Specification(L_star_P, C, from_range_degrees(theta), S,
                                    B_r, M, None, None, L_star_N)
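Both the commented-out ``L_o`` line and the adapting-field cone responses ``RGB_o`` above use the luminance relation ``L = Y_o * E / (100 * pi)``, which appears to be what ``illuminance_to_luminance`` computes. A small worked example with the doctest figures, plain NumPy only:

import numpy as np

# Normalising luminance from the doctest figures: Y_o = 20, E_or = 1000 lux.
Y_o = 20.0
E_or = 1000.0
L_or = Y_o * E_or / (100 * np.pi)
print(round(L_or, 2))  # 63.66 cd/m^2 (approximately)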
Example #19
0
def XYZ_to_CAM16(XYZ,
                 XYZ_w,
                 L_A,
                 Y_b,
                 surround=CAM16_VIEWING_CONDITIONS['Average'],
                 discount_illuminant=False):
    """
    Computes the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b` in :math:`cd/m^2`.
    surround : CAM16_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CAM16_Specification
        *CAM16* colour appearance model specification.

    Notes
    -----

    +---------------------------+-----------------------+---------------+
    | **Domain**                | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``XYZ``                   | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``XYZ_w``                 | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    +---------------------------+-----------------------+---------------+
    | **Range**                 | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``CAM16_Specification.h`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.H`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CAM16_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM16_Specification(J=41.7312079..., C=0.1033557..., h=217.0679597..., \
s=2.3450150..., Q=195.3717089..., M=0.1074367..., H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB_w = dot_vector(M_16, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    D = (np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)
         if not discount_illuminant else np.ones(L_A.shape))

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    D_RGB = (D[..., np.newaxis] * Y_w[..., np.newaxis] / RGB_w + 1 -
             D[..., np.newaxis])
    RGB_wc = D_RGB * RGB_w

    # Applying forward post-adaptation non linear response compression.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)

    # Computing achromatic responses for the whitepoint.
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Step 1
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB = dot_vector(M_16, XYZ)

    # Step 2
    RGB_c = D_RGB * RGB

    # Step 3
    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_c, F_L)

    # Step 4
    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Step 5
    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6
    # Computing achromatic responses for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Step 7
    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Step 8
    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Step 9
    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CAM16_Specification(J, C, from_range_degrees(h), s, Q, M,
                               from_range_degrees(H), None)
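The CIECAM02 example later in this document uses the same stimulus and viewing conditions as the CAM16 doctest above, so the two models are easy to compare side by side. A minimal sketch, assuming both functions are importable from ``colour.appearance`` (an assumption about the package layout); the figures in the comment are copied from the two doctest outputs.

import numpy as np

# Assumed import path; both functions are typically also exposed at the
# top-level colour namespace.
from colour.appearance import XYZ_to_CAM16, XYZ_to_CIECAM02

XYZ = np.array([19.01, 20.00, 21.78])
XYZ_w = np.array([95.05, 100.00, 108.88])
L_A = 318.31
Y_b = 20.0

cam16 = XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b)
ciecam02 = XYZ_to_CIECAM02(XYZ, XYZ_w, L_A, Y_b)

# Per the doctests, lightness J is nearly identical (41.7312 vs 41.7311) while
# the hue angle differs by roughly two degrees (217.07 vs 219.05): CAM16 uses
# the single M_16 matrix above in place of CIECAM02's CAT02 and
# Hunt-Pointer-Estevez conversions.
print(cam16.h, ciecam02.h)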
Example #20
0
def XYZ_to_CIECAM02(XYZ,
                    XYZ_w,
                    L_A,
                    Y_b,
                    surround=CIECAM02_VIEWING_CONDITIONS['Average'],
                    discount_illuminant=False):
    """
    Computes the *CIECAM02* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b` in :math:`cd/m^2`.
    surround : CIECAM02_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CIECAM02_Specification
        *CIECAM02* colour appearance model specification.

    Notes
    -----

    +------------------------------+-----------------------+---------------+
    | **Domain**                   | **Scale - Reference** | **Scale - 1** |
    +==============================+=======================+===============+
    | ``XYZ``                      | [0, 100]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+
    | ``XYZ_w``                    | [0, 100]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+

    +------------------------------+-----------------------+---------------+
    | **Range**                    | **Scale - Reference** | **Scale - 1** |
    +==============================+=======================+===============+
    | ``CIECAM02_Specification.h`` | [0, 360]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+
    | ``CIECAM02_Specification.H`` | [0, 360]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild2004c`, :cite:`Luo2013`, :cite:`Moroneya`,
    :cite:`Wikipedia2007a`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CIECAM02_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CIECAM02(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CIECAM02_Specification(J=41.7310911..., C=0.1047077..., h=219.0484326..., \
s=2.3603053..., Q=195.3713259..., M=0.1088421..., H=278.0607358..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    # Converting *CIE XYZ* tristimulus values to *CMCCAT2000* transform
    # sharpened *RGB* values.
    RGB = dot_vector(CAT02_CAT, XYZ)
    RGB_w = dot_vector(CAT02_CAT, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    D = (degree_of_adaptation(surround.F, L_A)
         if not discount_illuminant else np.ones(L_A.shape))

    # Computing full chromatic adaptation.
    RGB_c = full_chromatic_adaptation_forward(RGB, RGB_w, Y_w, D)
    RGB_wc = full_chromatic_adaptation_forward(RGB_w, RGB_w, Y_w, D)

    # Converting to *Hunt-Pointer-Estevez* colourspace.
    RGB_p = RGB_to_rgb(RGB_c)
    RGB_pw = RGB_to_rgb(RGB_wc)

    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_p, F_L)
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_pw, F_L)

    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing achromatic responses for the stimulus and the whitepoint.
    A = achromatic_response_forward(RGB_a, N_bb)
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CIECAM02_Specification(J, C, from_range_degrees(h), s, Q, M,
                                  from_range_degrees(H), None)
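Per the Notes tables above, the tristimulus values can also be passed in the Scale - 1 domain, in which case the returned hue angle is scaled into [0, 1]. A minimal sketch reusing the doctest values; the import paths and the ``.h`` attribute access are assumptions based on the specification shown above.

import numpy as np
from colour.appearance import XYZ_to_CIECAM02  # assumed import path
from colour.utilities import domain_range_scale  # assumed import path

XYZ = np.array([19.01, 20.00, 21.78]) / 100     # Scale - 1 domain
XYZ_w = np.array([95.05, 100.00, 108.88]) / 100

with domain_range_scale('1'):
    specification = XYZ_to_CIECAM02(XYZ, XYZ_w, 318.31, 20.0)

# Per the Range table, h is divided by 360 in Scale - 1:
# 219.0484326... / 360 is roughly 0.6085.
print(np.isclose(specification.h, 219.0484326 / 360))  # expected: True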
Example #21
0
def XYZ_to_LLAB(
    XYZ,
    XYZ_0,
    Y_b,
    L,
    surround=VIEWING_CONDITIONS_LLAB[
        'Reference Samples & Images, Average Surround, Subtending < 4']):
    """
    Computes the *:math:`LLAB(l:c)`* colour appearance model correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_0 : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_b : numeric or array_like
        Luminance factor of the background in :math:`cd/m^2`.
    L : numeric or array_like
        Absolute luminance :math:`L` of reference white in :math:`cd/m^2`.
    surround : InductionFactors_LLAB, optional
         Surround viewing conditions induction factors.

    Returns
    -------
    CAM_Specification_LLAB
        *:math:`LLAB(l:c)`* colour appearance model specification.

    Notes
    -----

    +--------------------------+-----------------------+---------------+
    | **Domain**               | **Scale - Reference** | **Scale - 1** |
    +==========================+=======================+===============+
    | ``XYZ``                  | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+
    | ``XYZ_0``                | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+

    +------------------------------+-----------------------+---------------+
    | **Range**                    | **Scale - Reference** | **Scale - 1** |
    +==============================+=======================+===============+
    | ``CAM_Specification_LLAB.h`` | [0, 360]              | [0, 1]        |
    +------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild2013x`, :cite:`Luo1996b`, :cite:`Luo1996c`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_0 = np.array([95.05, 100.00, 108.88])
    >>> Y_b = 20.0
    >>> L = 318.31
    >>> surround = VIEWING_CONDITIONS_LLAB['ref_average_4_minus']
    >>> XYZ_to_LLAB(XYZ, XYZ_0, Y_b, L, surround)  # doctest: +ELLIPSIS
    CAM_Specification_LLAB(J=37.3668650..., C=0.0089496..., h=270..., \
s=0.0002395..., M=0.0190185..., HC=None, a=..., b=-0.0190185...)
    """

    _X, Y, _Z = tsplit(to_domain_100(XYZ))
    RGB = XYZ_to_RGB_LLAB(to_domain_100(XYZ))
    RGB_0 = XYZ_to_RGB_LLAB(to_domain_100(XYZ_0))

    # Reference illuminant *CIE Standard Illuminant D Series* *D65*.
    XYZ_0r = np.array([95.05, 100.00, 108.88])
    RGB_0r = XYZ_to_RGB_LLAB(XYZ_0r)

    # Computing chromatic adaptation.
    XYZ_r = chromatic_adaptation(RGB, RGB_0, RGB_0r, Y, surround.D)

    # -------------------------------------------------------------------------
    # Computing the correlate of *Lightness* :math:`L_L`.
    # -------------------------------------------------------------------------
    # Computing opponent colour dimensions.
    L_L, a, b = tsplit(
        opponent_colour_dimensions(XYZ_r, Y_b, surround.F_S, surround.F_L))

    # Computing perceptual correlates.
    # -------------------------------------------------------------------------
    # Computing the correlate of *chroma* :math:`Ch_L`.
    # -------------------------------------------------------------------------
    Ch_L = chroma_correlate(a, b)

    # -------------------------------------------------------------------------
    # Computing the correlate of *colourfulness* :math:`C_L`.
    # -------------------------------------------------------------------------
    C_L = colourfulness_correlate(L, L_L, Ch_L, surround.F_C)

    # -------------------------------------------------------------------------
    # Computing the correlate of *saturation* :math:`s_L`.
    # -------------------------------------------------------------------------
    s_L = saturation_correlate(Ch_L, L_L)

    # -------------------------------------------------------------------------
    # Computing the *hue* angle :math:`h_L`.
    # -------------------------------------------------------------------------
    h_L = hue_angle(a, b)
    # TODO: Implement hue composition computation.

    # -------------------------------------------------------------------------
    # Computing final opponent signals.
    # -------------------------------------------------------------------------
    A_L, B_L = tsplit(final_opponent_signals(C_L, h_L))

    return CAM_Specification_LLAB(L_L, Ch_L, from_range_degrees(h_L), s_L, C_L,
                                  None, A_L, B_L)
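In the doctest above the hue angle is essentially 270 degrees and ``b`` equals ``-M``, which is consistent with the final opponent signals being the polar decomposition ``A_L = C_L * cos(h_L)``, ``B_L = C_L * sin(h_L)``; this decomposition is inferred from the output, not quoted from the reference. A tiny check with plain NumPy:

import numpy as np

# Inferred relation B_L = C_L * sin(h_L), checked against the doctest output.
C_L = 0.0190185   # colourfulness M from the doctest
h_L = 270.0       # assumed from the truncated doctest value "h=270..."
B_L = C_L * np.sin(np.radians(h_L))
print(np.isclose(B_L, -0.0190185))  # expected: True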
Example #22
0
File: atd95.py  Project: wenh06/colour
def XYZ_to_ATD95(XYZ, XYZ_0, Y_0, k_1, k_2, sigma=300):
    """
    Computes the *ATD (1995)* colour vision model correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_0 : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_0 : numeric or array_like
        Absolute adapting field luminance in :math:`cd/m^2`.
    k_1 : numeric or array_like
        Application specific weight :math:`k_1`.
    k_2 : numeric or array_like
        Application specific weight :math:`k_2`.
    sigma : numeric or array_like, optional
        Constant :math:`\\sigma` varied to predict different types of data.

    Returns
    -------
    CAM_Specification_ATD95
        *ATD (1995)* colour vision model specification.

    Notes
    -----
    +---------------------------+-----------------------+---------------+
    | **Domain**                | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``XYZ``                   | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``XYZ_0``                 | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    +-------------------------------+-----------------------+---------------+
    | **Range**                     | **Scale - Reference** | **Scale - 1** |
    +===============================+=======================+===============+
    | ``CAM_Specification_ATD95.h`` | [0, 360]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+

    -   For unrelated colors, there is only self-adaptation and :math:`k_1` is
        set to 1.0 while :math:`k_2` is set to 0.0. For related colors such as
        typical colorimetric applications, :math:`k_1` is set to 0.0 and
        :math:`k_2` is set to a value between 15 and 50 *(Guth, 1995)*.

    References
    ----------
    :cite:`Fairchild2013v`, :cite:`Guth1995a`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_0 = np.array([95.05, 100.00, 108.88])
    >>> Y_0 = 318.31
    >>> k_1 = 0.0
    >>> k_2 = 50.0
    >>> XYZ_to_ATD95(XYZ, XYZ_0, Y_0, k_1, k_2)  # doctest: +ELLIPSIS
    CAM_Specification_ATD95(h=1.9089869..., C=1.2064060..., Q=0.1814003..., \
A_1=0.1787931... T_1=0.0286942..., D_1=0.0107584..., A_2=0.0192182..., \
T_2=0.0205377..., D_2=0.0107584...)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_0 = to_domain_100(XYZ_0)
    Y_0 = as_float_array(Y_0)
    k_1 = as_float_array(k_1)
    k_2 = as_float_array(k_2)
    sigma = as_float_array(sigma)

    XYZ = luminance_to_retinal_illuminance(XYZ, Y_0)
    XYZ_0 = luminance_to_retinal_illuminance(XYZ_0, Y_0)

    # Computing adaptation model.
    LMS = XYZ_to_LMS_ATD95(XYZ)
    XYZ_a = k_1[..., np.newaxis] * XYZ + k_2[..., np.newaxis] * XYZ_0
    LMS_a = XYZ_to_LMS_ATD95(XYZ_a)

    LMS_g = LMS * (sigma[..., np.newaxis] / (sigma[..., np.newaxis] + LMS_a))

    # Computing opponent colour dimensions.
    A_1, T_1, D_1, A_2, T_2, D_2 = tsplit(opponent_colour_dimensions(LMS_g))

    # Computing the correlate of *brightness* :math:`Br`.
    Br = spow(A_1**2 + T_1**2 + D_1**2, 0.5)

    # Computing the correlate of *saturation* :math:`C`.
    C = spow(T_2**2 + D_2**2, 0.5) / A_2

    # Computing the *hue* :math:`H`. Note that the reference does not take the
    # modulus of the :math:`H`, thus :math:`H` can exceed 360 degrees.
    H = T_2 / D_2

    return CAM_Specification_ATD95(from_range_degrees(H), C, Br, A_1, T_1, D_1,
                                   A_2, T_2, D_2)
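The brightness and saturation correlates above are plain quadrature sums of the opponent dimensions; feeding the component values from the doctest back in reproduces ``Q`` and ``C`` to roughly four decimal places (the inputs are truncated at the doctest ellipses). Plain NumPy only:

import numpy as np

# Recomputing Br = sqrt(A_1^2 + T_1^2 + D_1^2) and C = sqrt(T_2^2 + D_2^2) / A_2
# from the opponent components reported in the doctest above.
A_1, T_1, D_1 = 0.1787931, 0.0286942, 0.0107584
A_2, T_2, D_2 = 0.0192182, 0.0205377, 0.0107584
Br = np.sqrt(A_1 ** 2 + T_1 ** 2 + D_1 ** 2)
C = np.sqrt(T_2 ** 2 + D_2 ** 2) / A_2
print(np.isclose(Br, 0.1814003, atol=1e-4), np.isclose(C, 1.2064060, atol=1e-4))
# expected: True True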
Example #23
0
def UCS_Luo2006_to_JMh_CIECAM02(Jpapbp, coefficients):
    """
    Converts from one of the *Luo et al. (2006)* *CAM02-LCD*, *CAM02-SCD*, or
    *CAM02-UCS* colourspaces :math:`J'a'b'` array to *CIECAM02* :math:`JMh`
    correlates array.

    Parameters
    ----------
    Jpapbp : array_like
        *Luo et al. (2006)* *CAM02-LCD*, *CAM02-SCD*, or *CAM02-UCS*
        colourspaces :math:`J'a'b'` array.
    coefficients : array_like
        Coefficients of one of the *Luo et al. (2006)* *CAM02-LCD*,
        *CAM02-SCD*, or *CAM02-UCS* colourspaces.

    Returns
    -------
    ndarray
        *CIECAM02* correlates array :math:`JMh`.

    Notes
    -----

    +------------+------------------------+--------------------+
    | **Domain** |  **Scale - Reference** | **Scale - 1**      |
    +============+========================+====================+
    | ``Jpapbp`` | ``Jp_1`` : [0, 100]    | ``Jp_1`` : [0, 1]  |
    |            |                        |                    |
    |            | ``ap_1`` : [-100, 100] | ``ap_1`` : [-1, 1] |
    |            |                        |                    |
    |            | ``bp_1`` : [-100, 100] | ``bp_1`` : [-1, 1] |
    +------------+------------------------+--------------------+

    +------------+------------------------+--------------------+
    | **Range**  |  **Scale - Reference** | **Scale - 1**      |
    +============+========================+====================+
    | ``JMh``    | ``J`` : [0, 100]       | ``J`` : [0, 1]     |
    |            |                        |                    |
    |            | ``M`` : [0, 100]       | ``M`` : [0, 1]     |
    |            |                        |                    |
    |            | ``h`` : [0, 360]       | ``h`` : [0, 1]     |
    +------------+------------------------+--------------------+

    Examples
    --------
    >>> Jpapbp = np.array([54.90433134, -0.08450395, -0.06854831])
    >>> UCS_Luo2006_to_JMh_CIECAM02(
    ...     Jpapbp, COEFFICIENTS_UCS_LUO2006['CAM02-LCD'])
    ... # doctest: +ELLIPSIS
    array([  4.1731091...e+01,   1.0884217...e-01,   2.1904843...e+02])
    """

    J_p, a_p, b_p = tsplit(to_domain_100(Jpapbp))
    _K_L, c_1, c_2 = tsplit(coefficients)

    J = -J_p / (c_1 * J_p - 1 - 100 * c_1)

    M_p, h = tsplit(cartesian_to_polar(tstack([a_p, b_p])))

    M = (np.exp(M_p / (1 / c_2)) - 1) / c_2

    JMh = tstack([
        from_range_100(J),
        from_range_100(M),
        from_range_degrees(np.degrees(h) % 360)
    ])

    return JMh
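The lightness and colourfulness mappings inverted above round-trip against their forward forms, which follow by inverting the expressions in the function body: J' = (1 + 100 * c_1) * J / (1 + c_1 * J) and M' = (1 / c_2) * log(1 + c_2 * M). The c_1 and c_2 figures below are the CAM02-LCD coefficients from Luo et al. (2006) and are assumptions here (the authoritative values live in COEFFICIENTS_UCS_LUO2006); with them, J' evaluates to about 54.904, matching the first component of the Jpapbp used in the doctest.

import numpy as np

# Assumed CAM02-LCD coefficients (Luo et al., 2006).
c_1, c_2 = 0.007, 0.0053

# CIECAM02 J and M correlates quoted in the doctests of this document.
J, M = 41.7310911, 0.1088421

# Forward mappings (derived by inverting the code above).
J_p = (1 + 100 * c_1) * J / (1 + c_1 * J)
M_p = (1 / c_2) * np.log1p(c_2 * M)

# Inverse mappings, as implemented in the function body above.
J_back = -J_p / (c_1 * J_p - 1 - 100 * c_1)
M_back = np.expm1(M_p * c_2) / c_2

print(np.isclose(J_back, J), np.isclose(M_back, M))  # expected: True True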
Example #24
0
def XYZ_to_Nayatani95(XYZ, XYZ_n, Y_o, E_o, E_or, n=1):
    """
    Computes the *Nayatani (1995)* colour appearance model correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_n : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_o : numeric or array_like
        Luminance factor :math:`Y_o` of achromatic background as percentage
        normalised to domain [0.18, 1.0] in **'Reference'** domain-range scale.
    E_o : numeric or array_like
        Illuminance :math:`E_o` of the viewing field in lux.
    E_or : numeric or array_like
        Normalising illuminance :math:`E_{or}` in lux usually normalised to
        domain [1000, 3000].
    n : numeric or array_like, optional
        Noise term used in the non linear chromatic adaptation model.

    Returns
    -------
    Nayatani95_Specification
        *Nayatani (1995)* colour appearance model specification.

    Notes
    -----

    +--------------------------------+-----------------------+---------------+
    | **Domain**                     | **Scale - Reference** | **Scale - 1** |
    +================================+=======================+===============+
    | ``XYZ``                        | [0, 100]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+
    | ``XYZ_n``                      | [0, 100]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+

    +--------------------------------+-----------------------+---------------+
    | **Range**                      | **Scale - Reference** | **Scale - 1** |
    +================================+=======================+===============+
    | ``Nayatani95_Specification.h`` | [0, 360]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild2013ba`, :cite:`Nayatani1995a`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_n = np.array([95.05, 100.00, 108.88])
    >>> Y_o = 20.0
    >>> E_o = 5000.0
    >>> E_or = 1000.0
    >>> XYZ_to_Nayatani95(XYZ, XYZ_n, Y_o, E_o, E_or)  # doctest: +ELLIPSIS
    Nayatani95_Specification(L_star_P=49.9998829..., C=0.0133550..., \
h=257.5232268..., s=0.0133550..., Q=62.6266734..., M=0.0167262..., H=None, \
HC=None, L_star_N=50.0039154...)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_n = to_domain_100(XYZ_n)
    Y_o = as_float_array(Y_o)
    E_o = as_float_array(E_o)
    E_or = as_float_array(E_or)

    # Computing adapting luminance :math:`L_o` and normalising luminance
    # :math:`L_{or}` in :math:`cd/m^2`.
    # L_o = illuminance_to_luminance(E_o, Y_o)
    L_or = illuminance_to_luminance(E_or, Y_o)

    # Computing :math:`\\xi` :math:`\\eta`, :math:`\\zeta` values.
    xez = intermediate_values(XYZ_to_xy(XYZ_n / 100))
    xi, eta, _zeta = tsplit(xez)

    # Computing adapting field cone responses.
    RGB_o = (((Y_o[..., np.newaxis] * E_o[..., np.newaxis]) /
              (100 * np.pi)) * xez)

    # Computing stimulus cone responses.
    RGB = XYZ_to_RGB_Nayatani95(XYZ)
    R, G, _B = tsplit(RGB)

    # Computing exponential factors of the chromatic adaptation.
    bRGB_o = exponential_factors(RGB_o)
    bL_or = beta_1(L_or)

    # Computing scaling coefficients :math:`e(R)` and :math:`e(G)`
    eR = scaling_coefficient(R, xi)
    eG = scaling_coefficient(G, eta)

    # Computing opponent colour dimensions.
    # Computing achromatic response :math:`Q`:
    Q_response = achromatic_response(RGB, bRGB_o, xez, bL_or, eR, eG, n)

    # Computing tritanopic response :math:`t`:
    t_response = tritanopic_response(RGB, bRGB_o, xez, n)

    # Computing protanopic response :math:`p`:
    p_response = protanopic_response(RGB, bRGB_o, xez, n)

    # Computing the correlate of *brightness* :math:`B_r`.
    B_r = brightness_correlate(bRGB_o, bL_or, Q_response)

    # Computing *brightness* :math:`B_{rw}` of ideal white.
    brightness_ideal_white = ideal_white_brightness_correlate(
        bRGB_o, xez, bL_or, n)

    # Computing the correlate of achromatic *Lightness* :math:`L_p^\\star`.
    L_star_P = (achromatic_lightness_correlate(Q_response))

    # Computing the correlate of normalised achromatic *Lightness*
    # :math:`L_n^\\star`.
    L_star_N = (normalised_achromatic_lightness_correlate(
        B_r, brightness_ideal_white))

    # Computing the *hue* angle :math:`\\theta`.
    theta = hue_angle(p_response, t_response)
    # TODO: Implement hue quadrature & composition computation.

    # Computing the correlate of *saturation* :math:`S`.
    S_RG, S_YB = tsplit(
        saturation_components(theta, bL_or, t_response, p_response))
    S = saturation_correlate(S_RG, S_YB)

    # Computing the correlate of *chroma* :math:`C`.
    # C_RG, C_YB = tsplit(chroma_components(L_star_P, S_RG, S_YB))
    C = chroma_correlate(L_star_P, S)

    # Computing the correlate of *colourfulness* :math:`M`.
    # TODO: Investigate components usage.
    # M_RG, M_YB = tsplit(colourfulness_components(C_RG, C_YB,
    # brightness_ideal_white))
    M = colourfulness_correlate(C, brightness_ideal_white)

    return Nayatani95_Specification(L_star_P, C, from_range_degrees(theta), S,
                                    B_r, M, None, None, L_star_N)
Example #25
0
def UCS_Luo2006_to_JMh_CIECAM02(Jpapbp: ArrayLike,
                                coefficients: ArrayLike) -> NDArray:
    """
    Convert from one of the *Luo et al. (2006)* *CAM02-LCD*, *CAM02-SCD*, or
    *CAM02-UCS* colourspaces :math:`J'a'b'` array to *CIECAM02* :math:`JMh`
    correlates array.

    Parameters
    ----------
    Jpapbp
        *Luo et al. (2006)* *CAM02-LCD*, *CAM02-SCD*, or *CAM02-UCS*
        colourspaces :math:`J'a'b'` array.
    coefficients
        Coefficients of one of the *Luo et al. (2006)* *CAM02-LCD*,
        *CAM02-SCD*, or *CAM02-UCS* colourspaces.

    Returns
    -------
    :class:`numpy.ndarray`
        *CIECAM02* correlates array :math:`JMh`.

    Notes
    -----
    +------------+------------------------+------------------+
    | **Domain** |  **Scale - Reference** | **Scale - 1**    |
    +============+========================+==================+
    | ``Jpapbp`` | ``Jp`` : [0, 100]      | ``Jp`` : [0, 1]  |
    |            |                        |                  |
    |            | ``ap`` : [-100, 100]   | ``ap`` : [-1, 1] |
    |            |                        |                  |
    |            | ``bp`` : [-100, 100]   | ``bp`` : [-1, 1] |
    +------------+------------------------+------------------+

    +------------+------------------------+------------------+
    | **Range**  |  **Scale - Reference** | **Scale - 1**    |
    +============+========================+==================+
    | ``JMh``    | ``J`` : [0, 100]       | ``J`` : [0, 1]   |
    |            |                        |                  |
    |            | ``M`` : [0, 100]       | ``M`` : [0, 1]   |
    |            |                        |                  |
    |            | ``h`` : [0, 360]       | ``h`` : [0, 1]   |
    +------------+------------------------+------------------+

    Examples
    --------
    >>> Jpapbp = np.array([54.90433134, -0.08450395, -0.06854831])
    >>> UCS_Luo2006_to_JMh_CIECAM02(
    ...     Jpapbp, COEFFICIENTS_UCS_LUO2006['CAM02-LCD'])
    ... # doctest: +ELLIPSIS
    array([  4.1731091...e+01,   1.0884217...e-01,   2.1904843...e+02])
    """

    J_p, a_p, b_p = tsplit(to_domain_100(Jpapbp))
    _K_L, c_1, c_2 = tsplit(coefficients)

    J = -J_p / (c_1 * J_p - 1 - 100 * c_1)

    M_p, h = tsplit(cartesian_to_polar(tstack([a_p, b_p])))

    M = np.expm1(M_p / (1 / c_2)) / c_2

    JMh = tstack([
        from_range_100(J),
        from_range_100(M),
        from_range_degrees(np.degrees(h) % 360),
    ])

    return JMh
Example #26
0
def XYZ_to_LLAB(
        XYZ,
        XYZ_0,
        Y_b,
        L,
        surround=LLAB_VIEWING_CONDITIONS[
            'Reference Samples & Images, Average Surround, Subtending < 4']):
    """
    Computes the *:math:`LLAB(l:c)`* colour appearance model correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_0 : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_b : numeric or array_like
        Luminance factor of the background in :math:`cd/m^2`.
    L : numeric or array_like
        Absolute luminance :math:`L` of reference white in :math:`cd/m^2`.
    surround : LLAB_InductionFactors, optional
         Surround viewing conditions induction factors.

    Returns
    -------
    LLAB_Specification
        *:math:`LLAB(l:c)`* colour appearance model specification.

    Notes
    -----

    +--------------------------+-----------------------+---------------+
    | **Domain**               | **Scale - Reference** | **Scale - 1** |
    +==========================+=======================+===============+
    | ``XYZ``                  | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+
    | ``XYZ_0``                | [0, 100]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+

    +--------------------------+-----------------------+---------------+
    | **Range**                | **Scale - Reference** | **Scale - 1** |
    +==========================+=======================+===============+
    | ``LLAB_Specification.h`` | [0, 360]              | [0, 1]        |
    +--------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild2013x`, :cite:`Luo1996b`, :cite:`Luo1996c`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_0 = np.array([95.05, 100.00, 108.88])
    >>> Y_b = 20.0
    >>> L = 318.31
    >>> surround = LLAB_VIEWING_CONDITIONS['ref_average_4_minus']
    >>> XYZ_to_LLAB(XYZ, XYZ_0, Y_b, L, surround)  # doctest: +ELLIPSIS
    LLAB_Specification(J=37.3668650..., C=0.0089496..., h=270..., \
s=0.0002395..., M=0.0190185..., HC=None, a=..., b=-0.0190185...)
    """

    _X, Y, _Z = tsplit(to_domain_100(XYZ))
    RGB = XYZ_to_RGB_LLAB(to_domain_100(XYZ))
    RGB_0 = XYZ_to_RGB_LLAB(to_domain_100(XYZ_0))

    # Reference illuminant *CIE Standard Illuminant D Series* *D65*.
    XYZ_0r = np.array([95.05, 100.00, 108.88])
    RGB_0r = XYZ_to_RGB_LLAB(XYZ_0r)

    # Computing chromatic adaptation.
    XYZ_r = chromatic_adaptation(RGB, RGB_0, RGB_0r, Y, surround.D)

    # -------------------------------------------------------------------------
    # Computing the correlate of *Lightness* :math:`L_L`.
    # -------------------------------------------------------------------------
    # Computing opponent colour dimensions.
    L_L, a, b = tsplit(
        opponent_colour_dimensions(XYZ_r, Y_b, surround.F_S, surround.F_L))

    # Computing perceptual correlates.
    # -------------------------------------------------------------------------
    # Computing the correlate of *chroma* :math:`Ch_L`.
    # -------------------------------------------------------------------------
    Ch_L = chroma_correlate(a, b)

    # -------------------------------------------------------------------------
    # Computing the correlate of *colourfulness* :math:`C_L`.
    # -------------------------------------------------------------------------
    C_L = colourfulness_correlate(L, L_L, Ch_L, surround.F_C)

    # -------------------------------------------------------------------------
    # Computing the correlate of *saturation* :math:`s_L`.
    # -------------------------------------------------------------------------
    s_L = saturation_correlate(Ch_L, L_L)

    # -------------------------------------------------------------------------
    # Computing the *hue* angle :math:`h_L`.
    # -------------------------------------------------------------------------
    h_L = hue_angle(a, b)
    # TODO: Implement hue composition computation.

    # -------------------------------------------------------------------------
    # Computing final opponent signals.
    # -------------------------------------------------------------------------
    A_L, B_L = tsplit(final_opponent_signals(C_L, h_L))

    return LLAB_Specification(L_L, Ch_L, from_range_degrees(h_L), s_L, C_L,
                              None, A_L, B_L)
Example #27
0
def XYZ_to_ATD95(XYZ, XYZ_0, Y_0, k_1, k_2, sigma=300):
    """
    Computes the *ATD (1995)* colour vision model correlates.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_0 : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_0 : numeric or array_like
        Absolute adapting field luminance in :math:`cd/m^2`.
    k_1 : numeric or array_like
        Application specific weight :math:`k_1`.
    k_2 : numeric or array_like
        Application specific weight :math:`k_2`.
    sigma : numeric or array_like, optional
        Constant :math:`\\sigma` varied to predict different types of data.

    Returns
    -------
    ATD95_Specification
        *ATD (1995)* colour vision model specification.

    Notes
    -----
    +---------------------------+-----------------------+---------------+
    | **Domain**                | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``XYZ``                   | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``XYZ_0``                 | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    +---------------------------+-----------------------+---------------+
    | **Range**                 | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``ATD95_Specification.h`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    -   For unrelated colors, there is only self-adaptation and :math:`k_1` is
        set to 1.0 while :math:`k_2` is set to 0.0. For related colors such as
        typical colorimetric applications, :math:`k_1` is set to 0.0 and
        :math:`k_2` is set to a value between 15 and 50 *(Guth, 1995)*.

    References
    ----------
    :cite:`Fairchild2013v`, :cite:`Guth1995a`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_0 = np.array([95.05, 100.00, 108.88])
    >>> Y_0 = 318.31
    >>> k_1 = 0.0
    >>> k_2 = 50.0
    >>> XYZ_to_ATD95(XYZ, XYZ_0, Y_0, k_1, k_2)  # doctest: +ELLIPSIS
    ATD95_Specification(h=1.9089869..., C=1.2064060..., Q=0.1814003..., \
A_1=0.1787931... T_1=0.0286942..., D_1=0.0107584..., A_2=0.0192182..., \
T_2=0.0205377..., D_2=0.0107584...)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_0 = to_domain_100(XYZ_0)
    Y_0 = as_float_array(Y_0)
    k_1 = as_float_array(k_1)
    k_2 = as_float_array(k_2)
    sigma = as_float_array(sigma)

    XYZ = luminance_to_retinal_illuminance(XYZ, Y_0)
    XYZ_0 = luminance_to_retinal_illuminance(XYZ_0, Y_0)

    # Computing adaptation model.
    LMS = XYZ_to_LMS_ATD95(XYZ)
    XYZ_a = k_1[..., np.newaxis] * XYZ + k_2[..., np.newaxis] * XYZ_0
    LMS_a = XYZ_to_LMS_ATD95(XYZ_a)

    LMS_g = LMS * (sigma[..., np.newaxis] / (sigma[..., np.newaxis] + LMS_a))

    # Computing opponent colour dimensions.
    A_1, T_1, D_1, A_2, T_2, D_2 = tsplit(opponent_colour_dimensions(LMS_g))

    # Computing the correlate of *brightness* :math:`Br`.
    Br = spow(A_1 ** 2 + T_1 ** 2 + D_1 ** 2, 0.5)

    # Computing the correlate of *saturation* :math:`C`.
    C = spow(T_2 ** 2 + D_2 ** 2, 0.5) / A_2

    # Computing the *hue* :math:`H`. Note that the reference does not take the
    # modulus of the :math:`H`, thus :math:`H` can exceed 360 degrees.
    H = T_2 / D_2

    return ATD95_Specification(
        from_range_degrees(H), C, Br, A_1, T_1, D_1, A_2, T_2, D_2)