def XYZ_to_CAM16(XYZ,
                 XYZ_w,
                 L_A,
                 Y_b,
                 surround=CAM16_VIEWING_CONDITIONS['Average'],
                 discount_illuminant=False):
    """
    Computes the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b` in :math:`cd/m^2`.
    surround : CAM16_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CAM16_Specification
        *CAM16* colour appearance model specification.

    Notes
    -----

    +---------------------------+-----------------------+---------------+
    | **Domain**                | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``XYZ``                   | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``XYZ_w``                 | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    +---------------------------+-----------------------+---------------+
    | **Range**                 | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``CAM16_specification.J`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.C`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.h`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.s`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.Q`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.M`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.H`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    -   NOTE(review): ``hue_quadrature`` computes :math:`H` on a [0, 400]
        quadrature scale, while ``from_range_degrees`` scales by 360 on
        *Scale - 1*; the advertised ``H`` range above looks inexact — confirm
        against newer upstream code, which scales the quadrature with
        ``from_range_degrees(H, 400)``.

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CAM16_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM16_Specification(J=41.7312079..., C=0.1033557..., h=217.0679597..., \
s=2.3450150..., Q=195.3717089..., M=0.1074367..., H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB_w = dot_vector(M_16, XYZ_w)

    # Computing degree of adaptation :math:`D`, clipped to [0, 1]; when the
    # illuminant is discounted, adaptation is taken as complete (D = 1).
    D = (np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)
         if not discount_illuminant else np.ones(L_A.shape))

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_b if False else Y_w, L_A)) if False else tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    # Per-channel adaptation factors: D_RGB = D * Y_w / RGB_w + 1 - D.
    D_RGB = (D[..., np.newaxis] * Y_w[..., np.newaxis] / RGB_w + 1 -
             D[..., np.newaxis])
    RGB_wc = D_RGB * RGB_w

    # Applying forward post-adaptation non linear response compression.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)

    # Computing achromatic responses for the whitepoint.
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Step 1
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB = dot_vector(M_16, XYZ)

    # Step 2
    # Chromatic adaptation of the stimulus with the whitepoint factors.
    RGB_c = D_RGB * RGB

    # Step 3
    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_c, F_L)

    # Step 4
    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Step 5
    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6
    # Computing achromatic responses for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Step 7
    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Step 8
    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Step 9
    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CAM16_Specification(from_range_100(J), from_range_100(C),
                               from_range_degrees(h), from_range_100(s),
                               from_range_100(Q), from_range_100(M),
                               from_range_degrees(H), None)
def XYZ_to_CAM16(
    XYZ: ArrayLike,
    XYZ_w: ArrayLike,
    L_A: FloatingOrArrayLike,
    Y_b: FloatingOrArrayLike,
    surround: Union[
        InductionFactors_CIECAM02, InductionFactors_CAM16
    ] = VIEWING_CONDITIONS_CAM16["Average"],
    discount_illuminant: Boolean = False,
) -> CAM_Specification_CAM16:
    """
    Compute the *CAM16* colour appearance model correlates of the given
    *CIE XYZ* tristimulus values (forward model).

    Parameters
    ----------
    XYZ
        *CIE XYZ* tristimulus values of the test sample / stimulus.
    XYZ_w
        *CIE XYZ* tristimulus values of the reference white.
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, often taken
        as 20% of the luminance of a white object in the scene.
    Y_b
        Luminous factor of background :math:`Y_b` such as
        :math:`Y_b = 100 x L_b / L_w` where :math:`L_w` is the luminance of
        the light source and :math:`L_b` is the luminance of the background.
        For viewing images, :math:`Y_b` can be the average :math:`Y` value of
        the image pixels, or frequently a value of 20 (an :math:`L^*` of
        roughly 50) is used.
    surround
        Surround viewing conditions induction factors.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    :class:`colour.CAM_Specification_CAM16`
        *CAM16* colour appearance model specification.

    Notes
    -----
    +------------+-----------------------+---------------+
    | **Domain** | **Scale - Reference** | **Scale - 1** |
    +============+=======================+===============+
    | ``XYZ``    | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+
    | ``XYZ_w``  | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+

    +-------------------------------+-----------------------+---------------+
    | **Range**                     | **Scale - Reference** | **Scale - 1** |
    +===============================+=======================+===============+
    | ``CAM_Specification_CAM16.J`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.C`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.h`` | [0, 360]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.s`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.Q`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.M`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.H`` | [0, 400]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = VIEWING_CONDITIONS_CAM16['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM_Specification_CAM16(J=41.7312079..., C=0.1033557..., \
h=217.0679597..., s=2.3450150..., Q=195.3717089..., M=0.1074367..., \
H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0 - Whitepoint processing: sharpen the reference white with the
    # *CAT16* matrix.
    RGB_w = vector_dot(MATRIX_16, XYZ_w)

    # Degree of adaptation :math:`D`; complete adaptation (all ones) when the
    # illuminant is discounted, otherwise computed from the surround factor
    # and clipped to [0, 1].
    if discount_illuminant:
        D = ones(L_A.shape)
    else:
        D = np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)

    n, F_L, N_bb, N_cb, z = viewing_condition_dependent_parameters(
        Y_b, Y_w, L_A)

    # Per-channel adaptation factors: D_RGB = D * Y_w / RGB_w + 1 - D.
    D_n = D[..., np.newaxis]
    D_RGB = D_n * Y_w[..., np.newaxis] / RGB_w + 1 - D_n
    RGB_wc = D_RGB * RGB_w

    # Compressed whitepoint response and its achromatic signal :math:`A_w`.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Steps 1-3 - Stimulus processing: sharpen, chromatically adapt and apply
    # the forward post-adaptation non-linear response compression.
    RGB = vector_dot(MATRIX_16, XYZ)
    RGB_a = post_adaptation_non_linear_response_compression_forward(
        D_RGB * RGB, F_L)

    # Step 4 - Opponent dimensions and *hue* angle :math:`h`.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))
    h = hue_angle(a, b)

    # Step 5 - Eccentricity factor :math:`e_t` and hue quadrature :math:`H`.
    e_t = eccentricity_factor(h)
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6 - Achromatic response :math:`A` for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Steps 7-9 - Appearance correlates: *Lightness* :math:`J`, *brightness*
    # :math:`Q`, *chroma* :math:`C`, *colourfulness* :math:`M` and
    # *saturation* :math:`s`.
    J = lightness_correlate(A, A_w, surround.c, z)
    Q = brightness_correlate(surround.c, J, A_w, F_L)
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)
    M = colourfulness_correlate(C, F_L)
    s = saturation_correlate(M, Q)

    correlates = (
        from_range_100(J),
        from_range_100(C),
        from_range_degrees(h),
        from_range_100(s),
        from_range_100(Q),
        from_range_100(M),
        from_range_degrees(H, 400),
    )

    return CAM_Specification_CAM16(*[as_float(c) for c in correlates], None)
def XYZ_to_CAM16(XYZ,
                 XYZ_w,
                 L_A,
                 Y_b,
                 surround=CAM16_VIEWING_CONDITIONS['Average'],
                 discount_illuminant=False):
    """
    Computes the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b` in :math:`cd/m^2`.
    surround : CAM16_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CAM16_Specification
        *CAM16* colour appearance model specification.

    Notes
    -----
    +---------------------------+-----------------------+---------------+
    | **Domain**                | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``XYZ``                   | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``XYZ_w``                 | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    +---------------------------+-----------------------+---------------+
    | **Range**                 | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``CAM16_Specification.J`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.C`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.h`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.s`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.Q`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.M`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.H`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CAM16_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM16_Specification(J=41.7312079..., C=0.1033557..., h=217.0679597..., \
s=2.3450150..., Q=195.3717089..., M=0.1074367..., H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB_w = dot_vector(M_16, XYZ_w)

    # Computing degree of adaptation :math:`D`, clipped to [0, 1]; when the
    # illuminant is discounted, adaptation is taken as complete (D = 1).
    D = (np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)
         if not discount_illuminant else np.ones(L_A.shape))

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    # Per-channel adaptation factors: D_RGB = D * Y_w / RGB_w + 1 - D.
    D_RGB = (D[..., np.newaxis] * Y_w[..., np.newaxis] / RGB_w + 1 -
             D[..., np.newaxis])
    RGB_wc = D_RGB * RGB_w

    # Applying forward post-adaptation non linear response compression.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)

    # Computing achromatic responses for the whitepoint.
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Step 1
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB = dot_vector(M_16, XYZ)

    # Step 2
    # Chromatic adaptation of the stimulus with the whitepoint factors.
    RGB_c = D_RGB * RGB

    # Step 3
    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_c, F_L)

    # Step 4
    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Step 5
    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6
    # Computing achromatic responses for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Step 7
    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Step 8
    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Step 9
    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    # Inputs were converted with "to_domain_100", so the correlates must be
    # converted back with "from_range_100" / "from_range_degrees" to honour
    # the active domain-range scale (previously J, C, s, Q and M were
    # returned unscaled, breaking *Scale - 1* round-tripping).
    return CAM16_Specification(from_range_100(J), from_range_100(C),
                               from_range_degrees(h), from_range_100(s),
                               from_range_100(Q), from_range_100(M),
                               from_range_degrees(H), None)
def XYZ_to_CAM16(XYZ,
                 XYZ_w,
                 L_A,
                 Y_b,
                 surround=CAM16_VIEWING_CONDITIONS['Average'],
                 discount_illuminant=False):
    """
    Computes the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus in domain
        [0, 100].
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white in domain [0, 100].
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often taken
        to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b` in :math:`cd/m^2`.
    surround : CAM16_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CAM16_Specification
        *CAM16* colour appearance model specification.

    Warning
    -------
    The input domain of that definition is non standard!

    Notes
    -----
    -   Input *CIE XYZ* tristimulus values are in domain [0, 100].
    -   Input *CIE XYZ_w* tristimulus values are in domain [0, 100].

    References
    ----------
    -   :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CAM16_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM16_Specification(J=41.7312079..., C=0.1033557..., h=217.0679597..., \
s=2.3450150..., Q=195.3717089..., M=0.1074367..., H=275.5949861..., HC=None)
    """

    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = np.asarray(L_A)
    Y_b = np.asarray(Y_b)

    # Step 0
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB_w = dot_vector(M_16, XYZ_w)

    # Computing degree of adaptation :math:`D`, clipped to [0, 1]; when the
    # illuminant is discounted, adaptation is complete. An array of ones is
    # used (rather than the scalar 1) so that the "[..., np.newaxis]"
    # broadcasting below does not raise *TypeError*.
    D = (np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)
         if not discount_illuminant else np.ones(L_A.shape))

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    # Per-channel adaptation factors: D_RGB = D * Y_w / RGB_w + 1 - D; the
    # *CAM16* / *CIECAM02* formulation uses the scalar whitepoint luminance
    # :math:`Y_w`, not the full *CIE XYZ_w* vector.
    D_RGB = (D[..., np.newaxis] * Y_w[..., np.newaxis] / RGB_w + 1 -
             D[..., np.newaxis])
    RGB_wc = D_RGB * RGB_w

    # Applying forward post-adaptation non linear response compression.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)

    # Computing achromatic responses for the whitepoint.
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Step 1
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB = dot_vector(M_16, XYZ)

    # Step 2
    # Chromatic adaptation of the stimulus with the whitepoint factors.
    RGB_c = D_RGB * RGB

    # Step 3
    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_c, F_L)

    # Step 4
    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Step 5
    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6
    # Computing achromatic responses for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Step 7
    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Step 8
    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Step 9
    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CAM16_Specification(J, C, h, s, Q, M, H, None)