def XYZ_to_CAM16(XYZ,
                 XYZ_w,
                 L_A,
                 Y_b,
                 surround=CAM16_VIEWING_CONDITIONS['Average'],
                 discount_illuminant=False):
    """
    Computes the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often
        taken to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b` in :math:`cd/m^2`.
    surround : CAM16_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CAM16_Specification
        *CAM16* colour appearance model specification.

    Notes
    -----
    +---------------------------+-----------------------+---------------+
    | **Domain**                | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``XYZ``                   | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``XYZ_w``                 | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    +---------------------------+-----------------------+---------------+
    | **Range**                 | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``CAM16_specification.J`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.C`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.h`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.s`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.Q`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.M`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_specification.H`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CAM16_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM16_Specification(J=41.7312079..., C=0.1033557..., h=217.0679597..., \
s=2.3450150..., Q=195.3717089..., M=0.1074367..., H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0: sharpened cone responses of the reference white.
    RGB_w = dot_vector(M_16, XYZ_w)

    # Degree of adaptation :math:`D`, forced to full adaptation when the
    # illuminant is discounted.
    if discount_illuminant:
        D = np.ones(L_A.shape)
    else:
        D = np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    # Per-channel adaptation gains applied to both whitepoint and stimulus.
    D_n = D[..., np.newaxis]
    D_RGB = D_n * Y_w[..., np.newaxis] / RGB_w + 1 - D_n
    RGB_wc = D_RGB * RGB_w

    # Forward post-adaptation non linear response compression and
    # achromatic response for the whitepoint.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Steps 1-3: sharpen, adapt and compress the test stimulus.
    RGB_a = post_adaptation_non_linear_response_compression_forward(
        D_RGB * dot_vector(M_16, XYZ), F_L)

    # Steps 4-5: opponent dimensions, hue angle :math:`h`, eccentricity
    # factor :math:`e_t` and hue quadrature :math:`H`.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))
    h = hue_angle(a, b)
    e_t = eccentricity_factor(h)
    H = hue_quadrature(h)

    # TODO: Compute hue composition.

    # Steps 6-9: achromatic response and appearance correlates.
    A = achromatic_response_forward(RGB_a, N_bb)
    J = lightness_correlate(A, A_w, surround.c, z)
    Q = brightness_correlate(surround.c, J, A_w, F_L)
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)
    M = colourfulness_correlate(C, F_L)
    s = saturation_correlate(M, Q)

    return CAM16_Specification(
        from_range_100(J),
        from_range_100(C),
        from_range_degrees(h),
        from_range_100(s),
        from_range_100(Q),
        from_range_100(M),
        from_range_degrees(H),
        None)
def XYZ_to_CAM16(XYZ,
                 XYZ_w,
                 L_A,
                 Y_b,
                 surround=CAM16_VIEWING_CONDITIONS['Average'],
                 discount_illuminant=False):
    """
    Computes the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white.
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often
        taken to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b` in :math:`cd/m^2`.
    surround : CAM16_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CAM16_Specification
        *CAM16* colour appearance model specification.

    Notes
    -----
    +---------------------------+-----------------------+---------------+
    | **Domain**                | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``XYZ``                   | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``XYZ_w``                 | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    +---------------------------+-----------------------+---------------+
    | **Range**                 | **Scale - Reference** | **Scale - 1** |
    +===========================+=======================+===============+
    | ``CAM16_Specification.J`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.C`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.h`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.s`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.Q`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.M`` | [0, 100]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+
    | ``CAM16_Specification.H`` | [0, 360]              | [0, 1]        |
    +---------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CAM16_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM16_Specification(J=41.7312079..., C=0.1033557..., h=217.0679597..., \
s=2.3450150..., Q=195.3717089..., M=0.1074367..., H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB_w = dot_vector(M_16, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    D = (np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)
         if not discount_illuminant else np.ones(L_A.shape))

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    D_RGB = (D[..., np.newaxis] * Y_w[..., np.newaxis] / RGB_w + 1 -
             D[..., np.newaxis])
    RGB_wc = D_RGB * RGB_w

    # Applying forward post-adaptation non linear response compression.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)

    # Computing achromatic responses for the whitepoint.
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Step 1
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB = dot_vector(M_16, XYZ)

    # Step 2
    RGB_c = D_RGB * RGB

    # Step 3
    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_c,
                                                                    F_L)

    # Step 4
    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Step 5
    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6
    # Computing achromatic responses for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Step 7
    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Step 8
    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Step 9
    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    # All correlates are converted back from the reference scale so that
    # the output honours the active domain-range scale, consistently with
    # the input conversion through "to_domain_100" above: previously only
    # "h" and "H" were converted, leaving "J", "C", "s", "Q" and "M"
    # un-scaled under the *Scale - 1* convention.
    return CAM16_Specification(from_range_100(J), from_range_100(C),
                               from_range_degrees(h), from_range_100(s),
                               from_range_100(Q), from_range_100(M),
                               from_range_degrees(H), None)
def XYZ_to_CAM16(XYZ,
                 XYZ_w,
                 L_A,
                 Y_b,
                 surround=CAM16_VIEWING_CONDITIONS['Average'],
                 discount_illuminant=False):
    """
    Computes the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    This is the *forward* implementation.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus in domain
        [0, 100].
    XYZ_w : array_like
        *CIE XYZ* tristimulus values of reference white in domain [0, 100].
    L_A : numeric or array_like
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often
        taken to be 20% of the luminance of a white object in the scene).
    Y_b : numeric or array_like
        Relative luminance of background :math:`Y_b` in :math:`cd/m^2`.
    surround : CAM16_InductionFactors, optional
        Surround viewing conditions induction factors.
    discount_illuminant : bool, optional
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    CAM16_Specification
        *CAM16* colour appearance model specification.

    Warning
    -------
    The input domain of that definition is non standard!

    Notes
    -----
    -   Input *CIE XYZ* tristimulus values are in domain [0, 100].
    -   Input *CIE XYZ_w* tristimulus values are in domain [0, 100].

    References
    ----------
    -   :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = CAM16_VIEWING_CONDITIONS['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM16_Specification(J=41.7180250..., C=11.9413446..., h=210.3838955..., \
s=25.3564036..., Q=193.0617673..., M=12.4128523..., H=267.0983345..., HC=None)
    """

    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = np.asarray(L_A)
    Y_b = np.asarray(Y_b)

    # Step 0
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB_w = dot_vector(M_16, XYZ_w)

    # Computing degree of adaptation :math:`D`.
    # NOTE: "np.ones(L_A.shape)" replaces a bare scalar "1": the scalar
    # broke the "D[..., np.newaxis]" indexing below with a "TypeError"
    # whenever "discount_illuminant" was True.
    D = (np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)
         if not discount_illuminant else np.ones(L_A.shape))

    n, F_L, N_bb, N_cb, z = tsplit(
        viewing_condition_dependent_parameters(Y_b, Y_w, L_A))

    # NOTE(review): the adaptation gains here use the full "XYZ_w" vector
    # per channel, whereas other implementations in this file use the
    # scalar "Y_w"; this version's doctest values depend on this form, so
    # it is kept as-is -- confirm against :cite:`Li2017` before unifying.
    D_RGB = D[..., np.newaxis] * XYZ_w / RGB_w + 1 - D[..., np.newaxis]
    RGB_wc = D_RGB * RGB_w

    # Applying forward post-adaptation non linear response compression.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)

    # Computing achromatic responses for the whitepoint.
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Step 1
    # Converting *CIE XYZ* tristimulus values to sharpened *RGB* values.
    RGB = dot_vector(M_16, XYZ)

    # Step 2
    RGB_c = D_RGB * RGB

    # Step 3
    # Applying forward post-adaptation non linear response compression.
    RGB_a = post_adaptation_non_linear_response_compression_forward(RGB_c,
                                                                    F_L)

    # Step 4
    # Converting to preliminary cartesian coordinates.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))

    # Computing the *hue* angle :math:`h`.
    h = hue_angle(a, b)

    # Step 5
    # Computing eccentricity factor *e_t*.
    e_t = eccentricity_factor(h)

    # Computing hue :math:`h` quadrature :math:`H`.
    H = hue_quadrature(h)
    # TODO: Compute hue composition.

    # Step 6
    # Computing achromatic responses for the stimulus.
    A = achromatic_response_forward(RGB_a, N_bb)

    # Step 7
    # Computing the correlate of *Lightness* :math:`J`.
    J = lightness_correlate(A, A_w, surround.c, z)

    # Step 8
    # Computing the correlate of *brightness* :math:`Q`.
    Q = brightness_correlate(surround.c, J, A_w, F_L)

    # Step 9
    # Computing the correlate of *chroma* :math:`C`.
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)

    # Computing the correlate of *colourfulness* :math:`M`.
    M = colourfulness_correlate(C, F_L)

    # Computing the correlate of *saturation* :math:`s`.
    s = saturation_correlate(M, Q)

    return CAM16_Specification(J, C, h, s, Q, M, H, None)
def XYZ_to_CAM16(
    XYZ: ArrayLike,
    XYZ_w: ArrayLike,
    L_A: FloatingOrArrayLike,
    Y_b: FloatingOrArrayLike,
    surround: Union[
        InductionFactors_CIECAM02, InductionFactors_CAM16
    ] = VIEWING_CONDITIONS_CAM16["Average"],
    discount_illuminant: Boolean = False,
) -> CAM_Specification_CAM16:
    """
    Compute the *CAM16* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    Parameters
    ----------
    XYZ
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w
        *CIE XYZ* tristimulus values of reference white.
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`, (often
        taken to be 20% of the luminance of a white object in the scene).
    Y_b
        Luminous factor of background :math:`Y_b` such as
        :math:`Y_b = 100 x L_b / L_w` where :math:`L_w` is the luminance of
        the light source and :math:`L_b` is the luminance of the background.
        For viewing images, :math:`Y_b` can be the average :math:`Y` value
        for the pixels in the entire image, or frequently, a :math:`Y` value
        of 20, approximate an :math:`L^*` of 50 is used.
    surround
        Surround viewing conditions induction factors.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    :class:`colour.CAM_Specification_CAM16`
        *CAM16* colour appearance model specification.

    Notes
    -----
    +------------+-----------------------+---------------+
    | **Domain** | **Scale - Reference** | **Scale - 1** |
    +============+=======================+===============+
    | ``XYZ``    | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+
    | ``XYZ_w``  | [0, 100]              | [0, 1]        |
    +------------+-----------------------+---------------+

    +-------------------------------+-----------------------+---------------+
    | **Range**                     | **Scale - Reference** | **Scale - 1** |
    +===============================+=======================+===============+
    | ``CAM_Specification_CAM16.J`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.C`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.h`` | [0, 360]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.s`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.Q`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.M`` | [0, 100]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_CAM16.H`` | [0, 400]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Li2017`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> Y_b = 20.0
    >>> surround = VIEWING_CONDITIONS_CAM16['Average']
    >>> XYZ_to_CAM16(XYZ, XYZ_w, L_A, Y_b, surround)  # doctest: +ELLIPSIS
    CAM_Specification_CAM16(J=41.7312079..., C=0.1033557..., \
h=217.0679597..., s=2.3450150..., Q=195.3717089..., M=0.1074367..., \
H=275.5949861..., HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Step 0: sharpened cone responses of the reference white.
    RGB_w = vector_dot(MATRIX_16, XYZ_w)

    # Degree of adaptation :math:`D`, forced to full adaptation when the
    # illuminant is discounted.
    if discount_illuminant:
        D = ones(L_A.shape)
    else:
        D = np.clip(degree_of_adaptation(surround.F, L_A), 0, 1)

    n, F_L, N_bb, N_cb, z = viewing_condition_dependent_parameters(
        Y_b, Y_w, L_A)

    # Per-channel adaptation gains applied to both whitepoint and stimulus.
    D_n = D[..., np.newaxis]
    D_RGB = D_n * Y_w[..., np.newaxis] / RGB_w + 1 - D_n
    RGB_wc = D_RGB * RGB_w

    # Forward post-adaptation non-linear response compression and
    # achromatic response for the whitepoint.
    RGB_aw = post_adaptation_non_linear_response_compression_forward(
        RGB_wc, F_L)
    A_w = achromatic_response_forward(RGB_aw, N_bb)

    # Steps 1-3: sharpen, adapt and compress the test stimulus.
    RGB_a = post_adaptation_non_linear_response_compression_forward(
        D_RGB * vector_dot(MATRIX_16, XYZ), F_L)

    # Steps 4-5: opponent dimensions, hue angle :math:`h`, eccentricity
    # factor :math:`e_t` and hue quadrature :math:`H`.
    a, b = tsplit(opponent_colour_dimensions_forward(RGB_a))
    h = hue_angle(a, b)
    e_t = eccentricity_factor(h)
    H = hue_quadrature(h)

    # TODO: Compute hue composition.

    # Steps 6-9: achromatic response and appearance correlates.
    A = achromatic_response_forward(RGB_a, N_bb)
    J = lightness_correlate(A, A_w, surround.c, z)
    Q = brightness_correlate(surround.c, J, A_w, F_L)
    C = chroma_correlate(J, n, surround.N_c, N_cb, e_t, a, b, RGB_a)
    M = colourfulness_correlate(C, F_L)
    s = saturation_correlate(M, Q)

    return CAM_Specification_CAM16(
        as_float(from_range_100(J)),
        as_float(from_range_100(C)),
        as_float(from_range_degrees(h)),
        as_float(from_range_100(s)),
        as_float(from_range_100(Q)),
        as_float(from_range_100(M)),
        as_float(from_range_degrees(H, 400)),
        None,
    )
def XYZ_to_ZCAM(
    XYZ: ArrayLike,
    XYZ_w: ArrayLike,
    L_A: FloatingOrArrayLike,
    Y_b: FloatingOrArrayLike,
    surround: InductionFactors_ZCAM = VIEWING_CONDITIONS_ZCAM["Average"],
    discount_illuminant: Boolean = False,
) -> CAM_Specification_ZCAM:
    """
    Compute the *ZCAM* colour appearance model correlates from given
    *CIE XYZ* tristimulus values.

    Parameters
    ----------
    XYZ
        Absolute *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w
        Absolute *CIE XYZ* tristimulus values of the white under reference
        illuminant.
    L_A
        Test adapting field *luminance* :math:`L_A` in :math:`cd/m^2` such
        as :math:`L_A = L_w * Y_b / 100` (where :math:`L_w` is luminance of
        the reference white and :math:`Y_b` is the background luminance
        factor).
    Y_b
        Luminous factor of background :math:`Y_b` such as
        :math:`Y_b = 100 * L_b / L_w` where :math:`L_w` is the luminance of
        the light source and :math:`L_b` is the luminance of the background.
        For viewing images, :math:`Y_b` can be the average :math:`Y` value
        for the pixels in the entire image, or frequently, a :math:`Y` value
        of 20, approximate an :math:`L^*` of 50 is used.
    surround
        Surround viewing conditions induction factors.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    :class:`colour.CAM_Specification_ZCAM`
        *ZCAM* colour appearance model specification.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function.

    Notes
    -----
    -   *Safdar, Hardeberg and Luo (2021)* does not specify how the
        chromatic adaptation to *CIE Standard Illuminant D65* in *Step 0*
        should be performed. A one-step *Von Kries* chromatic adaptation
        transform is not symmetrical or transitive when a degree of
        adaptation is involved. *Safdar, Hardeberg and Luo (2018)* uses
        *Zhai and Luo (2018)* two-steps chromatic adaptation transform,
        thus it seems sensible to adopt this transform for the *ZCAM*
        colour appearance model until more information is available. It is
        worth noting that a one-step *Von Kries* chromatic adaptation
        transform with support for degree of adaptation produces values
        closer to the supplemental document compared to the
        *Zhai and Luo (2018)* two-steps chromatic adaptation transform but
        then the *ZCAM* colour appearance model does not round-trip
        properly.
    -   The underlying *SMPTE ST 2084:2014* transfer function is an
        absolute transfer function, thus the domain and range values for
        the *Reference* and *1* scales are only indicative that the data is
        not affected by scale transformations.

    +------------+-----------------------+---------------+
    | **Domain** | **Scale - Reference** | **Scale - 1** |
    +============+=======================+===============+
    | ``XYZ``    | [UN]                  | [UN]          |
    +------------+-----------------------+---------------+
    | ``XYZ_tw`` | [UN]                  | [UN]          |
    +------------+-----------------------+---------------+
    | ``XYZ_rw`` | [UN]                  | [UN]          |
    +------------+-----------------------+---------------+

    +-------------------------------+-----------------------+---------------+
    | **Range**                     | **Scale - Reference** | **Scale - 1** |
    +===============================+=======================+===============+
    | ``CAM_Specification_ZCAM.J``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.C``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.h``  | [0, 360]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.s``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.Q``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.M``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.H``  | [0, 400]              | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.HC`` | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.V``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.K``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+
    | ``CAM_Specification_ZCAM.W``  | [UN]                  | [0, 1]        |
    +-------------------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Safdar2018`, :cite:`Safdar2021`, :cite:`Zhai2018`

    Examples
    --------
    >>> XYZ = np.array([185, 206, 163])
    >>> XYZ_w = np.array([256, 264, 202])
    >>> L_A = 264
    >>> Y_b = 100
    >>> surround = VIEWING_CONDITIONS_ZCAM['Average']
    >>> XYZ_to_ZCAM(XYZ, XYZ_w, L_A, Y_b, surround)
    ... # doctest: +ELLIPSIS
    CAM_Specification_ZCAM(J=92.2504437..., C=3.0216926..., h=196.3245737..., \
s=19.1319556..., Q=321.3408463..., M=10.5256217..., H=237.6114442..., \
HC=None, V=34.7006776..., K=25.8835968..., W=91.6821728...)
    """

    XYZ = to_domain_1(XYZ)
    XYZ_w = to_domain_1(XYZ_w)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    L_A = as_float_array(L_A)
    Y_b = as_float_array(Y_b)

    # Only the surround factor :math:`F_s` is used directly below; the
    # remaining induction factors are consumed through "surround.F".
    F_s, _F, _c, _N_c = surround

    # Step 0 (Forward) - Chromatic adaptation from reference illuminant to
    # "CIE Standard Illuminant D65" illuminant using "CAT02".
    # Computing degree of adaptation :math:`D`; full adaptation is assumed
    # when the illuminant is discounted.
    D = (degree_of_adaptation(surround.F, L_A)
         if not discount_illuminant else ones(L_A.shape))

    XYZ_D65 = chromatic_adaptation_Zhai2018(
        XYZ, XYZ_w, TVS_D65, D, D, transform="CAT02")

    # Step 1 (Forward) - Computing factors related with viewing conditions
    # and independent of the test stimulus.
    # Background factor :math:`F_b`
    F_b = np.sqrt(Y_b / Y_w)
    # Luminance level adaptation factor :math:`F_L`
    F_L = 0.171 * spow(L_A, 1 / 3) * (1 - np.exp(-48 / 9 * L_A))

    # Step 2 (Forward) - Computing achromatic response (:math:`I_z` and
    # :math:`I_{z,w}`), redness-greenness (:math:`a_z` and :math:`a_{z,w}`),
    # and yellowness-blueness (:math:`b_z`, :math:`b_{z,w}`).
    # The scale is ignored because "XYZ_to_Izazbz" operates on absolute
    # values through the *SMPTE ST 2084:2014* transfer function.
    with domain_range_scale("ignore"):
        I_z, a_z, b_z = tsplit(XYZ_to_Izazbz(XYZ_D65, method="Safdar 2021"))
        I_z_w, _a_z_w, b_z_w = tsplit(
            XYZ_to_Izazbz(XYZ_w, method="Safdar 2021"))

    # Step 3 (Forward) - Computing hue angle :math:`h_z`
    h_z = hue_angle(a_z, b_z)

    # Step 4 (Forward) - Computing hue quadrature :math:`H`.
    H = hue_quadrature(h_z)

    # Computing eccentricity factor :math:`e_z`.
    e_z = 1.015 + np.cos(np.radians(89.038 + h_z % 360))

    # Step 5 (Forward) - Computing brightness :math:`Q_z`,
    # lightness :math:`J_z`, colourfulness :math`M_z`, and chroma
    # :math:`C_z`.
    Q_z_p = (1.6 * F_s) / F_b**0.12
    Q_z_m = F_s**2.2 * F_b**0.5 * spow(F_L, 0.2)
    Q_z = 2700 * spow(I_z, Q_z_p) * Q_z_m
    # Whitepoint brightness shares the same exponent and multiplier.
    Q_z_w = 2700 * spow(I_z_w, Q_z_p) * Q_z_m

    J_z = 100 * (Q_z / Q_z_w)

    M_z = (100 * (a_z**2 + b_z**2)**0.37 *
           ((spow(e_z, 0.068) * spow(F_L, 0.2)) /
            (F_b**0.1 * spow(I_z_w, 0.78))))

    C_z = 100 * (M_z / Q_z_w)

    # Step 6 (Forward) - Computing saturation :math:`S_z`,
    # vividness :math:`V_z`, blackness :math:`K_z`, and whiteness
    # :math:`W_z`.
    S_z = 100 * spow(F_L, 0.6) * np.sqrt(M_z / Q_z)
    V_z = np.sqrt((J_z - 58)**2 + 3.4 * C_z**2)
    K_z = 100 - 0.8 * np.sqrt(J_z**2 + 8 * C_z**2)
    W_z = 100 - np.sqrt((100 - J_z)**2 + C_z**2)

    return CAM_Specification_ZCAM(
        as_float(from_range_1(J_z)),
        as_float(from_range_1(C_z)),
        as_float(from_range_degrees(h_z)),
        as_float(from_range_1(S_z)),
        as_float(from_range_1(Q_z)),
        as_float(from_range_1(M_z)),
        as_float(from_range_degrees(H, 400)),
        None,
        as_float(from_range_1(V_z)),
        as_float(from_range_1(K_z)),
        as_float(from_range_1(W_z)),
    )