Example #1
def color_match(dominant: np.ndarray, threshold: int = 30) -> str:
    """
    Matches the dominant color to "red", "green", "blue", or "other".

    Args:
        dominant (np.ndarray): RGB values of the dominant color.
        threshold (int, optional): threshold for matching. Defaults to 30.

    Returns:
        str: matching color for dominant color.
    """
    output = "other"

    img_color = rgb2lab(np.uint8(np.asarray([[dominant]])))
    red = rgb2lab(np.uint8(np.asarray([[[255, 0, 0]]])))
    green = rgb2lab(np.uint8(np.asarray([[[0, 128, 0]]])))
    blue = rgb2lab(np.uint8(np.asarray([[[0, 0, 255]]])))

    if deltaE_cie76(red, img_color) < threshold:
        output = "red"
    elif deltaE_cie76(green, img_color) < threshold:
        output = "green"
    elif deltaE_cie76(blue, img_color) < threshold:
        output = "blue"
    return output
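
A quick illustrative call of color_match above (assumes numpy and the skimage.color imports the function relies on are in scope; the dominant color value is made up):

import numpy as np

dominant = np.array([240, 10, 10])   # a color close to pure red
print(color_match(dominant))         # -> "red" with the default threshold of 30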
Example #2
def detectVertShelf(image, color_threshold=10, minLineLength=300):
    """Detects vertical edges (shelves) using a color metric on RGB.
    Compares the "distance" of each pixel's color from the shelf color, sets distant pixels to black,
    and then detects edges in the resulting image.

    image ... input image as a numpy array
    color_threshold ... threshold on the color distance from the shelf color
    minLineLength ... minimum edge length for detection

    returns the original image and the detected lines (see the cv2.HoughLinesP docs)
    """

    lab = rgb2lab(image)
    regal = [225, 220, 180]   # approximate color of the shelf (Globus grey)

    regal_3d = np.uint8(np.asarray([[regal]]))
    dE_regal = deltaE_cie76(rgb2lab(regal_3d), lab)

    image_res = image.copy()
    image_res[dE_regal >= color_threshold] = [0, 0, 0]   # far from the shelf color -> black
    gray = cv2.cvtColor(image_res, cv2.COLOR_BGR2GRAY)
    lines = cv2.HoughLinesP(image=gray, rho=1, theta=np.pi, threshold=20, lines=np.array([]),
                            minLineLength=minLineLength, maxLineGap=3)
    
    return image, lines
Example #3
def cie_de(
    reference: numpy.ndarray,
    distorted: numpy.ndarray,
    dE_function="2000",
    lightness_weight=1.0,
    chroma_weight=1.0,
    hue_weight=1.0,
) -> numpy.ndarray:
    assert reference.shape == distorted.shape, "Shapes do not match"

    if len(reference.shape) == 2:
        reference = reference[numpy.newaxis, ...]
        distorted = distorted[numpy.newaxis, ...]

    reference_lab = rgb2lab(reference)
    distorted_lab = rgb2lab(distorted)

    if dE_function == "2000":
        deltaE = deltaE_ciede2000(reference_lab, distorted_lab,
                                  lightness_weight, chroma_weight, hue_weight)
    elif dE_function == "1994":
        deltaE = deltaE_ciede94(reference_lab, distorted_lab, hue_weight,
                                chroma_weight, lightness_weight)
    elif dE_function == "1976":
        deltaE = deltaE_cie76(reference_lab, distorted_lab)
    else:
        raise ValueError(
            "CIE dE function with name {} not found".format(dE_function))
    return deltaE
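
A minimal usage sketch for the cie_de dispatcher above (assumes numpy and the skimage.color deltaE functions it calls are imported; the random images are purely illustrative):

import numpy as np

rng = np.random.default_rng(0)
reference = rng.random((32, 32, 3))                                   # float RGB in [0, 1]
distorted = np.clip(reference + 0.05 * rng.standard_normal((32, 32, 3)), 0.0, 1.0)

de_map = cie_de(reference, distorted, dE_function="2000")
print(de_map.shape, float(de_map.mean()))                             # per-pixel dE2000 map and its mean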
Example #4
def match_image_by_color(image, blue_color, green_color, threshold=15):
    selected_image = rgb2lab(np.uint8(np.asarray([image])))
    blue_color = rgb2lab(np.uint8(np.asarray([[blue_color]])))
    green_color = rgb2lab(np.uint8(np.asarray([[green_color]])))

    mask_blue = deltaE_cie76(selected_image, blue_color) < threshold
    mask_green = deltaE_cie76(selected_image, green_color) < threshold

    mask = np.bitwise_or(mask_blue, mask_green)
    mask = mask.reshape(mask.shape[1:])

    mask = mask.astype(np.uint8)
    kernel = np.ones((3, 1), np.uint8)
    mask = cv2.dilate(mask, kernel, iterations=2)

    return np.around(mask.mean(), 3), mask * 255
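
A small smoke test for the function above (assumes cv2, numpy and the skimage.color imports it relies on are available; the synthetic image is illustrative):

import numpy as np

img = np.zeros((64, 64, 3), dtype=np.uint8)
img[:, :32] = [0, 0, 255]              # left half pure blue, right half black
coverage, mask = match_image_by_color(img, blue_color=[0, 0, 255], green_color=[0, 128, 0])
print(coverage)                        # fraction of pixels matching either color (0.5 here)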
Example #5
def test():
    color1 = [20, 20, 200]
    color2 = [20, 20, 100]

    c1 = rgb2lab(np.uint8(np.asarray([[color1]])))[0][0]
    c2 = rgb2lab(np.uint8(np.asarray([[color2]])))[0][0]
    print(deltaE_cie76(c1, c2))
Example #6
def get_similar_bg(img, bg_colour, threshold):
    """
    Find colour similarity of every pixel in img to specified "colour". Similar colours are shown as BLACK (ie, black background)
    
    Input
    img: MxNx3 image as np.array
    colour: Specified colour for comparison
    threshold: Threshold between similar and non similar colours
    
    Return
    img: MxNx3 image with similar colours as BLACK as np.array
    """
    lab = rgb2lab(img)

    colour_3d = np.uint8(np.asarray([[bg_colour]]))
    black_3d = np.uint8(np.asarray([[0, 0, 0]]))

    dE = deltaE_cie76(rgb2lab(colour_3d), lab)

    img[dE < threshold] = black_3d

    return img
Example #7
def match_image_by_color(image,
                         rgb_color,
                         threshold=60,
                         number_of_colors=7):
    """
    Evaluates whether rgb_color is present in the image within the given threshold.

    Parameters:
    image (img obj): image object
    rgb_color (r, g, b): RGB color to look for
    threshold (int): threshold value
    number_of_colors: number of top colors to consider

    Returns:
    bool: True if the given rgb_color is present in the image within the threshold

    """
    image_colors = PicturesElaborator.get_colors(image, number_of_colors)
    if not image_colors:
        return False

    selected_color = rgb2lab(np.uint8(np.asarray([[rgb_color]])))

    select_image = False
    for i in range(number_of_colors):
        curr_color = rgb2lab(np.uint8(np.asarray([[image_colors[i]]])))
        diff = deltaE_cie76(selected_color, curr_color)
        if diff < threshold:
            select_image = True

    return select_image
Example #8
def comp_era(era, era_averages, img):
    """
    Compares img with all pictures in the given era. Returns smallest distance

    - Gets era, goes through each artist, goes through each of the artists artworks
    - Collects smallest value (closes similarity) per artist in artist_min_sum var
    - Collects min from all artists in min_sum
    - Returns the min
    """
    # variable to hold minimum values for all artists in era
    min_sum = []
    # get artists from given era
    artists = era_averages[era]
    for artist in artists:
        diff = []
        # go through each artwork
        for i, picture in enumerate(artist):
            if i > 100:
                # don't go through more than 100 per artist (for balancing)
                break
            # compare the given image and the artist's artwork
            comp = deltaE_cie76(picture, img)
            diff.extend(comp)
        tot = 0
        for array in diff:
            # sum all similarity vectors per artist + normalize
            tot += np.sum(array)/max(array)
        # get average similarity to the artist
        tot = tot/len(diff)
        min_sum.extend([tot])
    # average all values
    min_sum = sum(min_sum)/len(min_sum)
    return min_sum
Example #9
def identify_leaf_segments(color_segments, segments):
    n_segments = len(np.unique(segments))
    segment_colors = np.zeros((n_segments, 3))
    green_array = np.array([0, 128, 0])
    found_segments = []
    for row in range(0, segments.shape[0], 10):
        for col in range(0, segments.shape[1], 10):
            # Check if we have found this segment before
            if segments[row, col] not in found_segments:
                # if not, append it to the found segments
                found_segments.append(segments[row, col])
                # and record the segment color
                segment_colors[segments[row, col]] = color_segments[row, col]
        # If all segments are found, stop.
        if len(found_segments) == n_segments:
            break

    mse_seg_green = np.apply_along_axis(lambda x: deltaE_cie76(x, green_array),
                                        -1, segment_colors)
    # a distance below 120 between the segment color and green marks a leaf segment (experimentally set)
    leaf_seg_mask = mse_seg_green < 120
    leaf_seg_color = [
        segment_colors[i] for (i, s) in enumerate(leaf_seg_mask) if s
    ]
    return leaf_seg_color
Example #10
def _video_video_dissim_par(img0, lis):
    k = []
    for j in list(range(len(lis))):
        diff = deltaE_cie76(img0, lis[j])
        res = sum(sum(diff**2)) / (224 * 224)
        k.append(res)
    return k
Example #11
def labcolor_to_blockid(lab, map_lab):
    colours = {(0, 0): (2, 0),
           (0, 1): (3, 0),
           (0, 2): (4, 0),
           (0, 3): (5, 0),
           (0, 4): (7, 0),
           (0, 5): (14, 0),
           (0, 6): (15, 0),
           (1, 0): (16, 0),
           (1, 1): (17, 0),
           (1, 2): (21, 0),
           (1, 3): (22, 0),
           (1, 4): (24, 0),
           (1, 5): (35, 0),
           (1, 6): (35, 1),
           (2, 0): (35, 2),
           (2, 1): (35, 3),
           (2, 2): (35, 4),
           (2, 3): (35, 5),
           (2, 4): (35, 6),
           (2, 5): (35, 7),
           (2, 6): (35, 8),
           (3, 0): (35, 9),
           (3, 1): (35, 10),
           (3, 2): (35, 11),
           (3, 3): (35, 12),
           (3, 4): (35, 13),
           (3, 5): (35, 14),
           (3, 6): (35, 15),
           (4, 0): (41, 0),
           (4, 1): (42, 0),
           (4, 2): (43, 0),
           (4, 3): (45, 0),
           (4, 4): (46, 1),
           (4, 5): (47, 0),
           (4, 6): (48, 0),
           (5, 0): (49, 0),
           (5, 1): (54, 0),
           (5, 2): (56, 0),
           (5, 3): (57, 0),
           (5, 4): (58, 0),
           (5, 5): (60, 0),
           (5, 6): (61, 0),
           (6, 0): (73, 0),
           (6, 1): (79, 0),
           (6, 2): (80, 0),
           (6, 3): (82, 0),
           (6, 4): (89, 0),
           (6, 5): (103, 0),
           (6, 6): (246, 0)}
    distance = 300
    for k, map_column in enumerate(map_lab):
        for l, map_pixel in enumerate(map_column):
            delta = color.deltaE_cie76(lab.reshape(3),map_pixel)
            if delta < distance:
                distance = delta
                block = colours[(k,l)]
    return block
Example #12
def count_similarities(segment):
    # segment is expected to already be in Lab space
    if segment.size == 0:
        return True
    pivot_pixel = segment[0, 0]
    for row in segment:
        for pixel in row:
            if deltaE_cie76(pivot_pixel, pixel) > 8:
                return False
    return True
Example #13
def _image_video_dissim_par(img0, j):

    #img1 = io.imread(j)
    img1 = resize(j, (224, 224))
    img1 = color.rgb2lab(img1)
    diff = deltaE_cie76(img0, img1)
    res = sum(sum(diff**2)) / (224 * 224)
    return res
Example #14
    def getDifference(self, uploaded_color):
        color = self.dominantColor[0]
        color.getLab()
        print("color dot lab")
        print(color.lab)
        print("uploaded color")
        print(uploaded_color)

        # this entire function has to go into algorithm file
        self.diff = deltaE_cie76(color.lab, uploaded_color)
Example #15
    def match_image_by_color(self, image, color, number_of_colors=5):

        image_colors = self.get_colors(image, number_of_colors)
        selected_color = rgb2lab(np.uint8(np.asarray([[color]])))

        select_image = False
        for i in range(number_of_colors):
            curr_color = rgb2lab(np.uint8(np.asarray([[image_colors[i]]])))
            diff = deltaE_cie76(selected_color, curr_color)

        return diff
Example #16
def match_image_by_color(image, color, threshold=60, number_of_colors=3):
    image_colors = get_colors(image, number_of_colors, False)
    selected_color = rgb2lab(np.uint8(np.asarray([[color]])))

    select_image = False
    for i in range(number_of_colors):
        curr_color = rgb2lab(np.uint8(np.asarray([[image_colors[i]]])))
        diff = deltaE_cie76(selected_color, curr_color)
        if (diff < threshold):
            select_image = True
    return select_image
Example #17
def _image_image_dissim(i, j):

    img0 = io.imread(i)
    img1 = io.imread(j)
    img0 = resize(img0, (224, 224))
    img0 = color.rgb2lab(img0)
    img1 = resize(img1, (224, 224))
    img1 = color.rgb2lab(img1)
    diff = deltaE_cie76(img0, img1)
    res = sum(sum(diff**2)) / (224 * 224)
    return res
Example #18
def get_colors_name(color_extracted, ColorSet: dict):
    selected_colors = ""
    min_diff = 9999
    color_extracted_lab = rgb2lab(np.uint8(np.asarray([[color_extracted]])))
    for color_sample in ColorSet:
        color_sample_lab = rgb2lab(np.uint8(np.asarray([[ColorSet[color_sample]]])))
        color_diff = deltaE_cie76(color_extracted_lab, color_sample_lab)
        if min_diff >= color_diff:
            min_diff = color_diff
            selected_colors = color_sample
    return selected_colors
Example #19
def comparison(colorsRGB, Colors: dict):
    selected_color = rgb2lab(np.uint8(np.asarray([[colorsRGB]])))
    min_diff = sys.maxsize
    color_picked = None
    for color in Colors:
        curr_color = rgb2lab(np.uint8(np.asarray([[Colors[color]]])))
        diff = deltaE_cie76(selected_color, curr_color)
        if diff < min_diff:
            min_diff = diff
            color_picked = color
    return color_picked
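
An illustrative call of comparison above (the color dictionary is made up; sys, numpy and the skimage.color imports are assumed to be in scope):

basic_colors = {"red": [255, 0, 0], "green": [0, 128, 0], "blue": [0, 0, 255]}
print(comparison([200, 30, 40], basic_colors))   # -> "red", the closest of the three in Lab space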
Example #20
    def get_most_similar_color(self, query_color):

        red_st, green_st, blue_st = self.standardize_rgb(query_color)
        # y_query, u_query, v_query = self.transform_rgb_to_yuv([red_st, green_st, blue_st])
        # distances = np.sum(([y_query, u_query, v_query] - self.data_colors[["Y", "U", "V"]].to_numpy())**2, axis=1)
        distances = deltaE_cie76(
            [red_st, green_st, blue_st],
            self.data_colors[["Red_st", "Green_st", "Blue_st"]].to_numpy())

        idx_min = np.argmin(distances)
        similar_color = self.data_colors.iloc[idx_min].values
        return "_".join([str(x) for x in similar_color[1:4]])
Example #21
def draw(mc, selfie_lab, map_lab):
    x, y, z = mc.player.getPos()
    for i, selfie_column in enumerate(selfie_lab):
        for j, selfie_pixel in enumerate(selfie_column):
            distance = 300
            for k, map_column in enumerate(map_lab):
                for l, map_pixel in enumerate(map_column):
                    delta = color.deltaE_cie76(selfie_pixel, map_pixel)
                    if delta < distance:
                        distance = delta
                        block = colors[(k, l)]
            mc.setBlock(x - j, y - i + 60, z + 5, block[0], block[1])
Example #22
def unify_similar_colors(img, delta_threshold: int):
    rgb_image_array = np.array(img)

    lab = rgb2lab(img)

    for color in image_colors:
        color_3d = np.uint8(np.asarray([[color]]))
        dE_color = deltaE_cie76(rgb2lab(color_3d), lab)
        rgb_image_array[dE_color < delta_threshold] = color_3d

    rgb_image = Image.fromarray(rgb_image_array, 'RGB')
    rgb_image = np.uint8(rgb_image)

    return rgb_image
Example #23
def match_image_by_color(image, color, threshold=60, number_of_colors=10):

    image_colors = get_colors(image, number_of_colors, False)
    selected_color = rgb2lab(np.uint8(np.asarray([[color]])))

    select_image = False
    for i in range(number_of_colors):
        curr_color = rgb2lab(np.uint8(np.asarray([[image_colors[i]]])))
        diff = deltaE_cie76(selected_color, curr_color)
        if (diff < threshold):
            print("Color difference value is : {0} \n".format(str(diff)))
            select_image = True

    return select_image
Example #24
def match_image_by_colour(self, threshold=60, number_of_colours=8):
    for x in range(len(self.a)):
        self.match_colour = self.chose_colour()
        self.get_image()
        self.image_colours = self.get_colors1()  # RGB colors
        self.selected_colour = rgb2lab(
            np.uint8(np.asarray([self.match_colour])))
        self.select_image = False
        for i in range(self.number_of_colours):
            self.current_colour = rgb2lab(
                np.uint8(np.asarray([[self.image_colours[i]]])))
            self.diff = deltaE_cie76(self.selected_colour,
                                     self.current_colour)
            if self.diff < threshold:
                self.select_image = True
        return self.select_image  # True or False
Example #25
def _image_video_dissim(i, j):
    img0 = io.imread(i)
    ldiff = math.inf
    img0 = resize(img0, (224, 224))
    img0 = color.rgb2lab(img0)

    for frame in _video_frames(j):
        img1 = resize(frame, (224, 224))
        img1 = color.rgb2lab(img1)

        diff = deltaE_cie76(img0, img1)
        res = sum(sum(diff**2)) / (224 * 224)

        if ldiff > res:
            ldiff = res

    return ldiff
Example #26
def match_foundation_shade(face_hex, foundation_hex_lst):
    """Return the 6 foundation hex codes closest in color to face_hex."""

    # https://stackoverflow.com/questions/44428315/similar-color-detection-in-python

    # turn face_hex into rgb
    face_rgb = hex2rgb(face_hex)
    # turn this rgb color into a numpy array
    face_arr = np.uint8(np.asarray([[face_rgb]]))
    # convert to lab space
    face_lab = rgb2lab(face_arr)

    rgb_lst = []
    # append each rgb color to a list
    for hex_code in foundation_hex_lst:
        foundation_rgb = hex2rgb(hex_code)
        rgb_lst.append(foundation_rgb)

    # make this spectrum of foundation colors a numpy array
    foundation_arr = np.uint8(np.asarray([rgb_lst]))

    #convert the array to lab space
    foundation_lab = rgb2lab(foundation_arr)

    # get the distance between the face and each foundation color in the lab space
    distance_colors = deltaE_cie76(face_lab, foundation_lab)

    # sort the distances from smallest to largest;
    # sorting the values directly would lose the correspondence with foundation_hex_lst,
    # so use argsort, which returns the indices of the distances in their original order
    sort_distance = np.argsort(distance_colors)

    sort_distance = sort_distance.squeeze()

    # indices of the 6 closest distances, in positions that line up with foundation_hex_lst
    closest_6 = sort_distance[0:6]

    top_6_hex = []

    # now we have to find out which foundation corresponds to these indexes
    for index in closest_6:
        top_6_hex.append(foundation_hex_lst[index])

    return top_6_hex
Example #27
def match_color(color_1, ratio_1, color_2, ratio_2, thresh=60):

    try:
        color_1 = color_1.tolist()
        color_2 = color_2.tolist()
    except AttributeError:
        raise ValueError("Bad format for color; provide as numpy arrays")

    color_1 = rgb2lab(np.uint8(np.asarray([[color_1]])))
    color_2 = rgb2lab(np.uint8(np.asarray([[color_2]])))
    diff = deltaE_cie76(color_1, color_2)

    if diff < thresh:
        if min(ratio_1, ratio_2) / max(ratio_1, ratio_2) > 0.7:
            return 1

    return 0
Example #28
def match_image_by_color(self, color, number_of_colors=3):
    logging.info("Calculating the difference to the predefined color for {}".format(
        self.image_path))
    image_colors = self.get_colors(number_of_colors, False)
    selected_color = rgb2lab(np.uint8(np.asarray([[color]])))

    diff_list = []

    for i in range(number_of_colors):
        curr_color = rgb2lab(np.uint8(np.asarray([[image_colors[i]]])))
        diff = deltaE_cie76(selected_color, curr_color)
        diff_list.append(diff)

    if len(diff_list) == 0:
        return 9999

    # print("Minimum Difference: {}".format(min(diff_list)))
    return min(diff_list)
Example #29
def compare_color(color1, color2):
    threshold = 10
    count = 0
    for i in range(len(color1)):
        selected_color = rgb2lab(np.uint8(np.asarray([[color1[i]]])))
        for j in range(len(color2)):
            curr_color = rgb2lab(np.uint8(np.asarray([[color2[j]]])))
            diff = deltaE_cie76(selected_color, curr_color)
            if (diff < threshold):
                count += 1
                break

    if (count < 9):
        #not same
        return False
    else:
        #same
        return True
Example #30
def get_similar(img, colour, threshold):
    """
    Find the colour similarity of every pixel in img to the specified "colour".
    Non-similar colours that are darker are shown as RED; non-similar colours that are lighter are shown as BLUE.

    Input
    img: MxNx3 image as np.array
    colour: Specified colour for comparison
    threshold: Threshold between similar and non-similar colours

    Return
    img: MxNx3 image with non-similar colours as RED (darker) and BLUE (lighter), as np.array
    red_img: MxNx3 image with ONLY the RED marks
    blue_img: MxNx3 image with ONLY the BLUE marks
    """

    lab = rgb2lab(img)

    colour_3d = np.uint8(np.asarray([[colour]]))
    red_3d = np.uint8(np.asarray([[255, 0, 0]]))
    blue_3d = np.uint8(np.asarray([[0, 0, 255]]))

    dE = deltaE_cie76(rgb2lab(colour_3d), lab)

    img_flat = img.reshape(img.shape[0] * img.shape[1], img.shape[2])
    notBlack = np.array([
        not np.array_equal(item, [0, 0, 0]) for item in img_flat
    ]).reshape(img.shape[:2])
    img_light = (np.sum(img_flat, axis=1) >= sum(colour)).reshape(
        img.shape[:2])
    img_dark = (np.sum(img_flat, axis=1) < sum(colour)).reshape(img.shape[:2])

    red_segment = np.logical_and(np.logical_and(dE >= threshold, notBlack),
                                 img_dark)
    blue_segment = np.logical_and(np.logical_and(dE >= threshold, notBlack),
                                  img_light)
    img[red_segment] = red_3d
    img[blue_segment] = blue_3d

    red_img = np.zeros(img.shape, dtype="uint8")
    red_img[red_segment] = red_3d
    blue_img = np.zeros(img.shape, dtype="uint8")
    blue_img[blue_segment] = blue_3d

    return img, red_img, blue_img
Example #31
def test_cie76():
    data = load_ciede2000_data()
    N = len(data)
    lab1 = np.zeros((N, 3))
    lab1[:, 0] = data['L1']
    lab1[:, 1] = data['a1']
    lab1[:, 2] = data['b1']

    lab2 = np.zeros((N, 3))
    lab2[:, 0] = data['L2']
    lab2[:, 1] = data['a2']
    lab2[:, 2] = data['b2']

    dE2 = deltaE_cie76(lab1, lab2)
    oracle = np.array([
        4.00106328, 6.31415011, 9.1776999, 2.06270077, 2.36957073,
        2.91529271, 2.23606798, 2.23606798, 4.98000036, 4.9800004,
        4.98000044, 4.98000049, 4.98000036, 4.9800004, 4.98000044,
        3.53553391, 36.86800781, 31.91002977, 30.25309901, 27.40894015,
        0.89242934, 0.7972, 0.8583065, 0.82982507, 3.1819238,
        2.21334297, 1.53890382, 4.60630929, 6.58467989, 3.88641412,
        1.50514845, 2.3237848, 0.94413208, 1.31910843
    ])
    assert_allclose(dE2, oracle, rtol=1.e-8)
Example #32
# -*- coding: utf-8 -*-
import numpy as np
from skimage.color import rgb2lab
from skimage.color import deltaE_cie76
import os

if __name__ == "__main__":
    # pre-compute the distance matrix for the reduced Lab color space
    colors = []
    for r in range(12):
        for g in range(12):
            for b in range(12):
                colors.append((r * 23, g * 23, b * 23))
    base_color = rgb2lab(np.array(colors, dtype=np.uint8).reshape(864, 2, 3)).reshape(1728, 3)
    mat = np.zeros((1728, 1728))
    for i in range(1728):
        for j in range(i + 1, 1728):
            mat[i, j] = deltaE_cie76(base_color[i], base_color[j])
            mat[j, i] = mat[i, j]
    np.save(os.path.join(os.path.dirname(os.path.abspath(__file__)), "color_dist"), mat)
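
One possible way to consume the saved matrix (an illustrative sketch; assumes color_dist.npy was produced by the script above and sits next to the loading script):

import os
import numpy as np

mat = np.load(os.path.join(os.path.dirname(os.path.abspath(__file__)), "color_dist.npy"))

def palette_index(r, g, b):
    # colors were appended with r as the outer loop and b as the inner loop, each channel in 0..11
    return (r * 12 + g) * 12 + b

# dE between the darkest and the brightest entries of the reduced palette
print(mat[palette_index(0, 0, 0), palette_index(11, 11, 11)])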
Example #33
def test_single_color_cie76():
    lab1 = (0.5, 0.5, 0.5)
    lab2 = (0.4, 0.4, 0.4)
    deltaE_cie76(lab1, lab2)
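
For a single pair like this, CIE76 is simply the Euclidean distance in Lab space, so the value can be checked by hand (a small added check, not part of the original test):

import numpy as np
from skimage.color import deltaE_cie76

# sqrt((0.1)**2 * 3) = sqrt(0.03) ~= 0.1732
assert np.isclose(deltaE_cie76((0.5, 0.5, 0.5), (0.4, 0.4, 0.4)), np.sqrt(0.03))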