Example #1
        # heatmaps are stored as uint8 scaled by 255 (rescaled on read below)
        'heatmap_array': np.array(heatmaps * 255, dtype=np.uint8),
        'visible': np.array(visible_flags, dtype=np.float32)
    }
    scio.savemat(path, fr_to_save)


def __read_frame(path):
    matcontent = scio.loadmat(path)
    return (matcontent['cut'],
            matcontent['heatmap_array'] / 255.0,
            matcontent['visible'])


if __name__ == '__main__':
    im_r = reg.Regularizer()
    im_r.fixresize(200, 200)
    im_r.percresize(0.5)
    h_r = reg.Regularizer()
    h_r.fixresize(200, 200)
    h_r.percresize(0.5)
    create_dataset(im_regularizer=im_r,
                   heat_regularizer=h_r,
                   enlarge=0.5,
                   cross_radius=10,
                   shade=True)
    c, h, v = read_dataset_random(number=2)
    u.showimage(c[1])
    print(np.shape(h))
    # show heatmap for the first junction of the second item
    u.showimage(u.heatmap_to_rgb(h[1][:, :, 0:1]))
    print(v[1])
# Build up the model
model1 = km.load_model(model1_save_path)

images_ = attach_heat_map(images_, model1)
model2 = km.load_model(model2_save_path)

images_ = attach_heat_map(images_, model2)
model = km.load_model(model3_save_path)

# Testing the model getting some outputs
net_out = model.predict(images_)
net_out = net_out.clip(max=1)
images = (images - np.mean(images)) / np.var(images)
k = 0.15
imgs = get_image_with_mask(images, net_out)
for idx in range(len(images)):
    # min-max normalise the masked image into [0, 1] for display
    lo = np.min(imgs[idx])
    hi = np.max(imgs[idx])
    u.showimage((imgs[idx] - lo) / (hi - lo))
    u.showimages(
        u.get_crops_from_heatmap(images[idx],
                                 np.squeeze(net_out[idx]),
                                 4,
                                 4,
                                 enlarge=0.5,
                                 accept_crop_minimum_dimension_pixels=100))

send_to_telegram = False
if send_to_telegram:
    send_image_from_array(get_image_with_mask(images[0], net_out))
Example #3
if __name__ == '__main__':
    # pls specify the name of the image, (png, jpg)
    image_name = "hands.png"

    dataset_path = resources_path("hands_bounding_dataset", "network_test")
    png_path = resources_path("gui", image_name)
    model_path = models_path('deployment', 'transfer_mobilenet.h5')
    read_from_png = True
    preprocessing = True

    height = 224
    width = 224
    if read_from_png:
        images = load(png_path, force_format=(height, width, 3))
        print("Saturation mean: {}".format(np.mean(
            rgb2hsv(images[0])[:, :, 1])))
        print("Value mean: {}".format(np.mean(images[0, :, :, 2])))
        print("Hue mean: {}".format(np.mean(images[0, :, :, 0])))
        hsv2rgb(images[0])
        if preprocessing:
            images_ = preprocess_input(images * 255)

    # Build up the model
    model = km.load_model(model_path, custom_objects={'relu6': relu6})

    # Testing the model getting some outputs
    net_out = model.predict(images_ if preprocessing else images)
    imgs = get_image_with_mask(images, net_out)
    u.showimage(imgs[0] / 255)
Example #4
    # drop a trailing singleton channel so resize() receives a 2-D image
    shape_compressed = False
    if len(frame.shape) == 3 and frame.shape[-1] == 1:
        frame = np.reshape(frame, newshape=frame.shape[:-1])
        shape_compressed = True
    ret = resize(frame, size)
    if shape_compressed:
        # restore the channel axis that was removed above
        ret = np.reshape(ret, newshape=ret.shape + (1,))
    return ret


def normalize(frame):
    # standardise the frame to zero mean and unit variance
    avg = frame.mean()
    std = frame.std()
    frame = (frame - avg) / std
    return frame


def heat_thresh(heat, thresh):
    # binary mask: 1 where the heatmap exceeds thresh, 0 elsewhere
    heatm = np.zeros(np.shape(heat))
    heatm[heat > thresh] = 1
    return heatm
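

# Hedged usage sketch (added for illustration; the _demo_* helper name is ours,
# not from the project): shows how heat_thresh turns a float heatmap into a
# binary mask, keeping only pixels strictly above the threshold.
def _demo_heat_thresh():
    demo = np.array([[0.1, 0.6],
                     [0.4, 0.9]])
    mask = heat_thresh(demo, 0.5)
    # mask == [[0., 1.],
    #          [0., 1.]]
    return mask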


if __name__ == '__main__':
    # scipy.misc.imread was removed in SciPy 1.2+; imageio.imread is a drop-in
    import imageio
    test = imageio.imread("t.jpg")
    r = Regularizer()
    r.padding(50, 50)
    r.fixresize(50, 50)
    res = r.apply(test)
    u.showimage(res)
Example #5
        heats.append(heatmaps[rand])
        size += 1
    return ims, heats


def default_train_images_path():
    return resources_path("hands_bounding_dataset", "hands_dataset", "train", "images")


def default_train_annotations_path():
    return resources_path("hands_bounding_dataset", "hands_dataset", "train", "annotations")


def default_test_images_path():
    return resources_path("hands_bounding_dataset", "hands_dataset", "test", "images")


def default_test_annotations_path():
    return resources_path("hands_bounding_dataset", "hands_dataset", "test", "annotations")


if __name__ == '__main__':
    im_f = default_train_images_path()
    an_f = default_train_annotations_path()
    images1, heatmaps1 = get_samples_from_dataset_in_order_from_beginning(im_f, an_f, 100)
    # images1, heatmaps1 = get_ordered_batch(images1, heatmaps1, 1, 1)
    images1, heatmaps1 = get_random_batch(images1, heatmaps1, 2)
    u.showimages(images1)
    for heat1 in heatmaps1:
        u.showimage(u.heatmap_to_rgb(heat1))
Example #6
def show_frame(frame):
    u.showimage(frame)
Example #7
    y_train = np.array(y_train)
    c_train = np.array(c_train)
    h_train = np.array(h_train)
    x_test = np.array(x_test)
    y_test = np.array(y_test)
    c_test = np.array(c_test)
    h_test = np.array(h_test)



    class_weight = count_ones_zeros(y_train, y_test)

    x_train, y_train, c_train, h_train = shuffle_cut_label_conf_h(x_train, y_train, c_train, h_train)
    x_test, y_test, c_test, h_test = shuffle_cut_label_conf_h(x_test, y_test, c_test, h_test)

    u.showimage(x_train[0])
    print(y_train[0], c_train[0])

    if LOAD_MODEL:
        # change the name of the model to be loaded
        model = load_model(resources_path(os.path.join("models", "palm_back", name)))
    else:
        model = unsequential_model_heat()

    model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
    model.summary()
    if TRAIN_MODEL:
        model.fit(x=x_train, y=y_train, batch_size=batch_size, epochs=2, verbose=1, class_weight=class_weight)

        # change the name of the model to save
        model.save(resources_path(os.path.join("models", "palm_back", name)))
Example #8

def __heatmap_to_uint8(heat):
    # np.ndarray.astype returns a new array, so the result must be reassigned
    heat = (heat * 255).astype(np.uint8)
    return heat


def __heatmap_uint8_to_float32(heat):
    # inverse of __heatmap_to_uint8: back to float32 values in [0, 1]
    heat = heat.astype(np.float32) / 255
    return heat
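

# Hedged sanity check (added, not in the original file; the _demo_* helper
# name is ours): the two helpers above quantise a [0, 1] heatmap to 1/255
# steps, so a round trip only preserves values up to that resolution.
def _demo_heatmap_roundtrip():
    heat = np.random.rand(4, 4).astype(np.float32)
    back = __heatmap_uint8_to_float32(__heatmap_to_uint8(heat))
    assert np.allclose(heat, back, atol=1 / 255)
    return back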


def __add_padding(image, right_pad, bottom_pad):
    image = np.hstack((image, np.zeros([image.shape[0], right_pad, image.shape[2]], dtype=image.dtype)))
    image = np.vstack((image, np.zeros([bottom_pad, image.shape[1], image.shape[2]], dtype=image.dtype)))
    return image


def __read_mat(frame, folder=jsonhands_path()):
    path = os.path.join(folder, frame)
    matcont = scio.loadmat(path)
    return matcont['frame'], __heatmap_uint8_to_float32(matcont['heatmap'])


if __name__ == '__main__':
    #create_dataset_shaded_heatmaps(resize_rate=0.5)
    ri, rh = __read_mat("Ricki_unit_8.flv_000152.mat")
    u.showimage(ri)
    u.showimage(rh)
Example #9
def means_stds(xarr):
    m = []
    s = []
    for x1 in xarr:
        m.append(x1.mean())
        s.append(x1.std())
    return m, s


def mean(vals, y, test):
    ris = 0
    n = 0
    for i in range(len(vals)):
        if y[i] == test:
            ris += vals[i]
            n += 1
    return ris / n
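

# Hedged NumPy equivalent (added for illustration; the helper name is ours):
# the loop in mean() above is a conditional average, and with array inputs it
# can be written with a boolean mask, assuming at least one entry of y equals
# test (otherwise both versions fail on the empty selection).
def _mean_vectorized(vals, y, test):
    vals = np.asarray(vals)
    y = np.asarray(y)
    return vals[y == test].mean()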


if __name__ == '__main__':

    regularizer = reg.Regularizer()
    regularizer.rgb2gray()

    # create_dataset_w_heatmaps(savepath=path, im_regularizer=regularizer)

    x, y, c, h = read_dataset_h(path=path, minconf=0.999)
    ind = 40
    u.showimage(np.array(np.dstack((x[ind], x[ind], x[ind])), dtype=np.uint8))
    m, s = means_stds(x)
Example #10
    return prop_heatmap_loss(heat_ground,
                             heat_pred,
                             white_priority=white_priority,
                             diff_mapper=diff_mapper)


def prop_heatmap_penalized_fn_loss(heat_ground,
                                   heat_pred,
                                   white_priority=0.0,
                                   delta=0.0):
    diff_mapper = lambda x: -delta / 2 * K.pow(x, 3) + (1 + delta / 2) * K.square(x)
    return prop_heatmap_loss(heat_ground,
                             heat_pred,
                             white_priority=white_priority,
                             diff_mapper=diff_mapper)
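

# Hedged numeric check (added, NumPy-only; the _demo_* helper name is ours):
# the penalised mapper above is f(x) = (1 + delta/2) * x^2 - delta/2 * x^3,
# which matches the plain square at x = 0 and x = 1 and adds
# delta/2 * x^2 * (1 - x) in between, so mid-range errors are weighted more
# heavily when delta > 0.
def _demo_penalized_mapper(delta=1.0):
    x = np.linspace(0.0, 1.0, 5)
    f = (1 + delta / 2) * np.square(x) - delta / 2 * np.power(x, 3)
    assert np.allclose(f - np.square(x), delta / 2 * np.square(x) * (1 - x))
    return f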


if __name__ == '__main__':
    dataset_path = resources_path("hands_bounding_dataset", "network_test")
    images, heat_maps, depths = read_dataset_random(path=dataset_path,
                                                    number=1)
    shifted_mean, norm_factor = static_prop_heatmap_parameters(
        heat_maps[0], -3)
    print(shifted_mean, norm_factor)
    u.showimage(heat_maps[0])
    weight_map = np.square(heat_maps[0] - shifted_mean) / norm_factor
    print(np.mean(weight_map))
    print(weight_map)
    u.showimage(weight_map)
Example #11
def count_ones_zeros(y_train, y_test):
    # count label == 1 ("right") vs. any other label ("left") in each split
    right = 0
    left = 0
    for i in range(len(y_train)):
        if y_train[i] == 1:
            right += 1
        else:
            left += 1
    print("TRAIN R: ", right)
    print("TRAIN L: ", left)
    right = 0
    left = 0
    for i in range(len(y_test)):
        if y_test[i] == 1:
            right += 1
        else:
            left += 1
    print("TEST R: ", right)
    print("TEST L: ", left)


if __name__ == '__main__':
    im_r = reg.Regularizer()
    im_r.fixresize(200, 200)
    im_r.rgb2gray()
    #create_dataset(im_regularizer=im_r)
    c, b = read_dataset_random()
    print(b[0])
    u.showimage(c[0].squeeze())
Example #12
def print_one_pred(yt, yp, num=0):
    # show the ground-truth (yt) and predicted (yp) heatmaps for sample num
    u.showimage(yt[num])
    u.showimage(yp[num])