Example #1
# Imports required by this fragment; functions_plot (and the other helpers used below,
# e.g. get_variables_from_vertex_full_Dataframe) are project-local modules/functions
# assumed to be importable in the original file.
import os

import matplotlib.pyplot as plt
import numpy as np
import torch

import functions_plot


def save_or_plot_subsample(args, subsample, filename, img_name=None):
    if args.save_tensor == 1:  # save as tensor
        torch.save(subsample, filename)
    elif args.save_tensor == 0:  # save as numpy array
        if args.sequence_name == 'DeepCloth':
            np.savetxt(filename, subsample)
        else:
            np.savetxt(filename, subsample.to(torch.device("cpu")).numpy())
    elif args.save_tensor == 2:  # plot the subsample on the RGB image
        functions_plot.plot_RGB_and_landmarks(subsample[:, 0],
                                              subsample[:, 1],
                                              image_path=img_name,
                                              axis_on='off',
                                              visible_colour='y')
        if args.contour == 1:
            functions_plot.show_for_seconds(milliseconds=400000)
        # Load vertex data (sequence_name, dataset_number, group_number, animation_frame,
        # RT_extended, reordered, submesh_idx and directory_name are assumed to be
        # defined at module scope in the original file; this snippet is a fragment)
        variables = get_variables_from_vertex_full_Dataframe(
            sequence_name=sequence_name,
            dataset_number=dataset_number,
            group_number=group_number,
            animation_frame=animation_frame,
            RT_extended=RT_extended,
            reordered=reordered,
            submesh_idx=submesh_idx,
            verbose=0)
        u_visible = variables['u_visible']
        v_visible = variables['v_visible']
        u_occluded = variables['u_occluded']
        v_occluded = variables['v_occluded']

        # Plot RGB and landmarks
        fig = functions_plot.plot_RGB_and_landmarks(
            u_visible=u_visible,
            v_visible=v_visible,
            u_occluded=u_occluded,
            v_occluded=v_occluded,
            sequence_name=sequence_name,
            dataset_number=dataset_number,
            group_number=group_number,
            animation_frame=animation_frame,
            marker_size=0.01)
        file_name = os.path.join(
            directory_name,
            'Group' + group_number + '_frame' + animation_frame + '.png')
        fig.savefig(file_name)
    #     animation_frame = '1'
    submesh_num_vertices_horizontal = 52
    submesh_num_vertices_vertical = 103
    verbose = 0

    input_png_name = os.path.join('Renders' + sequence_name + dataset_number,
                                  'Group.' + group_number,
                                  animation_frame + '.png')
    u, v = convert_contour_to_uv_from_img_name(input_png_name=input_png_name,
                                               verbose=verbose)

    # Plot uv of contour without straight lines on RGB
    fig = functions_plot.plot_RGB_and_landmarks(
        u_visible=u,
        v_visible=v,
        sequence_name=sequence_name,
        dataset_number=dataset_number,
        group_number=group_number,
        animation_frame=animation_frame,
        marker_size=50 / submesh_num_vertices_vertical)
    plt.show()
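
# --- Hedged usage sketch (not from the original module) ---------------------------------
# Illustrates the call pattern of save_or_plot_subsample for the two saving modes. It
# assumes the module-scope names used in the plotting part of the function
# (sequence_name, dataset_number, group_number, animation_frame, ...) are defined
# elsewhere in the project; the argument values and file names below are hypothetical.
def _demo_save_or_plot_subsample():
    from types import SimpleNamespace
    args = SimpleNamespace(save_tensor=1, sequence_name='TowelWall', contour=0)
    subsample = torch.rand(100, 2) * 224                           # dummy (u, v) landmarks
    save_or_plot_subsample(args, subsample, 'subsample_demo.pt')   # mode 1: torch tensor
    args.save_tensor = 0
    save_or_plot_subsample(args, subsample, 'subsample_demo.txt')  # mode 0: text file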


# Access all pixels of the contour, without omitting the ones that lie on straight lines
def append_missing_new_value(w, i, l):
    """Update w (u or v) by appending the integer points missing between w[i] and w[(i + 1) % l].

    l is the length of the original w before any updates were applied."""
    sign = 1 if w[(i + 1) % l] > w[i % l] else -1
    for k in range(sign, w[(i + 1) % l] - w[i % l], sign):
        w = np.append(w, w[i] + k)
    return w
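
A small, self-contained sketch (not from the original module) of how append_missing_new_value densifies a contour: looping over the original indices fills the integer gaps between consecutive coordinates, including the wrap-around from the last point back to the first, appending the missing values at the end of the array in the order the gaps are visited.

import numpy as np

u = np.array([3, 7, 6])            # sparse contour coordinates with gaps
l = len(u)                         # length of the original array, before any appends
for i in range(l):                 # i == l - 1 wraps around to the first point
    u = append_missing_new_value(u, i, l)
print(u)                           # [3 7 6 4 5 6 5 4]: gaps 3->7 and 6->3 filled at the end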
Example #4
        text_name = os.path.join(directory_name,
                                 'uv_width_height_rectROI_' + str(int(animation_frame)) + '.txt')
        uvwh = np.genfromtxt(fname=text_name, dtype='int', delimiter=' ', skip_header=1)
        # Upper-left corner (u, v) of the rectangular ROI plus its width and height
        u_crop_corner = uvwh[0]
        v_crop_corner = uvwh[1]
        width_crop = uvwh[2]
        height_crop = uvwh[3]

        u_visible_crop = u_visible - u_crop_corner
        v_visible_crop = v_visible - v_crop_corner
        u_occluded_crop = u_occluded - u_crop_corner
        v_occluded_crop = v_occluded - v_crop_corner

        ROI_image_path = os.path.join(directory_name, str(int(animation_frame)) + '_rectROI.png')

        functions_plot.plot_RGB_and_landmarks(u_visible=u_visible_crop, v_visible=v_visible_crop,
                                              u_occluded=u_occluded_crop, v_occluded=v_occluded_crop,
                                              image_path=ROI_image_path)
        plt.title('Plot RGB (from file path) and landmarks\ncropped image - (u,v)=(0,0) on upper-left corner')

        ###
        ### Plot RGB and landmarks on a cropped sample - loading RGB from the transformed dataset
        ###
        # Cropping options:
        #     crop_centre_or_ROI == 0: centre crop.
        #     crop_centre_or_ROI == 1: square box containing the towel.
        #     crop_centre_or_ROI == 2: rectangular box containing the towel.
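
A minimal numpy sketch (with made-up numbers, not the project's data) of the coordinate shift used above: subtracting the ROI's upper-left corner, read from the uv_width_height_rectROI_*.txt file, maps landmark coordinates from the full image into the cropped image, whose origin (0, 0) is that corner.

import numpy as np

# hypothetical values in the same layout as the text file: corner u, corner v, width, height
uvwh = np.array([120, 80, 300, 200])
u_visible = np.array([130, 250, 400])    # hypothetical landmark columns in the full image
v_visible = np.array([90, 150, 260])     # hypothetical landmark rows in the full image

u_visible_crop = u_visible - uvwh[0]     # -> [ 10 130 280]
v_visible_crop = v_visible - uvwh[1]     # -> [ 10  70 180]
# (0, 0) now corresponds to the upper-left corner of the cropped rectROI image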
Example #5
    # Imports used by this fragment; functions_plot and functions_data_processing
    # are project-local modules of the original project.
    import os

    import matplotlib.pyplot as plt

    import functions_data_processing
    import functions_plot

    args = functions_data_processing.parser()

    DeepCloth_example = os.path.join('DeepCloth2', 'train', 'train_non-text',
                                     'imgs', '027996.png')
    TowelWall_example = os.path.join('RendersTowelWall11', 'Group.003',
                                     '17.png')
    for input_png_name in [DeepCloth_example, TowelWall_example]:
        uv_towel = find_uv_from_img_name(input_png_name=input_png_name,
                                         verbose=0,
                                         sequence_name=args.sequence_name)
        print(uv_towel.shape)
        # Plot uv on RGB
        fig = functions_plot.plot_RGB_and_landmarks(u_visible=uv_towel[:, 0],
                                                    v_visible=uv_towel[:, 1],
                                                    image_path=input_png_name)
        plt.show()

        uv_towel = find_uv_from_img_name(input_png_name=input_png_name,
                                         verbose=0,
                                         numpy_or_tensor=1,
                                         sequence_name=args.sequence_name)
        print(uv_towel.shape)
        # Plot uv on RGB
        fig = functions_plot.plot_RGB_and_landmarks(u_visible=uv_towel[:, 0],
                                                    v_visible=uv_towel[:, 1],
                                                    image_path=input_png_name)
        plt.show()

    show_BnW_n_original_video(