Example #1
# Preamble added for self-containment: img_shape, n, n_test and hist are
# assumed to be defined earlier, and project helpers such as generate_dataset
# and the filter classes are assumed importable from the repository. The
# remaining examples assume a similar preamble (plus numpy as np and
# skimage.data where they are used).
import cv2
import tensorflow as tf

import spo_dataset

x, z = generate_dataset(img_shape,
                        n=n,
                        image_path=spo_dataset.__path__[0] +
                        '/source_image/illusion.jpg')
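# x presumably holds the state images and z the corresponding noisy
# observations, judging by how the pair is used below.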

x_test = x[:n_test]
z_test = z[:n_test]
x_train = x[n_test:]
z_train = z[n_test:]

# ---- Initialize ---- #
tf.keras.backend.clear_session()
df = DeepNoisyBayesianFilter(hist, img_shape)

# ---- Load weights ---- #
df.load_weights('model_weights_illusion')

# ---- initialize particle filter ---- #
image_path = spo_dataset.__path__[0] + '/source_image/illusion.jpg'
ref_img = cv2.imread(image_path, 0)
ref_img = cv2.resize(ref_img, img_shape, interpolation=cv2.INTER_AREA)

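# Assumed parameter roles, inferred from usage across these examples:
# Np = number of particles, No = number of tracked objects; radiuses and
# initial_pose give one radius and one starting position per object, and
# beta scales the likelihood sharpness.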
pf = ParticleFilter_deep(Np=10000,
                         No=2,
                         ref_img=ref_img,
                         radiuses=[18, 22],
                         initial_pose=[[14, 18], [108, 104]],
                         beta=3,
                         likelihood=df)
Example #2
n = 600
n_test = 300
n_train = n - n_test

# ---- initialize particle filter ---- #
ref_img = np.array(data.checkerboard()).astype(np.float64)
ref_img = cv2.resize(ref_img, img_shape, interpolation=cv2.INTER_AREA)
tf.keras.backend.clear_session()
df = DeepNoisyBayesianFilter(hist, img_shape)


# ---- Load weights ---- #
df.load_weights('model_weights_partially_observed_chekkers')

pf = ParticleFilter_deep(Np=100,
                         No=1,
                         ref_img=ref_img,
                         radiuses=[18],
                         initial_pose=[[20, 10]],
                         beta=60,
                         likelihood=df)
# ---- Get the dataset ---- #

x, z = generate_dataset(img_shape=img_shape,
                        n=n,
                        image_type="checkers",
                        partial=True)
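
# A possible continuation, mirroring the test/train split used in the other
# examples (not part of the original snippet):
x_test = x[:n_test]
z_test = z[:n_test]
x_train = x[n_test:]
z_train = z[n_test:]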
        
Example #3
x, z = generate_dataset(  # call head inferred from the other examples
    img_shape=img_shape,
    n=n,
    image_path=spo_dataset.__path__[0] + '/source_image/tree.jpg',
    mask=spo_dataset.__path__[0] + '/source_image/tree_masked.jpg',
    partial=True)
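# mask presumably marks the unobserved part of the image (hence partial=True);
# a guess based on the tree_masked.jpg filename.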

x_test = x[:n_test]
z_test = z[:n_test]
x_train = x[n_test:]
z_train = z[n_test:]

# ---- Initialize DF ---- #
tf.keras.backend.clear_session()
df = DeepNoisyBayesianFilter(hist, img_shape)

# ---- Load weights ---- #
df.load_weights('model_weights_partially_observed_tree')

# ---- initialize particle filter ---- #
image_path = spo_dataset.__path__[0] + '/source_image/tree.jpg'
ref_img = cv2.imread(image_path, 0)
ref_img = cv2.resize(ref_img, img_shape, interpolation=cv2.INTER_AREA)

pf = ParticleFilter_deep(Np=5000,
                         No=1,
                         ref_img=ref_img,
                         radiuses=[20],
                         initial_pose=[[14, 20]],
                         beta=1,
                         likelihood=df)
Example #4
# ---- Initialize ---- #
tf.keras.backend.clear_session()
df = DeepNoisyBayesianFilter(hist, img_shape)

# ---- Train ---- #

train_likelihood(df, x_train, z_train, epochs=130)  #100
train_predictor(df, x_train, epochs=10, min_img=2)  #5
train_predictor(df, x_train, epochs=15, min_img=20)  #10
train_update(df, x_train, z_train, epochs=10, min_img=2)  #10
train_relax(df, x_train, z_train, epochs=5, min_img=10)  # 10
train_relax(df, x_train, z_train, epochs=50, min_img=50)  # 10
train_relax(df, x_train, z_train, epochs=50, min_img=None)  # 10
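# The staged schedule above appears to train the likelihood model first, then
# the predictor and update networks, and finally fine-tunes ("relaxes") the
# full filter on progressively longer sequences (min_img rising from 2 to
# None); the trailing numbers look like earlier epoch settings.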

# ---- Or load weights ---- #
df.load_weights('model_weights')

# ---- Test and visualize ---- #
x_old = x_test[:hist, ...].copy()
frames = []
obs_frames = []
state_frames = []
df_frames = []
direct_frames = []
for t in range(hist, n_test - 1):
    z_new = z_test[t].copy()
    z_new_test = z_test[t].copy()
    x_new = x_test[t].copy()
    x_hat_df = df.predict_mean(x_old, z_new)
    x_hat_df = x_hat_df[:, :, 0]
    x_hat_df_like = df.estimate(z_new_test)
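    # x_hat_df fuses the history window with the new observation (the full
    # filter); x_hat_df_like estimates the state from the observation alone.
    # This reading is inferred from the method names predict_mean and estimate.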

Example #5

x, z = get_dataset_rotating_objects(image_shape=img_shape,
                                    n=n,
                                    var=var,
                                    Ber=True)

x_test = np.array(x[:n_test])
z_test = np.array(z[:n_test])
x_train = np.array(x[n_test:])
z_train = np.array(z[n_test:])

# ---- Initialize DF ---- #
tf.keras.backend.clear_session()
df = DeepNoisyBayesianFilter(hist, img_shape)

# ---- Load weights ---- #
df.load_weights('model_weights_rectangles_very_noisy')

# ---- initialize particle filter ---- #
pf = ParticleFilterRect(Np=10000,
                        var=var,
                        img_shape=img_shape,
                        beta=1,
                        Ber=True)
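# var and Ber mirror the dataset settings above; var presumably sets the
# observation-noise level and Ber=True selects a Bernoulli (binary) noise model.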

# ---- Initialize testing arrays ---- #
cm_err_df = []
mass_err_df = []
img_err_df = []
img_kl_df = []

cm_err_pf = []
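
# A minimal sketch (an assumption, not taken from the source) of how a
# centre-of-mass error could be computed per test frame and appended to
# cm_err_df / cm_err_pf:
def cm_error(x_hat, x_true):
    """Euclidean distance between intensity-weighted centres of mass."""
    def com(img):
        ys, xs = np.indices(img.shape)
        m = img.sum()
        return np.array([(ys * img).sum() / m, (xs * img).sum() / m])
    return np.linalg.norm(com(x_hat) - com(x_true))
# hypothetical usage inside a test loop: cm_err_df.append(cm_error(est, x_test[t]))
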
Example #6
n_test = 300
n_train = n - n_test
# ---- Get the dataset ---- #
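# Note the swapped return order (z, x) below: in this "inverse" variant the
# roles of state and observation appear to be exchanged, consistent with the
# ParticleFilter_inv class and the weights filename.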

z, x = generate_dataset(img_shape, n=n, image_type="checkers")
x_test = x[:n_test]
z_test = z[:n_test]
x_train = x[n_test:]
z_train = z[n_test:]

# ---- Initialize ---- #
tf.keras.backend.clear_session()
df = DeepNoisyBayesianFilter(hist, img_shape)

# ---- Load weights ---- #
df.load_weights('model_weights_inverse_chekkers')

# ---- initialize particle filter ---- #
ref_img = np.array(data.checkerboard()).astype(np.float64)
ref_img = cv2.resize(ref_img, img_shape, interpolation=cv2.INTER_AREA)

pf = ParticleFilter_inv(Np=10000,
                        No=1,
                        ref_img=ref_img,
                        radiuses=[20],
                        initial_pose=[[14, 18]],
                        beta=1)

# ---- Test and visualize ---- #
x_old = x_test[:hist, ...].copy()
frames = []