Example #1
def prepare_XT(X, T, T_scaler=T_scaler, gen_plots=False):
    # slide a window of nCMP neighboring CMP gathers along the CMP axis,
    # then reorder each block to (sample, offset, time, CMP)
    nCMP = 21
    X_multi = view_as_windows(X, (1, nCMP, X.shape[2], X.shape[3])).squeeze().reshape((-1, nCMP, X.shape[2], X.shape[3]))
    X_multi = np.swapaxes(X_multi, 1, 3)
    X_multi = np.swapaxes(X_multi, 1, 2)
    
    # target: the velocity log at the central CMP of each window
    T_multi = view_as_windows(T, (1, nCMP, T.shape[2])).squeeze().reshape((-1, nCMP, T.shape[2]))[:, nCMP//2, :].squeeze()
    

    # X is passed through unchanged here; only the targets are scaled
    X_scaled_multi = X_multi
    T_scaled_multi = T_scaler.transform(T_multi)
    # extract central CMPs for singleCMP network
    X_scaled = X_scaled_multi[:,:,:,nCMP//2:nCMP//2+1]
    T_scaled = T_scaled_multi
    #%%
    if gen_plots:
        plt_nb_T(T_multi, dx=c.jgx*c.dx, dz=c.jlogz*c.dx, fname="../latex/Fig/T_multi")
        plt_nb_T(1e3*T_scaled, dx=c.jgx*c.dx, dz=c.jlogz*c.dx, fname="../latex/Fig/T_scaled")

        #%%
        # show single training sample
        sample_reveal = nCMP
        plt_nb_T(1e3*np.concatenate((np.squeeze(X_scaled_multi[sample_reveal,:,:,-1]), np.flipud(np.squeeze(X_scaled_multi[sample_reveal,:,:,0]))), axis=0),
                title="CMP first | CMP last", dx=200, dz=1e3*dt*c.jdt, 
                origin_in_middle=True, ylabel="Time(s)", fname="../latex/Fig/X_scaled", cbar_label = "")
        print(np.shape(1e3*T_scaled[sample_reveal-(nCMP+1)//2:sample_reveal+(nCMP-1)//2:nCMP]))

        plt_nb_T(1e3*T_multi[sample_reveal-(nCMP-1)//2:sample_reveal+(nCMP-1)//2,:], 
                dx=100, dz=c.dx*c.jlogz, 
                title="scaled velocity logs")
    
    return X_scaled_multi, T_scaled_multi
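
For reference, a minimal standalone sketch of the view_as_windows pattern used above, with made-up array sizes rather than the project's real data; it shows how overlapping nCMP-wide blocks of neighboring CMP gathers become individual samples and how the axes end up as (sample, offset, time, CMP).

import numpy as np
from skimage.util import view_as_windows

nModels, nCMPtotal, nOffsets, nTimes = 2, 30, 16, 64
nCMP = 21
X_demo = np.random.rand(nModels, nCMPtotal, nOffsets, nTimes)

# one window per model and per starting CMP position
windows = view_as_windows(X_demo, (1, nCMP, nOffsets, nTimes))
print(windows.shape)        # (2, 10, 1, 1, 1, 21, 16, 64)

X_multi_demo = windows.squeeze().reshape((-1, nCMP, nOffsets, nTimes))
print(X_multi_demo.shape)   # (20, 21, 16, 64): 2 models x 10 window positions

# reorder to (sample, offset, time, CMP), as prepare_XT does
X_multi_demo = np.swapaxes(np.swapaxes(X_multi_demo, 1, 3), 1, 2)
print(X_multi_demo.shape)   # (20, 16, 64, 21)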
Example #2
def scale_X_data(X_data_test=X_data, X_scaler=None):
    generate_scaler = False
    if X_scaler is None:
        generate_scaler = True
        # custom scaling of X_data
        X_scaler = StandardScaler() # MinMaxScaler([-1, 1])
        print(np.shape(X_data))
        X_data_cut = X_data[:,:-3,:,:]   # drop the last three CMP positions
        X_matrix = X_data_cut.reshape([X_data_cut.shape[0], -1])
        X_scaler.fit(X_matrix)
        plt_nb_T(np.flipud(1e3*X_scaler.scale_.reshape([X_data_cut.shape[1], -1])), title="Scale",ylabel="Time(s)", dx=200, dz=1e3*dt*jdt, cbar_label="", fname="../latex/Fig/X_scale")

        plt_nb_T(np.flipud(1e3*X_scaler.mean_.reshape([X_data_cut.shape[1], -1])), title="Mean", ylabel="Time(s)", dx=200, dz=1e3*dt*jdt, cbar_label="", fname="../latex/Fig/X_mean")
        plt_nb_T(np.flipud(np.log(X_scaler.var_.reshape([X_data_cut.shape[1], -1]))), title="Variance", ylabel="Time(s)", dx=200, dz=1e3*dt*jdt, cbar_label="", fname="../latex/Fig/X_log_var")

    X_data_test = X_data_test[:,:-3,:,:]
    X_matrix_test = X_data_test.reshape([X_data_test.shape[0], -1])
    X_data_test_matrix_scaled = X_scaler.transform(X_matrix_test)
    X_data_test_scaled = X_data_test_matrix_scaled.reshape(X_data_test.shape)
    X_data_test_scaled = np.clip(X_data_test_scaled, a_min=-1, a_max=1)
    #plt_nb_T(1e3*np.squeeze(np.percentile(abs(X_data_test), axis=0, q=10)), title="Percentile")

    if generate_scaler:
        return X_data_test_scaled, X_scaler
    return X_data_test_scaled
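
A minimal sketch of the flatten / StandardScaler / reshape / clip pattern used in scale_X_data, on synthetic data with a made-up shape (assuming the (model, CMP, offset, time) layout noted later in this file).

import numpy as np
from sklearn.preprocessing import StandardScaler

X_demo = np.random.randn(8, 10, 16, 64)               # (model, CMP, offset, time)
X_flat = X_demo.reshape(X_demo.shape[0], -1)           # one row per model
X_scaler_demo = StandardScaler().fit(X_flat)           # per-feature mean/std

X_demo_scaled = X_scaler_demo.transform(X_flat).reshape(X_demo.shape)
X_demo_scaled = np.clip(X_demo_scaled, a_min=-1, a_max=1)   # suppress outliers
print(X_demo_scaled.shape)                              # (8, 10, 16, 64)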
Example #3
def show_model_generation():
    stretch_X_train = c.stretch_X_train
    vel = generate_model(stretch_X=stretch_X_train,
                         training_flag=False,
                         crop_flag=False,
                         distort_flag=False,
                         random_state_number=randint(10000))

    # Marmousi model with flip augmentation
    vel = rsf_to_np("marmvel.hh")
    plt_nb_T(aug_flip(vel), dx=4, dz=4, fname="../latex/Fig/marm_aug")

    # stretched, cropped Marmousi
    vel = generate_model(stretch_X=stretch_X_train,
                         distort_flag=False,
                         random_state_number=c.random_state_number,
                         show_flag=True)
    plt_nb_T(vel, fname="../latex/Fig/cropMarm")
    N = np.shape(vel)

    vel_example = elastic_transform(np.atleast_3d(vel),
                                    alpha_deform,
                                    sigma_deform,
                                    random_state_number=c.random_state_number,
                                    plot_name="Normal")

    N = np.shape(vel)
    vel_example = generate_model(stretch_X=stretch_X_train,
                                 training_flag=True,
                                 random_state_number=c.random_state_number,
                                 show_flag=True)
    # four additional random realizations for the side-by-side figure
    vel1 = generate_model(stretch_X=stretch_X_train,
                          training_flag=False,
                          random_state_number=randint(10000))
    vel2 = generate_model(stretch_X=stretch_X_train,
                          training_flag=False,
                          random_state_number=randint(10000))
    vel3 = generate_model(stretch_X=stretch_X_train,
                          training_flag=False,
                          random_state_number=randint(10000))
    vel4 = generate_model(stretch_X=stretch_X_train,
                          training_flag=False,
                          random_state_number=randint(10000))
    plt_nb_T(np.concatenate((vel_example, vel1, vel2, vel3, vel4), axis=1),
             fname="../latex/Fig/random_model_example")
Example #4
def test_on_model(folder="marmvel1D",
                  net_dict=None,
                  prefix="singleCMP",
                  model_filename=None, 
                  distort_flag=False,
                  stretch_X=None,
                  nCMP_max=nCMP,
                  generate_rsf_data_flag=True,
                  jgx=jgx, sxbeg=sxbeg, gxbeg=gxbeg,
                  X_scaler=X_scaler):
    
    if model_filename is None:
        model_filename=f"{folder}.hh"
    
    fig_path = f"../latex/Fig/test_{prefix}_{folder}"
    
    # expand model
    model_output="vel_test.rsf"
    print(model_output)
    vel_test = generate_model(model_input=model_filename, 
                              model_output=model_output, 
                              stretch_X=stretch_X,
                              random_state_number=const.random_state_number,
                              distort_flag=distort_flag,
                              crop_flag=False)
          
    # model data
    if generate_rsf_data_flag:
        cmd(f"mkdir {folder}")
        cmd(f"cp {model_output} {folder}/{model_output}")
        # check stability: dt must be below dx / max(velocity)
        print(f"you chose dt = {dt}; for stability dt < {dx/np.max(vel_test):.4f} is required\n")
        # abort if the stability condition is violated
        assert dt < dx/np.max(vel_test)
        generate_rsf_data(model_name=f"{folder}/vel_test.rsf", 
                          shots_out=f"{folder}/shots_cmp_test.rsf", 
                          logs_out=f"{folder}/logs_test.rsf")
    
    # read data
    X_data_test, T_data_test = read_rsf_to_np(shots_rsf=f"{folder}/shots_cmp_test.rsf", 
                                              logs_rsf=f"{folder}/logs_test.rsf")
    
    # X_scaled
    X_scaled = scale_X_data(X_data_test, X_scaler)
    
    nCMP = int(net_dict["0"].input.shape[3])
    X_scaled, T_data_test = make_multi_CMP_inputs(X_scaled, T_data_test, nCMP_max)
    sample_reveal = nCMP_max+1
    plt_nb_T(1e3*np.concatenate((np.squeeze(X_scaled[sample_reveal,:,:,-1]), np.flipud(np.squeeze(X_scaled[sample_reveal,:,:,0]))), axis=0),
        title="CMP first | CMP last", dx=200, dz=1e3*dt*jdt, 
        origin_in_middle=True, ylabel="Time(s)", fname=f"{fig_path}_X_scaled", cbar_label = "")
    if nCMP == 1:
        X_scaled = X_scaled[:,:,:,nCMP_max//2:nCMP_max//2+1]
    
    # predict with all networks on the original and CMP-flipped inputs; save the average
    T_pred_total = np.zeros_like(net_dict["0"].predict(X_scaled))    
    T_pred_dict = np.zeros((2*len(net_dict), T_pred_total.shape[0], T_pred_total.shape[1]))
    
    iNet=0
    for net in net_dict.values():
        T_pred_tmp = net.predict(X_scaled)
        T_pred_tmp = T_scaler.inverse_transform(T_pred_tmp)
        T_pred_dict[iNet,:,:] = T_pred_tmp
        T_pred_tmp = net.predict(np.flip(X_scaled, axis=3))
        T_pred_tmp = T_scaler.inverse_transform(T_pred_tmp)
        T_pred_dict[iNet+1,:,:] = T_pred_tmp
        iNet += 2
   
    T_pred = np.mean(T_pred_dict, axis=0)
    variance = np.var(T_pred_dict, axis=0)
    
    
    plt_nb_T(np.sqrt(variance), title="Standard deviation",
             dx=jgx*dx, dz=jlogz*dx,
             fname=f"{fig_path}_inverted_std_dev",
             vmin=0.05, vmax=1)
    
    # plt_nb_T(T_pred-T_data_test, title="Pred-True",
    #          dx=jgx*dx, dz=jlogz*dx,
    #          fname=f"{fig_path}_inverted_std_dev",
    #          vmin=-1, vmax=1)
    
    plt_nb_T(T_pred, title=f"{prefix} estimate, NRMS={nrms(T_pred, T_data_test):.1f}%",
             dx=jgx*dx, dz=jlogz*dx,
             vmin=np.min(1e-3*T_data_test), 
             vmax=np.max(1e-3*T_data_test),
             fname=f"{fig_path}_inverted")
        
    plt_nb_T(T_data_test,
             dx=jgx*dx, dz=jlogz*dx,
             fname=f"{fig_path}_true",
             title=f"True, R2 = {r2_score(T_pred.flatten(), T_data_test.flatten()):.2f}")
    
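
The averaging loop above amounts to a small ensemble with flip test-time augmentation. Below is a self-contained sketch of that pattern with hypothetical stand-in "networks" (ToyNet is invented for illustration and is not part of net_dict).

import numpy as np

class ToyNet:
    """Hypothetical stand-in for a trained model with a .predict method."""
    def __init__(self, seed):
        self.rng = np.random.RandomState(seed)
    def predict(self, X):
        # fake prediction: input mean plus a little network-specific noise
        return X.mean(axis=(1, 2, 3))[:, None] + 0.01 * self.rng.randn(X.shape[0], 1)

nets = {str(i): ToyNet(i) for i in range(3)}
X_demo = np.random.rand(5, 16, 64, 21)                    # (sample, offset, time, CMP)

preds = []
for net in nets.values():
    preds.append(net.predict(X_demo))                      # original CMP order
    preds.append(net.predict(np.flip(X_demo, axis=3)))     # reversed CMP order
preds = np.stack(preds)                                    # (2*len(nets), sample, ...)

T_pred_demo = preds.mean(axis=0)                           # ensemble average
T_std_demo = np.sqrt(preds.var(axis=0))                    # uncertainty proxy
print(T_pred_demo.shape, T_std_demo.shape)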
Example #5
    # (tail of generate_model) optional multiplicative distortion:
    # a coarse random field, resized to the model grid, scales velocities by ~±10%
    if distort_flag:
        vel_alpha = (0.9+0.2*resize(np.random.rand(5,10), vel.shape))
        #print(vel_alpha)
        vel *= vel_alpha
    # add water
    # vel = np.concatenate((1500*np.ones((vel.shape[0], 20)), vel), 
    #                      axis=1)
    #vel = ndimage.median_filter(vel, size=(7,3))
    #vel = 1500 * np.ones_like(vel)
    if verbose:
        print(f"Writing to {model_output}")
    np_to_rsf(vel, model_output)
    return vel

vel = generate_model(stretch_X=stretch_X_train, training_flag=False, random_state_number=randint(10000))
plt_nb_T(vel)

#%%
# Stretched Marmousi

vel = generate_model(stretch_X=stretch_X_train, distort_flag = False)
plt_nb_T(vel, fname="../latex/Fig/cropMarm")
N = np.shape(vel)


#%%
# vel_example = elastic_transform(np.atleast_3d(vel), alpha_deform//2, sigma_deform, 
#                                 random_state_number=const.random_state_number, plot_name="Mild")
vel_example = elastic_transform(np.atleast_3d(vel), alpha_deform, sigma_deform, 
                                random_state_number=const.random_state_number, plot_name="Normal")
Example #6
T_multi = view_as_windows(T, (1, nCMP, T.shape[2])).squeeze().reshape((-1, nCMP, T.shape[2]))[:,nCMP//2,:].squeeze()

# create scaler for the outputs
T_scaler = StandardScaler().fit(T_multi)
scale = np.copy(T_scaler.scale_)
mean = np.copy(T_scaler.mean_)
np.save("scale", scale)
np.save("mean", mean)

#%%
# disable output scaling (identity scaler); the fitted statistics stay saved on disk
T_scaler.scale_[:] = 1
T_scaler.mean_[:] = 0
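
#%%
# The statistics saved above could be re-applied by hand later, e.g. at
# inference time; an illustrative sketch, not taken from the original scripts.
scale_loaded = np.load("scale.npy")
mean_loaded = np.load("mean.npy")

def scale_T(T_rows):
    # mirrors StandardScaler.transform with the saved statistics
    return (T_rows - mean_loaded) / scale_loaded

def unscale_T(T_rows_scaled):
    # mirrors StandardScaler.inverse_transform
    return T_rows_scaled * scale_loaded + mean_loaded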

# X has the format (model, CMP, offset, time)

plt_nb_T(X[1,:10, -1,:200], title="Common offset (600 m) gather", dx=c.dx*c.jgx*2, dz=1e3*dt*c.jdt, 
        origin_in_middle=True, ylabel="Time(s)", fname="../latex/Fig/X_short_offset", vmin=-1e-4, vmax=1e-4)

plt_nb_T(T[1,:10,:100], title="Model", dx=c.dx*c.jgx*2, dz=c.dx*c.jlogz, 
        origin_in_middle=True, ylabel="Time(s)", fname="../latex/Fig/X_short_offset")

#X=X[:,:-3,:,:]

#%%
def prepare_XT(X,T, T_scaler=T_scaler, gen_plots=False):
    nCMP = 21
    X_multi = view_as_windows(X, (1,nCMP,X.shape[2],X.shape[3])).squeeze().reshape((-1, nCMP, X.shape[2], X.shape[3]))
    X_multi = np.swapaxes(X_multi,1,3)
    X_multi = np.swapaxes(X_multi,1,2)
    
    T_multi = view_as_windows(T, (1, nCMP, T.shape[2])).squeeze().reshape((-1, nCMP, T.shape[2]))[:,nCMP//2,:].squeeze()