# Training:
# ---------
# model, maml, dataset, h_concat_scaled, label, and the hyperparameters
# (meta_lr, nepoch, batch_size, update_step, shots) are assumed to be
# defined elsewhere in the script.
from torch import nn, optim

optimizer = optim.Adam(model.parameters(), lr=meta_lr, betas=(0.9, 0.999), amsgrad=False)
loss_fn = nn.CrossEntropyLoss()
for iteration in range(nepoch):
    optimizer.zero_grad()
    meta_train_error = 0.0
    meta_valid_error = 0.0
    meta_train_acc = 0.0
    meta_valid_acc = 0.0
    for task in range(batch_size):
        dataset.change_cluster()
        # Compute meta-training loss
        learner = maml.clone()
        batch_idc = dataset.sample()
        batch = (h_concat_scaled[batch_idc, :], label[batch_idc])
        evaluation_error, evaluation_acc = fast_adapt(batch, learner, loss_fn,
                                                      update_step, shots)
        evaluation_error.backward()
        meta_train_error += evaluation_error.item()
        meta_train_acc += evaluation_acc.item()

        # Compute meta-validation loss
        learner = maml.clone()
        batch_idc = dataset.sample()
        batch = (h_concat_scaled[batch_idc, :], label[batch_idc])
        evaluation_error, evaluation_acc = fast_adapt(batch, learner, loss_fn,
                                                      update_step, shots)
        meta_valid_error += evaluation_error.item()
        meta_valid_acc += evaluation_acc.item()

    # Average the accumulated task gradients and take the meta-update step
    for p in model.parameters():
        p.grad.data.mul_(1.0 / batch_size)
    optimizer.step()
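# fast_adapt is not defined in this excerpt. Below is a minimal sketch in the
# style of the learn2learn MAML examples, assuming the first `shots` samples of
# the batch form the support (adaptation) set and the rest form the query
# (evaluation) set; the split convention and variable names are assumptions,
# not the repository's actual implementation.
def fast_adapt(batch, learner, loss_fn, adaptation_steps, shots):
    data, labels = batch
    adaptation_data, evaluation_data = data[:shots], data[shots:]
    adaptation_labels, evaluation_labels = labels[:shots], labels[shots:]
    # Inner loop: adapt the cloned learner on the support set
    for step in range(adaptation_steps):
        train_error = loss_fn(learner(adaptation_data), adaptation_labels)
        learner.adapt(train_error)
    # Evaluate the adapted learner on the query set
    predictions = learner(evaluation_data)
    valid_error = loss_fn(predictions, evaluation_labels)
    valid_acc = (predictions.argmax(dim=1) == evaluation_labels).float().mean()
    return valid_error, valid_acc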
# AnalogBeamformer and bf_gain_loss are defined elsewhere in the repository;
# MAML is learn2learn's MAML wrapper (first-order variant used here).
from learn2learn.algorithms import MAML
from torch import optim

model = AnalogBeamformer(n_antenna=num_antenna, n_beam=N)
maml = MAML(model, lr=fast_lr, first_order=True)

# Training:
# ---------
optimizer = optim.Adam(model.parameters(), lr=meta_lr, betas=(0.9, 0.999), amsgrad=False)
loss_fn = bf_gain_loss
for iteration in range(nepoch):
    optimizer.zero_grad()
    meta_train_error = 0.0
    meta_valid_error = 0.0
    for task in range(batch_size):
        dataset.change_cluster()
        # Compute meta-training loss
        learner = maml.clone()
        batch_idc = dataset.sample()
        batch = (h_concat_scaled[batch_idc, :], egc_gain_scaled[batch_idc])
        evaluation_error = fast_adapt_est_h(batch, learner, loss_fn,
                                            update_step, shots, h_est_force_z)
        evaluation_error.backward()
        meta_train_error += evaluation_error.item()

        # Compute meta-validation loss
        learner = maml.clone()
        batch_idc = dataset.sample()
        batch = (h_concat_scaled[batch_idc, :], egc_gain_scaled[batch_idc])
        evaluation_error = fast_adapt_est_h(batch, learner, loss_fn,
                                            update_step, shots, h_est_force_z)
        meta_valid_error += evaluation_error.item()

    # Average the accumulated task gradients and take the meta-update step
    for p in model.parameters():
        p.grad.data.mul_(1.0 / batch_size)
    optimizer.step()
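# fast_adapt_est_h is not defined in this excerpt. The sketch below is a
# hypothetical regression analogue of fast_adapt: it adapts on estimated
# channels and returns only a loss (no accuracy). It assumes h_est_force_z is
# a callable that maps the raw channel input to the channel estimate used
# during adaptation; that interface, and the support/query split, are
# assumptions rather than the repository's actual code.
def fast_adapt_est_h(batch, learner, loss_fn, adaptation_steps, shots, h_est_force_z):
    data, targets = batch
    adaptation_data, evaluation_data = data[:shots], data[shots:]
    adaptation_targets, evaluation_targets = targets[:shots], targets[shots:]
    # Inner loop: adapt on channel estimates of the support set
    for step in range(adaptation_steps):
        est_data = h_est_force_z(adaptation_data)   # hypothetical estimator hook
        train_error = loss_fn(learner(est_data), adaptation_targets)
        learner.adapt(train_error)
    # Outer loss: evaluate the adapted learner on the query set
    valid_error = loss_fn(learner(evaluation_data), evaluation_targets)
    return valid_error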
import numpy as np
import matplotlib.pyplot as plt

nepoch = 50

#-------------------------------------------#
# Here should be the data_preparing function.
# It is expected to return:
# train_inp, train_out, val_inp, and val_out
#-------------------------------------------#
# antenna_sel and r_seed are assumed to be defined earlier in the script;
# GaussianCenters is a repository utility for clustered UE sampling.
h_real = np.load('D://Github Repositories/mmWave Beam Management/H_Matrices FineGrid/MISO_Static_FineGrid_Hmatrices_real.npy')[:, antenna_sel]
h_imag = np.load('D://Github Repositories/mmWave Beam Management/H_Matrices FineGrid/MISO_Static_FineGrid_Hmatrices_imag.npy')[:, antenna_sel]
loc = np.load('D://Github Repositories/mmWave Beam Management/H_Matrices FineGrid/MISO_Static_FineGrid_UE_location.npy')
# h_real = np.load('/Users/yh9277/Dropbox/ML Beam Alignment/Data/H_Matrices FineGrid/MISO_Static_FineGrid_Hmatrices_real.npy')
# h_imag = np.load('/Users/yh9277/Dropbox/ML Beam Alignment/Data/H_Matrices FineGrid/MISO_Static_FineGrid_Hmatrices_imag.npy')
BS_loc = [641, 435, 10]
num_samples = h_real.shape[0]

# Draw a clustered subset of UE sample indices
gc = GaussianCenters(n_clusters=8, arrival_rate=1000, cluster_variance=5, seed=r_seed)
sel_samples = gc.sample()
# gc.plot_sample(sel_samples)
# sel_samples = np.arange(10000)
h_real = h_real[sel_samples, :]
h_imag = h_imag[sel_samples, :]
loc = loc[sel_samples, :]

# Plot the selected UE positions together with the BS location
plt.figure(figsize=(8, 6))
plt.scatter(loc[:, 0], loc[:, 1], s=1, label='UE')
plt.scatter(BS_loc[0], BS_loc[1], s=10, marker='s', label='BS')
plt.legend(loc='lower left')
plt.xlabel('x (meter)')
plt.ylabel('y (meter)')
plt.title('UE Distribution')
plt.show()
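# h_concat_scaled, used as the model input in the training loops above, is not
# constructed in this excerpt. A plausible construction (an assumption, not the
# repository's confirmed preprocessing) is to stack the real and imaginary
# channel parts and normalize them to a common scale:
h_concat = np.concatenate((h_real, h_imag), axis=1)      # (num UEs, 2 * num antennas)
h_concat_scaled = h_concat / np.max(np.abs(h_concat))    # hypothetical global scaling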