Example 1
import scipy.io as sio  # scipy.io for .mat file I/O
import numpy as np  # numpy for array handling
import function_wmmse_powercontrol as wf  # WMMSE power-control routines
import function_dnn_powercontrol as df  # DNN power-control routines

for K in [10, 20, 30]:
    # Problem Setup, K: number of users.
    print('Gaussian IC Case: K=%d' % K)

    # Load the trained model from this path (modify the path to match your setup)
    model_location = "./DNNmodel/model_%d.ckpt" % (K)

    # Generate Testing Data
    num_test = 1000  # number of testing samples
    X, Y, wmmsetime = wf.generate_Gaussian(K, num_test, seed=7)  # channels X, WMMSE allocations Y, WMMSE run time

    # Testing Deep Neural Networks
    dnntime = df.test(X,
                      model_location,
                      "Prediction_%d" % K,
                      K * K,  # DNN input size: flattened K x K channel matrix
                      K,  # DNN output size: one power level per user
                      binary=1)
    print('wmmse time: %0.3f s, dnn time: %0.3f s, time speed up: %0.1f X' %
          (wmmsetime, dnntime, wmmsetime / dnntime))

    # Evaluate Performance of DNN and WMMSE
    H = np.reshape(X, (K, K, X.shape[1]), order="F")  # recover each K x K channel matrix (column-major layout)
    NNVbb = sio.loadmat('Prediction_%d.mat' % K)['pred']  # DNN power predictions saved by df.test
    wf.perf_eval(H, Y, NNVbb, K)  # compare DNN allocations against the WMMSE benchmark Y
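
For reference, below is a minimal NumPy sketch of the sum-rate metric that an evaluation such as wf.perf_eval is typically based on for the K-user Gaussian interference channel. The helper name sum_rate, the channel-index convention, and the unit noise power are assumptions made for illustration, not part of the repository's API.

import numpy as np

def sum_rate(H, p, noise_var=1.0):
    # Average sum-rate (bit/s/Hz) over all samples of a K-user Gaussian IC.
    # H: (K, K, num_samples) channel gains; H[i, j, n] is assumed to be the
    #    gain from transmitter j to receiver i in sample n.
    # p: (K, num_samples) power allocations with entries in [0, 1].
    K, _, num_samples = H.shape
    rates = np.zeros(num_samples)
    for n in range(num_samples):
        G = np.abs(H[:, :, n]) ** 2           # squared channel gains
        signal = np.diag(G) * p[:, n]         # desired-link received power
        interference = G @ p[:, n] - signal   # cross-link received power
        rates[n] = np.sum(np.log2(1.0 + signal / (interference + noise_var)))
    return rates.mean()

# Usage sketch: compare sum_rate(H, p_dnn) with sum_rate(H, p_wmmse)
# for two power allocations of shape (K, num_samples).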
Example 2
import numpy as np  # numpy for array handling
import function_wmmse_powercontrol as wf  # WMMSE power-control routines (provides generate_Gaussian)
import function_dnn_powercontrol as df  # DNN power-control routines

K = 10  # number of users
num_H = 25000  # number of training samples
num_test = 5000  # number of testing samples
training_epochs = 300  # number of training epochs
trainseed = 0  # set random seed for training set
testseed = 7  # set random seed for test set
Beta = [10**(-3), 10**(-4), 10**(-6)]  # values swept by the training loop below
epochs = np.arange(training_epochs)
# Problem Setup
print('Gaussian IC Case: K=%d, Total Samples: %d, Total Iterations: %d\n' %
      (K, num_H, training_epochs))

# Generate Training Data
Xtrain, Ytrain, wtime = wf.generate_Gaussian(K, num_H, seed=trainseed)  # channels, WMMSE allocations, WMMSE run time

# Training Deep Neural Networks
print('train DNN ...')
# Save & Load model from this path
model_location = "./DNNmodel/model_demo.ckpt"
train = []
time = []
val = []
for i in range(np.size(Beta)):
    df.train(Xtrain,
             Ytrain,
             model_location,
             training_epochs=training_epochs,
             traintestsplit=0.2,
             batch_size=200,