def task_3_2_1():
    """
    This function plots two histograms for verification of the random distributions.
    One histogram is plotted for a uniform distribution, the other one for an exponential distribution.
    """
    # TODO Task 3.2.1: Your code goes here
    no_of_runs = 1000
    rns_exp = ExponentialRNS(5.0)
    rns_uni = UniformRNS(1, 300)
    exponential_values = []
    uniform_values = []
    weight = numpy.full(no_of_runs, 1.0 / float(no_of_runs))

    while no_of_runs != 0:
        exponential_values.append(rns_exp.next())
        uniform_values.append(rns_uni.next())
        no_of_runs -= 1

    pyplot.subplot(121)
    pyplot.title("Uniform Distribution")
    pyplot.xlabel("x")
    pyplot.ylabel("Distribution")
    pyplot.hist(uniform_values, bins=25, weights=weight)

    pyplot.subplot(122)
    pyplot.title("Exponential distribution for Lambda = 5")
    pyplot.xlabel("x")
    pyplot.ylabel("Distribution")
    pyplot.hist(exponential_values, bins=25, weights=weight)
    pyplot.show()
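# The task_3_2_1 variants in this section rely on the course's ExponentialRNS and
# UniformRNS classes, whose implementation is not shown here. The sketch below is only
# an assumption of what they might look like (inverse-transform sampling for the
# exponential stream, a seedable random.Random per stream); constructor signatures vary
# across the snippets above (rate vs. mean parameter, optional the_seed), so this
# version follows the lambda_x / the_seed spelling used in initialize_slices().
import math
import random


class ExponentialRNS:
    """Sketch: exponential random number stream with rate lambda_x (mean 1/lambda_x)."""

    def __init__(self, lambda_x, the_seed=None):
        self.lambda_x = float(lambda_x)
        self.rng = random.Random(the_seed)

    def next(self):
        # inverse transform: X = -ln(1 - U) / lambda, with U ~ Uniform[0, 1)
        return -math.log(1.0 - self.rng.random()) / self.lambda_x


class UniformRNS:
    """Sketch: uniform random number stream on [low, high]."""

    def __init__(self, low, high, the_seed=None):
        self.low = float(low)
        self.high = float(high)
        self.rng = random.Random(the_seed)

    def next(self):
        return self.low + (self.high - self.low) * self.rng.random()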
def task_3_2_1():
    """
    This function plots two histograms for verification of the random distributions.
    One histogram is plotted for a uniform distribution, the other one for an exponential distribution.
    """
    rns_exp = ExponentialRNS(1, the_seed=0)
    rns_uni = UniformRNS(1, 100, the_seed=0)
    n = 10000
    exp_distr = []
    uni_distr = []
    weights = []

    for _ in range(n):
        exp_distr.append(rns_exp.next())
        uni_distr.append(rns_uni.next())
        weights.append(1. / float(n))

    pyplot.subplot(121)
    pyplot.hist(exp_distr, bins=30, weights=weights, edgecolor='black')
    pyplot.xlabel("x")
    pyplot.ylabel("distribution over n")
    pyplot.title("Exponential distribution")

    pyplot.subplot(122)
    pyplot.hist(uni_distr, bins=30, weights=weights, edgecolor='black')
    pyplot.xlabel("x")
    pyplot.ylabel("distribution over n")
    pyplot.title("Uniform distribution")
    pyplot.show()
def task_3_2_1():
    """
    This function plots two histograms for verification of the random distributions.
    One histogram is plotted for a uniform distribution, the other one for an exponential distribution.
    """
    # TODO Task 3.2.1: Your code goes here
    exp_rns = ExponentialRNS(mean=1, the_seed=0)
    uni_rns = UniformRNS(low=2, high=3, the_seed=0)
    exp_list = []
    uni_list = []

    for i in range(100000):
        exp_list.append(exp_rns.next())
        uni_list.append(uni_rns.next())

    weights1 = numpy.full(len(exp_list), 1.0 / float(len(exp_list)))
    weights2 = numpy.full(len(uni_list), 1.0 / float(len(uni_list)))

    pyplot.hist(exp_list, bins=10, weights=weights1, histtype='bar')
    pyplot.xlabel("Exponential")
    pyplot.show()

    pyplot.hist(uni_list, bins=10, weights=weights2, histtype='bar')
    pyplot.xlabel("Uniform")
    pyplot.show()
def task_3_2_1():
    """
    This function plots two histograms for verification of the random distributions.
    One histogram is plotted for a uniform distribution, the other one for an exponential distribution.
    """
    # TODO Task 3.2.1: Your code goes here
    # For exponential dist. & uniform dist.
    samples = 10000
    exp = ExponentialRNS(1.)
    uni = UniformRNS(0., 10.)
    exp_dist = []
    uni_dist = []

    for k in range(samples):
        exp_dist.append(exp.next())
        uni_dist.append(uni.next())

    # weights_ = numpy.full(len(uni_dist), 1.0 / float(len(uni_dist)))
    ax0 = plt.subplot(121)
    plt.xlabel("x")
    plt.ylabel("Probability density function of bin")
    ax0.set_title("Exponential Distribution")
    plt.hist(exp_dist, density=True, bins=int(samples / 100))

    ax1 = plt.subplot(122)
    plt.xlabel("x")
    plt.ylabel("Probability density function of bin")
    ax1.set_title("Uniform Distribution")
    plt.hist(uni_dist, density=True, bins=int(samples / 100))
    plt.show()
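# Note on normalization: the variants above scale their histograms differently.
# Passing weights = 1/n makes the bar heights sum to 1 (relative frequencies), while
# density=True makes the bar *areas* integrate to 1 (an empirical PDF), so the two
# y-axes only coincide when the bin width is 1. A minimal, self-contained illustration
# using numpy and matplotlib only (no project classes assumed):
import numpy
from matplotlib import pyplot

data = numpy.random.default_rng(0).exponential(scale=1.0, size=10000)
weights = numpy.full(len(data), 1.0 / len(data))

freq, bins, _ = pyplot.hist(data, bins=30, weights=weights)  # heights sum to 1
pyplot.clf()
dens, _, _ = pyplot.hist(data, bins=30, density=True)        # areas sum to 1
pyplot.clf()

print(freq.sum())                       # ~1.0
print((dens * numpy.diff(bins)).sum())  # ~1.0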
def initialize_slices(sim_param, log_file):
    slices = []

    # SLA requirements per slice, ordered [RR, MCQI, PF]
    delay_thresholds = sim_param.delay_requirements  # e.g. [50, 50, 50] in ms
    rate_thresholds = sim_param.rate_requirements    # e.g. [2000, 2000, 2000] in kbps
    packet_sizes = sim_param.packet_sizes            # e.g. [10000, 10000, 10000] in bits
    mean_iats = sim_param.mean_iats                  # e.g. [5, 5, 5]
    seed_dist = sim_param.SEED_OFFSET

    # users in all slices have identical distance distributions
    # alternative: rng_dist = RNG(ExponentialRNS(lambda_x=1. / float(sim_param.MEAN_Dist)), s_type='dist')
    rng_dist = RNG(UniformRNS(sim_param.DIST_MIN, sim_param.DIST_MAX, the_seed=seed_dist), s_type='dist')

    tmp_user_id = 0
    for i in range(sim_param.no_of_slices):
        slice_param_tmp = SliceParam(sim_param)
        slice_param_tmp.SLICE_ID = i
        slice_param_tmp.P_SIZE = packet_sizes[i]
        slice_param_tmp.MEAN_IAT = mean_iats[i]

        # SLA requirements
        slice_param_tmp.DELAY_REQ = delay_thresholds[i]
        slice_param_tmp.RATE_REQ = rate_thresholds[i]

        slices.append(SliceSimulation(slice_param_tmp))

        # initialize all users of this slice with traffic and distance
        tmp_users = []
        for j in range(sim_param.no_of_users_list[i]):
            tmp_users.append(User(tmp_user_id, rng_dist.get_dist(), slice_list=[slices[i]], sim_param=sim_param))
            tmp_user_id += 1

        # insert users into the slice
        slices[i].insert_users(tmp_users)

        # choose Slice Manager algorithm: 'RR': round-robin, 'MCQI': max channel quality index, 'PF': proportional fair
        slices[i].slice_param.SM_ALGO = sim_param.SM_ALGO_list[i]

    # log slice parameters
    if log_file is not None:
        for i in range(sim_param.no_of_slices):
            attrs = vars(slices[i].slice_param)
            log_file.write('\nSliceParam\n' + ''.join("%s: %s\n" % item for item in attrs.items()))
        log_file.close()

    return slices
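# A minimal usage sketch for initialize_slices(). SimParam, SliceParam, SliceSimulation,
# User and RNG come from the surrounding project; the module name in the import below is
# an assumption and will differ in the actual repository.
from simparam import SimParam  # assumed module name


def initialize_slices_example():
    sim_param = SimParam()
    log_file = open("results/" + sim_param.timestamp + "/logfile.txt", "wt")
    slices = initialize_slices(sim_param, log_file)  # initialize_slices() closes log_file
    for slc in slices:
        print(slc.slice_param.SLICE_ID, slc.slice_param.SM_ALGO)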
def task_3_2_1():
    """
    This function plots two histograms for verification of the random distributions.
    One histogram is plotted for a uniform distribution, the other one for an exponential distribution.
    """
    # TODO Task 3.2.1: Your code goes here
    sim_param = SimParam()
    random.seed(sim_param.SEED)
    sim_param.RHO = 0.01
    sim = Simulation(sim_param)

    rns_iat = ExponentialRNS(1.0)
    rns_st = ExponentialRNS(1.0 / sim.sim_param.RHO)
    rns_uniform = UniformRNS((2, 4))

    hist1 = TimeIndependentHistogram(sim, "Line")
    hist2 = TimeIndependentHistogram(sim, "Line")
    hist3 = TimeIndependentHistogram(sim, "bp")

    for i in range(1000000):
        hist1.count(rns_iat.next())
        hist2.count(rns_st.next())
        hist3.count(rns_uniform.next())

    hist1.report()
    hist2.report()
    hist3.report()
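# Besides histogram plots, a quick sanity check for these streams is to compare
# empirical and theoretical moments: an Exp(lambda) stream should have mean 1/lambda,
# and a Uniform(a, b) stream should have mean (a + b) / 2. A self-contained sketch
# using numpy only (the course's RNS classes are not assumed here):
import numpy

rng = numpy.random.default_rng(0)
lam, a, b, n = 5.0, 2.0, 4.0, 100000

exp_samples = rng.exponential(scale=1.0 / lam, size=n)
uni_samples = rng.uniform(a, b, size=n)

print(exp_samples.mean(), 1.0 / lam)      # both close to 0.2
print(uni_samples.mean(), (a + b) / 2.0)  # both close to 3.0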
def ran_simulation():
    """
    Main ran_simulation
    """
    # define sim_param and inside RB pool: all available Resources list
    sim_param = SimParam()
    no_of_slices = sim_param.no_of_slices
    no_of_users_per_slice = sim_param.no_of_users_per_slice

    # create result directories
    create_dir(sim_param)

    # create logfile and write SimParameters
    results_dir = "results/" + sim_param.timestamp
    log_file = open(results_dir + "/logfile.txt", "wt")
    log_file.write('no_of_slices: %d\nno_of_users_per_slice: %d\n\n' % (no_of_slices, no_of_users_per_slice))
    attrs = vars(sim_param)
    log_file.write('SimParam\n' + ''.join("%s: %s\n" % item for item in attrs.items()))
    # log_file.close()

    # initialize SD_RAN_Controller
    SD_RAN_Controller = Controller(sim_param)

    # Each slice has different users
    slices = []
    slice_results = []

    # initialize all slices
    for i in range(no_of_slices):
        slice_param_tmp = SliceParam(sim_param)
        slice_param_tmp.SLICE_ID = i
        slices.append(SliceSimulation(slice_param_tmp))
        slice_results.append([])

        # initialize all users with traffics and distances
        tmp_users = []
        seed_dist = 0  # users in all slices have identical distance distributions
        # alternative: rng_dist = RNG(ExponentialRNS(lambda_x=1. / float(sim_param.MEAN_Dist)), s_type='dist')
        rng_dist = RNG(UniformRNS(sim_param.DIST_MIN, sim_param.DIST_MAX, the_seed=seed_dist), s_type='dist')
        for j in range(no_of_users_per_slice):
            user_id = i * no_of_users_per_slice + j
            # fixed distances (e.g. dist_arr = [10, 100]) can be used here instead of rng_dist.get_dist()
            tmp_users.append(User(user_id, rng_dist.get_dist(), slice_list=[slices[i]], sim_param=sim_param))

        # insert users into the slice
        slices[i].insert_users(tmp_users)

    # choose Slice Manager algorithm: 'RR': round-robin, 'MCQI': max channel quality index, 'PF': proportional fair
    slices[0].slice_param.SM_ALGO = 'RR'
    slices[1].slice_param.SM_ALGO = 'MCQI'
    slices[2].slice_param.SM_ALGO = 'PF'

    # log slice parameters
    for i in range(no_of_slices):
        attrs = vars(slices[i].slice_param)
        log_file.write('\nSliceParam\n' + ''.join("%s: %s\n" % item for item in attrs.items()))
    # log_file.close()

    # loop rounds for each slice
    for i in range(int(sim_param.T_FINAL / sim_param.T_C)):
        RB_mapping = SD_RAN_Controller.RB_allocate_to_slices(slices[0].sim_state.now, slices)
        for j in range(len(slices)):
            slices[j].prep_next_round(RB_mapping[j, :, :])
            slice_results[j].append(slices[j].simulate_one_round())

    # store simulation results
    # user results
    parent_dir = "results/" + sim_param.timestamp + "/user_results"
    path = parent_dir + "/tp"
    for i in range(len(slice_results)):
        user_count = len(slice_results[i][-1].server_results)  # choose latest result for data
        for k in range(user_count):
            common_name = "/slice%d_user%d_" % (i, slice_results[i][-1].server_results[k].server.user.user_id)
            cc_temp = slice_results[i][-1].server_results[k].server.counter_collection

            # tp
            filename = parent_dir + "/tp" + common_name + "sum_power_two.csv"
            savetxt(filename, cc_temp.cnt_tp.sum_power_two, delimiter=',')
            filename = parent_dir + "/tp" + common_name + "values.csv"
            savetxt(filename, cc_temp.cnt_tp.values, delimiter=',')
            filename = parent_dir + "/tp" + common_name + "timestamps.csv"
            savetxt(filename, cc_temp.cnt_tp.timestamps, delimiter=',')
            filename = parent_dir + "/tp" + common_name + "all_data.csv"
            df = DataFrame(np.transpose(np.array([cc_temp.cnt_tp.values, cc_temp.cnt_tp.timestamps])),
                           columns=['Values', 'Timestamps'])
            export_csv = df.to_csv(filename, index=None, header=True)

            # tp2
            filename = parent_dir + "/tp2" + common_name + "sum_power_two.csv"
            savetxt(filename, cc_temp.cnt_tp2.sum_power_two, delimiter=',')
            filename = parent_dir + "/tp2" + common_name + "values.csv"
            savetxt(filename, cc_temp.cnt_tp2.values, delimiter=',')
            filename = parent_dir + "/tp2" + common_name + "timestamps.csv"
            savetxt(filename, cc_temp.cnt_tp2.timestamps, delimiter=',')

            # ql
            filename = parent_dir + "/ql" + common_name + "sum_power_two.csv"
            savetxt(filename, cc_temp.cnt_ql.sum_power_two, delimiter=',')
            filename = parent_dir + "/ql" + common_name + "values.csv"
            savetxt(filename, cc_temp.cnt_ql.values, delimiter=',')
            filename = parent_dir + "/ql" + common_name + "timestamps.csv"
            savetxt(filename, cc_temp.cnt_ql.timestamps, delimiter=',')

            # system time (delay)
            filename = parent_dir + "/delay" + common_name + "values.csv"
            savetxt(filename, cc_temp.cnt_syst.values, delimiter=',')
            filename = parent_dir + "/delay" + common_name + "timestamps.csv"
            savetxt(filename, cc_temp.cnt_syst.timestamps, delimiter=',')

            # Find how to insert histograms

    # plot results
    parent_dir = "results/" + sim_param.timestamp
    plot_results(parent_dir, no_of_slices, no_of_users_per_slice, sim_param, slices)

    # RB distribution printing
    filename = "results/" + sim_param.timestamp + "/summary"
    rb_total = 0
    rb_dist = []
    for s in slices:
        rb_dist_slice = []
        for u in s.server_list:
            rb_dist_slice.append(u.RB_counter)
        slicesum = np.nansum(rb_dist_slice)
        print("Slice %d dist: " % s.slice_param.SLICE_ID, *np.round(np.divide(rb_dist_slice, slicesum / 100), 1))
        # write these to file
        # savetxt(filename, cc_temp.cnt_ql.sum_power_two, delimiter=',')
        rb_dist.append(slicesum)
    totalsum = np.nansum(rb_dist)
    print("rb dist (RR MCQI PF): ", *np.round(np.divide(rb_dist, totalsum / 100), 1))
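# If ran_simulation() lives in a standalone script, a standard entry-point guard keeps it
# from running on import; this is a conventional sketch, not taken from the repository.
if __name__ == '__main__':
    ran_simulation()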