def run(param): assert "input_data_location" in param, "Error: parameter 'input_data_location' was not found" assert "experiment_name" in param, "Error: parameter 'experiment_name' was not found" assert "cache_size" in param, "Error: parameter 'cache_size' was not found" #assert "algorithm" in param, "Error: parameter 'algorithm' was not found" ########################################################################### ## Specify input folder ## Create a file input_data_location.txt and put in the config folder ########################################################################### DATA_FOLDER = param["input_data_location"] experiment_name = param['experiment_name'] ############################################################### ## Read data ############################################################### trace_obj = Trace(512) trace_obj.read(DATA_FOLDER + experiment_name) pages = trace_obj.get_request() pages = pages[:int(param['trace_limit'] )] if 'trace_limit' in param else pages num_pages = len(pages) unique_pages = trace_obj.unique_pages() cache_size_per = float(param['cache_size']) param['cache_size'] = int( round(unique_pages * cache_size_per)) if cache_size_per < 1 else int(cache_size_per) print("{:<40} {:<20} {:<20} ".format("Name", "Total Request", "Unique Pages")) print("\n") print("{n:<40} {r:<20} {u:<20} ".format(n=experiment_name, r=num_pages, u=unique_pages))
def run(param): assert "input_data_location" in param, "Error: parameter 'input_data_location' was not found" assert "experiment_name" in param, "Error: parameter 'experiment_name' was not found" assert "cache_size" in param, "Error: parameter 'cache_size' was not found" assert "algorithm" in param, "Error: parameter 'algorithm' was not found" experiment_name = param['experiment_name'] ########################################################################### ## Specify input folder ## Create a file input_data_location.txt and put in the config folder ########################################################################### DATA_FOLDER = param["input_data_location"] ########################################################################### ## Specify output location ## Create a file output_data_location.txt and put in the config folder ## This file should contain the path where the outputs will be saved ########################################################################### ############################################################### ## Read data ############################################################### trace_obj = Trace(512) trace_obj.read(DATA_FOLDER + experiment_name) pages = trace_obj.get_request() num_pages = len(pages) unique_pages = trace_obj.unique_pages() cache_size_per = float(param['cache_size']) if cache_size_per < 1: param['cache_size'] = int(round(unique_pages * cache_size_per)) else: param['cache_size'] = int(cache_size_per) algo = GetAlgorithm(param['algorithm'])(param) # if algorithm.lower() == "lecar" : # if "learning_rate" in param: # algo.learning_rate = float(param['learning_rate']) # if "history_size" in param: # algo.history_size = float(param['history_size']) start = time.time() hits, part_hit_rate, hit_sum = algo.test_algorithm(pages, partition_size=int( 0.01 * len(pages))) end = time.time() # result = "{:<20} {:<20} {:<20} {:<20} {:<20} {:<20}".format(algorithm, round(100.0 * hits / num_pages,2), hits, num_pages, trace_obj.unique_pages(), round(end-start,3)) # print(result) sys.stdout.flush() return round(100.0 * hits / num_pages, 2), round(end - start, 3)
def run(param, ax_weight, ax_hoarding, ax_hitrate):
    assert "input_data_location" in param, "Error: parameter 'input_data_location' was not found"
    assert "experiment_name" in param, "Error: parameter 'experiment_name' was not found"
    assert "cache_size" in param, "Error: parameter 'cache_size' was not found"
    assert "algorithm" in param, "Error: parameter 'algorithm' was not found"

    ###########################################################################
    ## Specify input folder
    ## Create a file input_data_location.txt and put in the config folder
    ###########################################################################
    DATA_FOLDER = param["input_data_location"]
    experiment_name = param['experiment_name']

    ###############################################################
    ## Read data
    ###############################################################
    trace_obj = Trace(512)
    trace_obj.read(DATA_FOLDER + experiment_name)
    pages = trace_obj.get_request()
    pages = pages[:int(param['trace_limit'])] if 'trace_limit' in param else pages
    num_pages = len(pages)
    unique_pages = trace_obj.unique_pages()

    cache_size_per = float(param['cache_size'])
    param['cache_size'] = int(round(unique_pages * cache_size_per)) if cache_size_per < 1 else int(cache_size_per)

    ###############################################################
    ## Simulate algorithm
    ###############################################################
    algo = GetAlgorithm(param['algorithm'])(param)
    averaging_window_size = int(0.01 * len(pages))

    start = time.time()
    hits, _, hit_sum = test_algorithm(algo, pages, partition_size=averaging_window_size)
    end = time.time()

    ###############################################################
    ## Visualize
    ###############################################################
    visualize = 'visualize' in param and bool(param['visualize'])
    if visualize:
        for v in trace_obj.vertical_lines:
            ax_hitrate.axvline(x=v, color='g', alpha=0.75)
            ax_weight.axvline(x=v, color='g', alpha=0.75)
            ax_hoarding.axvline(x=v, color='g', alpha=0.75)

        temp = np.append(np.zeros(averaging_window_size), hit_sum[:-averaging_window_size])
        hitrate = (hit_sum - temp) / averaging_window_size
        ax_hitrate.set_xlim(0, len(hitrate))
        ax_hitrate.plot(range(len(hitrate)), hitrate, label=param['algorithm'], alpha=0.8)

        if param['algorithm'].lower() in ("lecar8", "lecar4"):
            algo.visualize(ax_weight, ax_hoarding, averaging_window_size)
        # else:
        #     algo.visualize(ax_weight)

    del pages[:]
    return round(100.0 * hits / num_pages, 2), round(end - start, 3)
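###############################################################
## Self-contained sketch (made-up data) of the rolling hit-rate
## trick used above: hit_sum is a cumulative hit count per
## request, so subtracting a copy shifted right by the window
## size yields the hit count inside each trailing window.
###############################################################
import numpy as np

w = 3                                           # window size
flags = np.array([1, 0, 1, 1, 0, 1, 1, 1])      # made-up hit/miss flags
hit_sum = np.cumsum(flags)                      # [1 1 2 3 3 4 5 6]
shifted = np.append(np.zeros(w), hit_sum[:-w])  # [0 0 0 1 1 2 3 3]
rolling_hitrate = (hit_sum - shifted) / w
print(rolling_hitrate)  # last entry = mean of the final 3 flags = 1.0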
def run(param, ax_weight, ax_hitrate, exp_cnt):
    assert "input_data_location" in param, "Error: parameter 'input_data_location' was not found"
    assert "experiment_name" in param, "Error: parameter 'experiment_name' was not found"
    assert "cache_size" in param, "Error: parameter 'cache_size' was not found"
    assert "algorithm" in param, "Error: parameter 'algorithm' was not found"

    ###########################################################################
    ## Specify input folder
    ## Create a file input_data_location.txt and put in the config folder
    ###########################################################################
    DATA_FOLDER = param["input_data_location"]
    experiment_name = param['experiment_name']

    ###############################################################
    ## Read data
    ###############################################################
    trace_obj = Trace(512)
    trace_obj.read(DATA_FOLDER + experiment_name)
    pages = trace_obj.get_request()
    pages = pages[:int(param['trace_limit'])] if 'trace_limit' in param else pages
    num_pages = len(pages)
    unique_pages = trace_obj.unique_pages()

    cache_size_per = float(param['cache_size'])
    param['cache_size'] = int(round(unique_pages * cache_size_per)) if cache_size_per < 1 else int(cache_size_per)

    ###############################################################
    ## Simulate algorithm
    ###############################################################
    print("Experiment name:", experiment_name.split("-")[0], ", Cache size:", cache_size_per)
    algo = GetAlgorithm(param['algorithm'])(param)
    averaging_window_size = int(0.01 * len(pages))

    start = time.time()
    hits, _, hit_sum = test_algorithm(algo, pages, partition_size=averaging_window_size)
    end = time.time()

    ###############################################################
    ## Visualize
    ###############################################################
    visualize = 'visualize' in param and bool(param['visualize'])
    if visualize:
        algo_name = get_algo_name(param)
        for v in trace_obj.vertical_lines:
            ax_hitrate.axvline(x=v, color='g', alpha=0.75)
            if param['algorithm'].lower() == "lecar8":
                ax_weight.axvline(x=v, color='g', alpha=0.75)

        temp = np.append(np.zeros(averaging_window_size), hit_sum[:-averaging_window_size])
        hitrate = (hit_sum - temp) / averaging_window_size
        ax_hitrate.set_xlim(0, len(hitrate))

        hitrate_plot = round(100.0 * hits / num_pages, 2)
        colors = ["red", "green", "blue"]
        # ax_hitrate.set_title('LeCaR (Learning Rate vs Hit Rate)')
        if param['algorithm'].lower() == "lecar8":
            ax_hitrate.plot(range(len(hitrate)), hitrate,
                            label=algo_name + " - " + str(hitrate_plot),
                            color=colors[exp_cnt % 3], alpha=0.8)
        else:
            ax_hitrate.plot(range(len(hitrate)), hitrate,
                            label=algo_name + "(LR:" + param['learning_rate'] + ") - " + str(hitrate_plot),
                            color=colors[exp_cnt % 3], alpha=0.8)

        if param['algorithm'].lower() == "lecar8":
            learning_rates = algo.getLearningRates()
            print("Mean Learning Rate", np.mean(learning_rates))
            print("Max Learning Rate", np.max(learning_rates))
            print("Min Learning Rate", np.min(learning_rates))
            ax_weight.set_ylabel('Learning Rate')
            ax_weight.plot(range(len(learning_rates)), learning_rates, 'r-', linewidth=3)

    del pages[:]
    return round(100.0 * hits / num_pages, 2), round(end - start, 3)
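###############################################################
## A sketch (assumed setup) of driving this variant: two
## stacked matplotlib axes, learning rate on top and hit rate
## below. The paths, the "lecar8" key, and the "visualize"
## flag value are assumptions, not confirmed by the codebase.
###############################################################
import matplotlib.pyplot as plt

if __name__ == "__main__":
    fig, (ax_weight, ax_hitrate) = plt.subplots(2, sharex=True)
    hit_rate, elapsed = run({
        "input_data_location": "traces/",        # hypothetical folder
        "experiment_name": "example-trace.txt",  # hypothetical file
        "cache_size": 0.05,
        "algorithm": "lecar8",
        "visualize": "True",
    }, ax_weight, ax_hitrate, exp_cnt=0)
    ax_hitrate.legend()
    plt.show()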
visualizeInternalStatePlot = True  # experiment_name.endswith('.txt')

###############################################################
## Save data here
###############################################################
data_dict = {}

###############################################################
## Plot title
###############################################################

###############################################################
## Read data
###############################################################
trace_obj = Trace(blocksize)
trace_obj.read(DATA_FOLDER + experiment_name)
pages = trace_obj.get_request()
num_pages = len(pages)
unique_pages = trace_obj.unique_pages()

if cache_size_per < 1:
    cache_size = int(round(unique_pages * cache_size_per))
    cache_size_label = str(float(cache_size_per))
else:
    cache_size = int(cache_size_per)
    cache_size_label = str(cache_size)

averaging_window_size = int(0.01 * len(pages))
print('averaging_window_size = ', averaging_window_size)
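## Worked example (made-up numbers) of the sizing rules above: with
## unique_pages = 10000 and cache_size_per = 0.05, the cache holds
## round(10000 * 0.05) = 500 pages; a cache_size_per of 512.0 is taken
## as an absolute size of 512 pages. With 200000 requests, the hit-rate
## averaging window spans int(0.01 * 200000) = 2000 requests.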