def run_dataflow(layer_index, env_config=None):
    """Enumerate dataflow combinations for one ResNet layer.

    Builds a ConvLayer for ``layer_index`` (single-stage PE pipeline) and
    forwards its per-index trip counts, ordered by the module-level
    IV_ORDER, to generate_combinations().
    """
    env = expr_parameters.Environment()
    env.pe_pipeline_stages = 1
    layer = ConvLayer(env, **layer_info.resnet_parameters[layer_index])
    # Trip counts in canonical loop-variable order.
    trip_counts = [layer.base_TCs[iv] for iv in IV_ORDER]
    return generate_combinations(layer_index, trip_counts, env_config)
def find_config(args):
    """Sweep (n_PE, SPM size, RF size) design points and pickle the best
    architecture configuration found for each point.

    Reads the sweep ranges from dse_parameters, evaluates every point that
    survives pass_config() via find_opt_parameters(), and dumps the result
    dict to ``<args.config_file_name>.p``.
    """
    config_file_name = args.config_file_name
    n_PEs = dse_parameters.n_PEs
    energy_rf = dse_parameters.energy_rf
    energy_spm = dse_parameters.energy_spm

    env = expr_parameters.Environment()
    env.pe_pipeline_stages = 1
    layer_args = layer_info.resnet_parameters[args.layer_index]
    layer = ConvLayer(env, **layer_args)

    opt_parameters = {}

    # Enumerate every design point up front so the progress bar has an
    # accurate total; pass_config() prunes invalid combinations.
    params = []
    for n_PE in n_PEs:
        for spm_size, spm_energy in energy_spm.items():
            for rf_size, rf_energy in energy_rf.items():
                if pass_config(n_PE, rf_size, spm_size):
                    continue
                params.append(
                    (layer, n_PE, spm_size, rf_size, spm_energy, rf_energy))

    pbar = tqdm.tqdm(total=len(params))
    # Renamed from `multiprocessing` to avoid shadowing the stdlib module.
    use_multiprocessing = True
    if use_multiprocessing:
        with concurrent.futures.ProcessPoolExecutor() as executor:
            for ret_tuple in executor.map(find_opt_parameters, params):
                key, global_config, env_config, num_evaluations = ret_tuple
                pbar.update()
                opt_parameters[key] = (global_config, env_config,
                                       num_evaluations)
    else:
        for param in params:
            key, global_config, env_config, num_evaluations = \
                find_opt_parameters(param)
            pbar.update()
            # BUG FIX: the serial path previously discarded the result
            # instead of recording it, producing an empty pickle.
            opt_parameters[key] = (global_config, env_config,
                                   num_evaluations)

    print(opt_parameters)
    # Use a context manager so the file handle is closed deterministically.
    with open(config_file_name + ".p", "wb") as out_file:
        pickle.dump(opt_parameters, out_file)
def get_sample_layer():
    """Build the fixed 256-channel sample conv layer with a hand-picked
    tiling (factors ordered DRAM, SPM, RF, SPATIAL per index variable)."""
    env = expr_parameters.Environment()
    layer_args = {
        "name": "test_conv2d12_from_resnet18_v1",
        "channels": 256,
        "kernel_size": "[3, 3]",
        "padding": "[1, 1]",
        "strides": "[1, 1]",
        "output_shape": [1, 256, 14, 14],
        "input_shape": [1, 256, 14, 14],
        "batch_size": 4,
    }
    layer = ConvLayer(env, batch=4, **layer_args)
    # Apply the fixed tiling table instead of one call per variable.
    tilings = {
        "N": [4, 1, 1, 1],
        "M": [16, 1, 8, 2],
        "C": [8, 16, 1, 2],
        "Ox": [1, 1, 2, 7],
        "Oy": [1, 1, 2, 7],
        "Fx": [1, 1, 3, 1],
        "Fy": [1, 1, 3, 1],
    }
    for index_var, factor in tilings.items():
        layer.set_tiling(index_var, factor)
    return layer
def get_sample_layer(env):
    """Build the ResNet Conv5_2 sample layer (512 channels, 7x7 output)
    with a fixed tiling (factors ordered DRAM, SPM, RF, SPATIAL)."""
    layer_args = {
        # ResNet Conv5_2 layer
        "name": "test_conv2d12_from_resnet18_v1",
        "channels": 512,
        "kernel_size": "[3, 3]",
        "padding": "[1, 1]",
        "strides": "[1, 1]",
        "output_shape": [1, 512, 7, 7],
        "input_shape": [1, 512, 7, 7],
        "batch_size": 4,
    }
    layer = ConvLayer(env, **layer_args)
    tilings = {
        "N": [1, 1, 4, 1],
        "M": [4, 4, 16, 2],
        "C": [32, 1, 8, 2],
        "Ox": [1, 1, 1, 7],
        "Oy": [1, 1, 1, 7],
        "Fx": [1, 3, 1, 1],
        "Fy": [1, 3, 1, 1],
    }
    for index_var, factor in tilings.items():
        layer.set_tiling(index_var, factor)
    return layer
def run_dse(args):
    """Run design-space exploration over previously pickled configurations.

    Loads the per-design-point configs written by find_config(), optimizes
    each one, and writes the results both as a pickle and an Excel sheet.
    """
    config_file_name = args.config_file_name + ".p"
    with open(config_file_name, "rb") as in_file:
        global_configs = pickle.load(in_file)

    env = expr_parameters.Environment()
    env.pe_pipeline_stages = 1
    layer_args = layer_info.resnet_parameters[args.layer_index]
    layer = ConvLayer(env, **layer_args)

    columns = ("spm_size", "rf_size", "n_PE", "edp", "total_evaluated",
               "start_time", "end_time")
    records = []
    print("evaluating {} design points".format(len(global_configs)))
    for i, (key, value) in enumerate(global_configs.items(), start=1):
        global_config, env_config, num_evaluations = value
        n_PE, spm_size, rf_size = key
        if global_config is None:
            # No valid configuration was found for this design point;
            # record a sentinel EDP of -1.
            min_edp = -1
            start_time, end_time = None, None
        else:
            start_time = datetime.datetime.now()
            result = optimizer.optimize(layer, global_config, env_config)
            min_edp = result["min_edp"]
            end_time = datetime.datetime.now()
        records.append((spm_size, rf_size, n_PE, min_edp, num_evaluations,
                        start_time, end_time))
        print("{}/{}, evaluated: {}\n".format(i, len(global_configs),
                                              num_evaluations))

    final_df = pd.DataFrame.from_records(records, columns=columns)
    final_df = final_df.astype({
        "spm_size": int,
        "rf_size": int,
        "n_PE": int,
        "total_evaluated": int,
    })
    # NOTE(review): config_file_name already ends in ".p", so this produces
    # "dse_output<name>.p.p" — kept for backward compatibility with
    # downstream readers; confirm before changing.
    with open("dse_output" + config_file_name + ".p", "wb") as out_file:
        pickle.dump(final_df, out_file)
    file_name = "dse_output_layer_{}.xlsx".format(config_file_name)
    final_df.to_excel(file_name)
def main():
    """Compile and optimize one ResNet layer, then print the optimized
    EDP / energy / cycle results and the exploration statistics."""
    args = parse_arguments()
    layer_arguments = layer_info.resnet_parameters[int(args.layer)]
    env = expr_parameters.Environment()
    env.pe_pipeline_stages = 1
    layer = ConvLayer(env=env, **layer_arguments)
    print("compiling layer", args.layer)

    params = expr_parameters.ExprParameters(env)
    if args.opt_utilization:
        # Tighten resource-utilization pruning thresholds.
        params.PE_UTILIZATION = 0.8
        params.RF_UTILIZATION = 0.8
        params.SPM_UTILIZATION = 0.5
    # `bool(...)` replaces the `True if x else False` anti-idiom; behavior
    # is identical for the (boolean) argparse flags.
    params.PRUNE_NO_FEATURE_DRAM = bool(args.opt_no_feature_dram)
    params.PRUNE_NO_REDUCTION = bool(args.opt_no_spatial_reduction)

    start_time = datetime.datetime.now()
    result = optimizer.optimize(layer, params)
    end_time = datetime.datetime.now()
    delta = end_time - start_time
    num_evaluations = optimizer.get_num_evaluations(layer, params)

    print("Optimized energy-delay product (EDP): %.4E" % result["min_edp"])
    print("Execution method for optimized EDP: \nTiling factors (DRAM, SPM, RF, SPATIAL): ", result["min_edp_seq"])
    print("Ordering (DRAM, SPM):", result["min_edp_ordering"])
    print("Minimized energy: %.4E" % result["min_energy"])
    print("Execution method for minimized energy: \nTiling factors (DRAM, SPM, RF, SPATIAL): ", result["min_energy_seq"])
    print("Ordering (DRAM, SPM):", result["min_energy_ordering"])
    print("Minimized execution cycles: %.4E" % result["min_cycle"])
    print("Execution method for minimized execution cycles: \nTiling factors (DRAM, SPM, RF, SPATIAL): ", result["min_cycle_seq"])
    print("Ordering (DRAM, SPM):", result["min_cycle_ordering"])
    print("Execution methods evaluated:", num_evaluations)
    print("Time spent in exploration: {} seconds".format(delta.total_seconds()))
    return
def get_sample_layer():
    """Build a 64-channel strided sample conv layer with fixed tiling and
    an identical loop ordering at every memory level."""
    env = expr_parameters.Environment()
    env.pe_pipeline_stages = 1
    layer_args = {
        "name": "test_conv2d12_from_resnet18_v1",
        "channels": 64,
        "kernel_size": "[6, 6]",
        "padding": "[2, 2]",
        "strides": "[2, 2]",
        "output_shape": [1, 64, 5, 5],
        "input_shape": [1, 64, 10, 10],
        "batch_size": 8,
    }
    layer = ConvLayer(env, **layer_args)
    # Tiling factors ordered (DRAM, SPM, RF, SPATIAL) per index variable.
    tilings = {
        "N": [2, 2, 2, 1],
        "M": [2, 4, 2, 4],
        "C": [4, 2, 2, 4],
        "Ox": [1, 5, 1, 1],
        "Oy": [1, 1, 5, 1],
        "Fx": [1, 2, 3, 1],
        "Fy": [2, 1, 3, 1],
    }
    for index_var, factor in tilings.items():
        layer.set_tiling(index_var, factor)
    # Same ordering at every loop level.
    ordering = ["N", "C", "Fx", "Fy", "M", "Ox", "Oy"]
    for level in layer._loop_IVs:
        layer.set_ordering(level, ordering)
    return layer
def run_thread(params, count_only=False):
    """Exhaustively evaluate RF/SPM/DRAM tiling splits for one spatial
    tiling choice and return the best EDP / energy / cycle schedules.

    ``params`` is either a 4-tuple ``(layer, tc_list, spatial_factor,
    env_config)`` or a 5-tuple ``(layer_index, tc_list, spatial_factor,
    env_config, expr_params)``; in the 5-tuple form the layer is rebuilt
    here (required for process-pool workers).  When ``count_only`` is set,
    only the number of valid tiling combinations is returned.
    """
    global PARAMS
    if len(params) == 4:
        layer, tc_list, spatial_factor, env_config = params
    else:
        layer_index, tc_list, spatial_factor, env_config, expr_params = params
        # BUG FIX: the original assigned PARAMS = expr_params outside this
        # branch, which raised NameError for 4-tuple inputs (expr_params is
        # only bound here).  NOTE(review): reconstruction from mangled
        # source — confirm intended placement.
        PARAMS = expr_params
    if env_config is not None:
        PARAMS.env.rf_energy = env_config["rf_energy"]
        PARAMS.env.spm_energy = env_config["spm_energy"]
    if len(params) == 5:
        layer_args = layer_info.resnet_parameters[layer_index]
        layer = ConvLayer(PARAMS.env, **layer_args)

    # Some layer types have no stride attribute; treat that as "no stride"
    # (replaces a bare `except:` that swallowed every exception).
    stride = getattr(layer, "strides", None)

    tc_list_after_spatial = [int(x / y)
                             for x, y in zip(tc_list, spatial_factor)]
    tc_list_factors_spatial = [factors(tc) for tc in tc_list_after_spatial]

    min_energy = float("inf")
    min_energy_sequence = None
    min_edp = float("inf")
    min_edp_sequence = None
    min_cycle = float("inf")
    min_cycle_sequence = None
    evaluated = 0

    # Loop-invariant: hoisted out of the innermost loop.
    IV_ORDER = layer._default_loop_order

    for rf_factor in of_bucket(tc_list_factors_spatial):
        if not valid_rf(layer, stride, rf_factor):
            continue
        tc_list_after_rf = [int(x / y)
                            for x, y in zip(tc_list_after_spatial, rf_factor)]
        tc_list_factors_rf = [factors(tc) for tc in tc_list_after_rf]
        for spm_factor in of_bucket(tc_list_factors_rf):
            spatial_rf_factor = (x * y
                                 for x, y in zip(spatial_factor, rf_factor))
            if not valid_spm(layer, stride, spatial_rf_factor, spm_factor):
                continue
            # Whatever trip count remains after SPM tiling goes to DRAM.
            tc_list_after_spm = tuple(
                int(x / y) for x, y in zip(tc_list_after_rf, spm_factor))
            dram_factor = tc_list_after_spm
            if not valid_dram(layer, dram_factor):
                continue
            evaluated += 1
            if count_only:
                continue
            for idx in range(len(IV_ORDER)):
                tiling_factor = [dram_factor[idx], spm_factor[idx],
                                 rf_factor[idx], spatial_factor[idx]]
                layer.set_tiling(IV_ORDER[idx], tiling_factor)
            edp, energy, cycle = layer.get_min_edp_energy_cycle()
            if edp < min_edp:
                min_edp = edp
                min_edp_sequence = (dram_factor, spm_factor, rf_factor,
                                    spatial_factor)
            if energy < min_energy:
                min_energy = energy
                min_energy_sequence = (dram_factor, spm_factor, rf_factor,
                                       spatial_factor)
            if cycle < min_cycle:
                min_cycle = cycle
                min_cycle_sequence = (dram_factor, spm_factor, rf_factor,
                                      spatial_factor)

    if count_only:
        return evaluated
    return (min_edp, min_edp_sequence, min_energy, min_energy_sequence,
            min_cycle, min_cycle_sequence, evaluated)
def run_thread(params):
    """Evaluate all RF/SPM/DRAM tiling splits for one spatial tiling of a
    ResNet layer and return the minimum-EDP and minimum-energy schedules.

    ``params`` is ``(layer_index, tc_list, spatial_factor, env_config)``;
    the layer is rebuilt here so the function is safe to run in a worker
    process.  Returns ``(min_edp, min_edp_sequence, min_energy,
    min_energy_sequence, evaluated)``.
    """
    layer_index, tc_list, spatial_factor, env_config = params
    if env_config is not None:
        # Override the default RF/SPM access energies for this run.
        env = expr_parameters.Environment(
            rf_energy=env_config["rf_energy"],
            spm_energy=env_config["spm_energy"],
        )
    else:
        env = expr_parameters.Environment()
    env.pe_pipeline_stages = 1
    layer_args = layer_info.resnet_parameters[layer_index]
    layer = ConvLayer(env, **layer_args)
    stride = layer.strides

    tc_list_after_spatial = [int(x / y)
                             for x, y in zip(tc_list, spatial_factor)]
    tc_list_factors_spatial = [factors(tc) for tc in tc_list_after_spatial]

    min_energy = float("inf")
    min_energy_sequence = None
    min_edp = float("inf")
    min_edp_sequence = None
    evaluated = 0

    for rf_factor in of_bucket(tc_list_factors_spatial):
        if not valid_rf(stride, rf_factor):
            continue
        tc_list_after_rf = [int(x / y)
                            for x, y in zip(tc_list_after_spatial, rf_factor)]
        tc_list_factors_rf = [factors(tc) for tc in tc_list_after_rf]
        for spm_factor in of_bucket(tc_list_factors_rf):
            spatial_rf_factor = (x * y
                                 for x, y in zip(spatial_factor, rf_factor))
            if not valid_spm(stride, spatial_rf_factor, spm_factor):
                continue
            # Remaining trip count after SPM tiling is handled at DRAM.
            tc_list_after_spm = tuple(
                int(x / y) for x, y in zip(tc_list_after_rf, spm_factor))
            dram_factor = tc_list_after_spm
            if not valid_dram(dram_factor):
                continue
            evaluated += 1
            for idx in range(len(IV_ORDER)):
                tiling_factor = [dram_factor[idx], spm_factor[idx],
                                 rf_factor[idx], spatial_factor[idx]]
                layer.set_tiling(IV_ORDER[idx], tiling_factor)
            edp, energy, cycle = layer.get_min_edp_energy_cycle()
            if edp < min_edp:
                min_edp = edp
                min_edp_sequence = (dram_factor, spm_factor, rf_factor,
                                    spatial_factor)
            if energy < min_energy:
                min_energy = energy
                min_energy_sequence = (dram_factor, spm_factor, rf_factor,
                                       spatial_factor)

    return min_edp, min_edp_sequence, min_energy, min_energy_sequence, evaluated
def get_joined_df(csv_dir, ref_files, our_test_dir):
    """Join reference CSV results with dMazeRunner's own evaluation.

    For each reference CSV, rebuilds the corresponding ConvLayer (indices
    come from the module-level ``layers`` list — assumed parallel to
    ``ref_files``; TODO confirm), re-evaluates every verified tiling with
    dMazeRunner, and appends energy/cycle/EDP columns plus percentage
    differences against the reference numbers.

    Note: ``our_test_dir`` is accepted but never used in this body.
    Returns a single concatenated, renamed DataFrame.
    """
    # Load every reference CSV into a DataFrame, in the same order as
    # ref_files.
    ref_dfs = []
    for ref_file_name in ref_files:
        df = pd.read_csv(csv_dir + "/" + ref_file_name)
        ref_dfs.append(df)
    for i, ref_df in enumerate(ref_dfs):
        print("processing", ref_files[i])
        layer_arguments = layer_info.resnet_parameters[layers[i]]
        env = expr_parameters.Environment()
        env.pe_pipeline_stages = 1
        layer = ConvLayer(env, **layer_arguments)
        loop_IVs = ["N", "M", "C", "Ox", "Oy", "Fx", "Fy"]
        # Pre-create the output columns so .at[] assignments below always
        # have a destination.
        ref_df["energy_from_dMazeRunner"] = np.nan
        ref_df["cycle_from_dMazeRunner"] = np.nan
        ref_df["edp_from_dMazeRunner"] = np.nan
        ref_df["energy_diff_percent"] = np.nan
        ref_df["layer"] = layers[i]
        # Keep only rows whose tiling was verified.
        ref_df.drop(ref_df[ref_df["Verify Tiling"] != True].index,
                    inplace=True)
        for index, row in ref_df.iterrows():
            # Redundant after the drop above, but kept as a guard.
            if not row["Verify Tiling"]:
                continue
            # Set tiling: the CSV stores each tiling level as a stringified
            # Python list, parsed safely with ast.literal_eval.
            dram_tiling = ast.literal_eval(row["DRAM_tiling"])
            spm_tiling = ast.literal_eval(row["SPM_tiling"])
            rf_tiling = ast.literal_eval(row["RF_tiling"])
            spatial_tiling = ast.literal_eval(row["Spatial_tiling"])
            for idx in range(len(loop_IVs)):
                # Factor order per index variable: (DRAM, SPM, RF, SPATIAL).
                tiling_factor = [
                    dram_tiling[idx], spm_tiling[idx], rf_tiling[idx],
                    spatial_tiling[idx]
                ]
                layer.set_tiling(loop_IVs[idx], tiling_factor)
            # Find key from ordering: normalize the CSV's loop schedules to
            # the canonical ordering keys dMazeRunner uses internally.
            spm_ordering = ast.literal_eval(row["SPM_schedule"])
            dram_ordering = ast.literal_eval(row["DRAM_schedule"])
            spm_ordering = tuple([iv.title() for iv in spm_ordering])
            dram_ordering = tuple([iv.title() for iv in dram_ordering])
            spm_reuse_factor = layer.determine_data_reuse(
                "SPM", user_ordering=spm_ordering)[0]
            dram_reuse_factor = layer.determine_data_reuse(
                "DRAM", user_ordering=dram_ordering)[0]
            spm_ordering = layer.get_ordering_from_reuse_factor(
                spm_reuse_factor, "SPM")
            dram_ordering = layer.get_ordering_from_reuse_factor(
                dram_reuse_factor, "DRAM")
            key = (dram_ordering, spm_ordering)
            cycles_of_all_orderings = layer.get_Cycles_One_Layer()
            energy_of_all_orderings = layer.get_Energy_One_Layer()
            ##### use the set of ordering that minimizes energy
            # NOTE: this deliberately overwrites the CSV-derived key above
            # with the minimum-energy ordering.
            energy, dram_ordering, spm_ordering = layer.get_min_energy()
            key = (dram_ordering, spm_ordering)
            #####
            cycle, energy = cycles_of_all_orderings[
                key], energy_of_all_orderings[key]
            ref_df.at[index, "energy_from_dMazeRunner"] = energy
            ref_df.at[index, "cycle_from_dMazeRunner"] = cycle
            ref_df.at[index, "edp_from_dMazeRunner"] = energy * cycle
            # energy distribution, broken down by hardware component
            energy_distributions_of_all_orderings = layer.get_Energy_Distribution(
            )
            energy_MAC, energy_RF, energy_NOC, energy_SPM, energy_DRAM = energy_distributions_of_all_orderings[
                key]
            ref_df.at[index, "energy_MAC_dMazeRunner"] = energy_MAC
            ref_df.at[index, "energy_RF_dMazeRunner"] = energy_RF
            ref_df.at[index, "energy_NOC_dMazeRunner"] = energy_NOC
            ref_df.at[index, "energy_SPM_dMazeRunner"] = energy_SPM
            ref_df.at[index, "energy_DRAM_dMazeRunner"] = energy_DRAM
            # Percentage gap between dMazeRunner's energy and the reference.
            energy_from_ref = row["Energy"]
            ref_df.at[index, "energy_diff_percent"] = 100 * \
                abs(energy-energy_from_ref)/energy_from_ref
            # SPM sizing per Yang et al.'s banking scheme, for comparison.
            stride = layer.strides
            n_banks, size_in_bytes = get_spm_bank_and_size(
                stride, spatial_tiling, rf_tiling, spm_tiling)
            ref_df.at[index, "spm_banks_yang_et_al"] = n_banks
            ref_df.at[index, "spm_size_in_bytes_yang_et_al"] = size_in_bytes
    final_df = pd.DataFrame()
    for layer_index, ref_df in enumerate(ref_dfs):
        def change_dataflow_name(old_name):
            # Translate the reference naming (IC/OC/ON) into dMazeRunner's
            # index-variable names and format as "Y | X"; returns None for
            # token counts other than 2 or 4.
            name = old_name.replace("IC", "C")
            name = name.replace("OC", "M")
            name = name.replace("ON", "N")
            tokens = name.strip().split("_")
            if len(tokens) not in [2, 4]:
                return None
            if len(tokens) == 4:
                tokens = tokens[:2]
            x, y = tokens
            return (y.title() + " | " + x.title())

        def parse_config(old_name):
            # Extract the trailing config suffix (tokens 3 and 4) when
            # present; otherwise None.
            tokens = old_name.strip().split("_")
            if len(tokens) == 4:
                return tokens[2] + "_" + tokens[3]
            else:
                return None

        # Move the "layer" column to the front.
        columns = list(ref_df.columns.values)
        columns.remove("layer")
        new_column_order = ["layer"] + columns
        ref_df = ref_df[new_column_order]
        ref_df["dataflow_str"] = ref_df["Data_flow_mechanism"].apply(
            change_dataflow_name)
        ref_df["dataflow_config"] = ref_df["Data_flow_mechanism"].apply(
            parse_config)
        final_df = pd.concat([final_df, ref_df])
    # Normalize column names for the final report.
    final_df = final_df.rename(index=str, columns={
        "dataflow_str": "dataflow",
        "Energy": "energy_theirs",
        "energy_from_dMazeRunner": "energy_dMazeRunner",
        "energy_best": "energy_optimal",
        "cycle_from_dMazeRunner": "cycle_dMazeRunner",
        "edp_from_dMazeRunner": "edp_dMazeRunner",
        "energy_diff_percent": "energy_diff_%",
    })
    # Per-component percentage differences, relative to dMazeRunner's
    # numbers.
    final_df["energy_RF_diff_%"] = (final_df["Energy_RF"] -
                                    final_df["energy_RF_dMazeRunner"]
                                    ) / final_df["energy_RF_dMazeRunner"] * 100
    final_df["energy_NOC_diff_%"] = (
        final_df["Energy_NoC"] - final_df["energy_NOC_dMazeRunner"]
    ) / final_df["energy_NOC_dMazeRunner"] * 100
    final_df["energy_SPM_diff_%"] = (
        final_df["Energy_SPM"] - final_df["energy_SPM_dMazeRunner"]
    ) / final_df["energy_SPM_dMazeRunner"] * 100
    final_df["energy_DRAM_diff_%"] = (
        final_df["Energy_DRAM"] - final_df["energy_DRAM_dMazeRunner"]
    ) / final_df["energy_DRAM_dMazeRunner"] * 100
    return final_df