def get_sample_layer():
    """Build the small (8-channel, 10x10) sample conv layer with a fixed
    tiling and an identical loop ordering at every memory level."""
    env = expr_parameters.Environment()
    env.pe_pipeline_stages = 1
    layer = ConvLayer(
        env,
        name="test_conv2d12_from_resnet18_v1",
        channels=8,
        kernel_size="[3, 3]",
        padding="[1, 1]",
        strides="[1, 1]",
        output_shape=[1, 16, 10, 10],
        input_shape=[1, 8, 10, 10],
        batch_size=4,
    )
    # Per index variable: [DRAM, SPM, RF, spatial] trip counts
    # (same 4-level layout used by run_thread's set_tiling calls).
    tilings = (
        ("N", [2, 1, 2, 1]),
        ("M", [2, 2, 1, 4]),
        ("C", [2, 2, 2, 1]),
        ("Ox", [1, 2, 5, 1]),
        ("Oy", [1, 1, 10, 1]),
        ("Fx", [1, 1, 3, 1]),
        ("Fy", [1, 1, 3, 1]),
    )
    for iv, factors_per_level in tilings:
        layer.set_tiling(iv, factors_per_level)
    # Apply one fixed loop order at every level of the loop nest.
    ordering = ["N", "C", "Fx", "Fy", "M", "Ox", "Oy"]
    for level in layer._loop_IVs:
        layer.set_ordering(level, list(ordering))
    return layer
def get_sample_layer():
    """Build the 512-channel 7x7 sample conv layer (ResNet-18 conv5-stage
    shape) with a fixed 4-level tiling; loop ordering is left at default."""
    env = expr_parameters.Environment()
    layer = ConvLayer(
        env,
        name="test_conv2d12_from_resnet18_v1",
        channels=512,
        kernel_size="[3, 3]",
        padding="[1, 1]",
        strides="[1, 1]",
        output_shape=[1, 512, 7, 7],
        input_shape=[1, 512, 7, 7],
        batch_size=4,
    )
    # Per index variable: [DRAM, SPM, RF, spatial] trip counts.
    for iv, tiling in (
        ("N", [1, 1, 2, 2]),
        ("M", [4, 32, 1, 4]),
        ("C", [32, 1, 16, 1]),
        ("Ox", [1, 1, 7, 1]),
        ("Oy", [1, 1, 1, 7]),
        ("Fx", [1, 3, 1, 1]),
        ("Fy", [1, 1, 1, 3]),
    ):
        layer.set_tiling(iv, tiling)
    return layer
def get_sample_layer(env):
    """Build the ResNet Conv5_2 sample layer on the caller-supplied
    environment, with a fixed 4-level tiling for every index variable."""
    # ResNet Conv5_2 layer parameters.
    layer = ConvLayer(
        env,
        name="test_conv2d12_from_resnet18_v1",
        channels=512,
        kernel_size="[3, 3]",
        padding="[1, 1]",
        strides="[1, 1]",
        output_shape=[1, 512, 7, 7],
        input_shape=[1, 512, 7, 7],
        batch_size=4,
    )
    # Per index variable: [DRAM, SPM, RF, spatial] trip counts.
    for iv, tiling in (
        ("N", [1, 1, 4, 1]),
        ("M", [4, 4, 16, 2]),
        ("C", [32, 1, 8, 2]),
        ("Ox", [1, 1, 1, 7]),
        ("Oy", [1, 1, 1, 7]),
        ("Fx", [1, 3, 1, 1]),
        ("Fy", [1, 3, 1, 1]),
    ):
        layer.set_tiling(iv, tiling)
    return layer
def run_thread(params, count_only=False):
    """Exhaustively enumerate RF/SPM/DRAM tiling factorizations for one layer
    and track the schedules minimizing EDP, energy, and cycles.

    params is either:
      * a 4-tuple (layer, tc_list, spatial_factor, env_config), or
      * a 5-tuple (layer_index, tc_list, spatial_factor, env_config,
        expr_params) — in this form the global PARAMS is replaced and the
        layer is rebuilt from layer_info.resnet_parameters[layer_index].

    tc_list holds the total trip count per index variable; spatial_factor is
    the already-chosen spatial tiling, so the remaining levels must factor
    tc_list / spatial_factor.

    Returns `evaluated` (candidate count) when count_only is True, otherwise
    (min_edp, min_edp_sequence, min_energy, min_energy_sequence,
     min_cycle, min_cycle_sequence, evaluated), where each sequence is the
    (dram, spm, rf, spatial) factor tuple achieving that minimum.
    """
    if len(params) == 4:
        layer, tc_list, spatial_factor, env_config = params
    else:
        layer_index, tc_list, spatial_factor, env_config, expr_params = params
        global PARAMS
        PARAMS = expr_params
    if env_config is not None:  # was `!= None`; identity test is the idiom
        # Override the memory-access energy model from the sweep config.
        PARAMS.env.rf_energy = env_config["rf_energy"]
        PARAMS.env.spm_energy = env_config["spm_energy"]
    if len(params) == 5:
        layer_args = layer_info.resnet_parameters[layer_index]
        layer = ConvLayer(PARAMS.env, **layer_args)

    try:
        stride = layer.strides
    except AttributeError:
        # Narrowed from a bare `except:` — only a missing attribute should
        # fall back; anything else is a real error and must propagate.
        stride = None

    # Trip counts still to be tiled after the fixed spatial level.
    tc_list_after_spatial = [x // y for x, y in zip(tc_list, spatial_factor)]
    tc_list_factors_spatial = [factors(tc) for tc in tc_list_after_spatial]

    min_energy = float("inf")
    min_energy_sequence = None
    min_edp = float("inf")
    min_edp_sequence = None
    min_cycle = float("inf")
    min_cycle_sequence = None
    evaluated = 0

    for rf_factor in of_bucket(tc_list_factors_spatial):
        if not valid_rf(layer, stride, rf_factor):
            continue
        tc_list_after_rf = [
            x // y for x, y in zip(tc_list_after_spatial, rf_factor)
        ]
        tc_list_factors_rf = [factors(tc) for tc in tc_list_after_rf]
        for spm_factor in of_bucket(tc_list_factors_rf):
            spatial_rf_factor = tuple(
                x * y for x, y in zip(spatial_factor, rf_factor))
            if not valid_spm(layer, stride, spatial_rf_factor, spm_factor):
                continue
            # Whatever trip count remains after SPM tiling goes to DRAM.
            dram_factor = tuple(
                x // y for x, y in zip(tc_list_after_rf, spm_factor))
            if not valid_dram(layer, dram_factor):
                continue
            evaluated += 1
            if count_only:
                continue
            IV_ORDER = layer._default_loop_order
            for idx in range(len(IV_ORDER)):
                # 4-level tiling: [DRAM, SPM, RF, spatial].
                tiling_factor = [dram_factor[idx], spm_factor[idx],
                                 rf_factor[idx], spatial_factor[idx]]
                layer.set_tiling(IV_ORDER[idx], tiling_factor)
            edp, energy, cycle = layer.get_min_edp_energy_cycle()
            if edp < min_edp:
                min_edp = edp
                min_edp_sequence = (dram_factor, spm_factor,
                                    rf_factor, spatial_factor)
            if energy < min_energy:
                min_energy = energy
                min_energy_sequence = (dram_factor, spm_factor,
                                       rf_factor, spatial_factor)
            if cycle < min_cycle:
                min_cycle = cycle
                min_cycle_sequence = (dram_factor, spm_factor,
                                      rf_factor, spatial_factor)

    if count_only:
        return evaluated
    return (min_edp, min_edp_sequence, min_energy, min_energy_sequence,
            min_cycle, min_cycle_sequence, evaluated)
def run_thread(params):
    """Enumerate RF/SPM/DRAM tiling factorizations for one ResNet layer and
    track the schedules minimizing EDP and energy.

    params is (layer_index, tc_list, spatial_factor, env_config):
      layer_index    index into layer_info.resnet_parameters
      tc_list        total trip count per index variable (order: IV_ORDER)
      spatial_factor fixed spatial tiling; remaining levels must factor
                     tc_list / spatial_factor
      env_config     optional dict with "rf_energy"/"spm_energy" overrides

    Returns (min_edp, min_edp_sequence, min_energy, min_energy_sequence,
    evaluated), where each sequence is the (dram, spm, rf, spatial) factor
    tuple achieving that minimum and `evaluated` counts valid candidates.
    """
    layer_index, tc_list, spatial_factor, env_config = params
    if env_config is not None:  # was `!= None`; identity test is the idiom
        env = expr_parameters.Environment(
            rf_energy=env_config["rf_energy"],
            spm_energy=env_config["spm_energy"],
        )
    else:
        env = expr_parameters.Environment()
    env.pe_pipeline_stages = 1
    layer_args = layer_info.resnet_parameters[layer_index]
    layer = ConvLayer(env, **layer_args)
    stride = layer.strides

    # Trip counts still to be tiled after the fixed spatial level.
    tc_list_after_spatial = [x // y for x, y in zip(tc_list, spatial_factor)]
    tc_list_factors_spatial = [factors(tc) for tc in tc_list_after_spatial]

    min_energy = float("inf")
    min_energy_sequence = None
    min_edp = float("inf")
    min_edp_sequence = None
    evaluated = 0

    for rf_factor in of_bucket(tc_list_factors_spatial):
        if not valid_rf(stride, rf_factor):
            continue
        tc_list_after_rf = [
            x // y for x, y in zip(tc_list_after_spatial, rf_factor)
        ]
        tc_list_factors_rf = [factors(tc) for tc in tc_list_after_rf]
        for spm_factor in of_bucket(tc_list_factors_rf):
            spatial_rf_factor = tuple(
                x * y for x, y in zip(spatial_factor, rf_factor))
            if not valid_spm(stride, spatial_rf_factor, spm_factor):
                continue
            # Whatever trip count remains after SPM tiling goes to DRAM.
            dram_factor = tuple(
                x // y for x, y in zip(tc_list_after_rf, spm_factor))
            if not valid_dram(dram_factor):
                continue
            evaluated += 1
            for idx in range(len(IV_ORDER)):
                # 4-level tiling: [DRAM, SPM, RF, spatial].
                tiling_factor = [
                    dram_factor[idx], spm_factor[idx],
                    rf_factor[idx], spatial_factor[idx]
                ]
                layer.set_tiling(IV_ORDER[idx], tiling_factor)
            edp, energy, cycle = layer.get_min_edp_energy_cycle()
            if edp < min_edp:
                min_edp = edp
                min_edp_sequence = (dram_factor, spm_factor,
                                    rf_factor, spatial_factor)
            if energy < min_energy:
                min_energy = energy
                min_energy_sequence = (dram_factor, spm_factor,
                                       rf_factor, spatial_factor)

    return min_edp, min_edp_sequence, min_energy, min_energy_sequence, evaluated
def get_joined_df(csv_dir, ref_files, our_test_dir):
    """Cross-validate reference CSV results against dMazeRunner's model.

    Reads each CSV in `ref_files` from `csv_dir`, replays every verified
    tiling/ordering row through a ConvLayer to obtain dMazeRunner's energy,
    cycle, EDP and per-component energy numbers, then concatenates all
    per-layer frames into one renamed DataFrame with percentage-difference
    columns and returns it.

    NOTE(review): `our_test_dir` is accepted but never used in this body —
    possibly kept for interface compatibility; confirm with callers.
    NOTE(review): relies on a module-level `layers` sequence (not visible
    here) mapping each ref file to a resnet_parameters index — verify.
    """
    ref_dfs = []
    for ref_file_name in ref_files:
        df = pd.read_csv(csv_dir + "/" + ref_file_name)
        ref_dfs.append(df)
    for i, ref_df in enumerate(ref_dfs):
        print("processing", ref_files[i])
        # Rebuild the layer this reference file was generated for.
        layer_arguments = layer_info.resnet_parameters[layers[i]]
        env = expr_parameters.Environment()
        env.pe_pipeline_stages = 1
        layer = ConvLayer(env, **layer_arguments)
        loop_IVs = ["N", "M", "C", "Ox", "Oy", "Fx", "Fy"]
        # Pre-create the output columns; rows start as NaN.
        ref_df["energy_from_dMazeRunner"] = np.nan
        ref_df["cycle_from_dMazeRunner"] = np.nan
        ref_df["edp_from_dMazeRunner"] = np.nan
        ref_df["energy_diff_percent"] = np.nan
        ref_df["layer"] = layers[i]
        # Keep only rows whose tiling was verified by the reference tool.
        ref_df.drop(ref_df[ref_df["Verify Tiling"] != True].index, inplace=True)
        for index, row in ref_df.iterrows():
            if not row["Verify Tiling"]:
                continue
            # set tiling: CSV stores the per-level factor lists as strings.
            dram_tiling = ast.literal_eval(row["DRAM_tiling"])
            spm_tiling = ast.literal_eval(row["SPM_tiling"])
            rf_tiling = ast.literal_eval(row["RF_tiling"])
            spatial_tiling = ast.literal_eval(row["Spatial_tiling"])
            for idx in range(len(loop_IVs)):
                # 4-level tiling per IV: [DRAM, SPM, RF, spatial].
                tiling_factor = [
                    dram_tiling[idx], spm_tiling[idx], rf_tiling[idx],
                    spatial_tiling[idx]
                ]
                layer.set_tiling(loop_IVs[idx], tiling_factor)
            # find key from ordering: canonicalize the CSV's orderings via
            # the layer's reuse-factor round-trip so they match the keys of
            # get_Cycles_One_Layer()/get_Energy_One_Layer().
            spm_ordering = ast.literal_eval(row["SPM_schedule"])
            dram_ordering = ast.literal_eval(row["DRAM_schedule"])
            spm_ordering = tuple([iv.title() for iv in spm_ordering])
            dram_ordering = tuple([iv.title() for iv in dram_ordering])
            spm_reuse_factor = layer.determine_data_reuse(
                "SPM", user_ordering=spm_ordering)[0]
            dram_reuse_factor = layer.determine_data_reuse(
                "DRAM", user_ordering=dram_ordering)[0]
            spm_ordering = layer.get_ordering_from_reuse_factor(
                spm_reuse_factor, "SPM")
            dram_ordering = layer.get_ordering_from_reuse_factor(
                dram_reuse_factor, "DRAM")
            key = (dram_ordering, spm_ordering)
            cycles_of_all_orderings = layer.get_Cycles_One_Layer()
            energy_of_all_orderings = layer.get_Energy_One_Layer()
            ##### use the set of ordering that minimizes energy
            # NOTE: this overwrites the key derived from the CSV orderings
            # above with the energy-minimizing orderings.
            energy, dram_ordering, spm_ordering = layer.get_min_energy()
            key = (dram_ordering, spm_ordering)
            #####
            cycle, energy = cycles_of_all_orderings[
                key], energy_of_all_orderings[key]
            ref_df.at[index, "energy_from_dMazeRunner"] = energy
            ref_df.at[index, "cycle_from_dMazeRunner"] = cycle
            ref_df.at[index, "edp_from_dMazeRunner"] = energy * cycle
            # energy distribution: per-component breakdown for this schedule.
            energy_distributions_of_all_orderings = layer.get_Energy_Distribution()
            energy_MAC, energy_RF, energy_NOC, energy_SPM, energy_DRAM = \
                energy_distributions_of_all_orderings[key]
            ref_df.at[index, "energy_MAC_dMazeRunner"] = energy_MAC
            ref_df.at[index, "energy_RF_dMazeRunner"] = energy_RF
            ref_df.at[index, "energy_NOC_dMazeRunner"] = energy_NOC
            ref_df.at[index, "energy_SPM_dMazeRunner"] = energy_SPM
            ref_df.at[index, "energy_DRAM_dMazeRunner"] = energy_DRAM
            # Relative gap between the reference tool's energy and ours.
            energy_from_ref = row["Energy"]
            ref_df.at[index, "energy_diff_percent"] = 100 * \
                abs(energy-energy_from_ref)/energy_from_ref
            stride = layer.strides
            # SPM provisioning implied by this schedule (Yang et al. scheme).
            n_banks, size_in_bytes = get_spm_bank_and_size(
                stride, spatial_tiling, rf_tiling, spm_tiling)
            ref_df.at[index, "spm_banks_yang_et_al"] = n_banks
            ref_df.at[index, "spm_size_in_bytes_yang_et_al"] = size_in_bytes
    final_df = pd.DataFrame()
    for layer_index, ref_df in enumerate(ref_dfs):
        def change_dataflow_name(old_name):
            # Map the reference tool's IV names (IC/OC/ON) onto ours
            # (C/M/N) and render "x_y[_cfg1_cfg2]" as "Y | X".
            name = old_name.replace("IC", "C")
            name = name.replace("OC", "M")
            name = name.replace("ON", "N")
            tokens = name.strip().split("_")
            if len(tokens) not in [2, 4]:
                return None
            if len(tokens) == 4:
                tokens = tokens[:2]  # drop the trailing config suffix
            x, y = tokens
            return (y.title() + " | " + x.title())

        def parse_config(old_name):
            # Extract the "cfg1_cfg2" suffix of a 4-token dataflow name.
            tokens = old_name.strip().split("_")
            if len(tokens) == 4:
                return tokens[2] + "_" + tokens[3]
            else:
                return None
        # Move the "layer" column to the front.
        columns = list(ref_df.columns.values)
        columns.remove("layer")
        new_column_order = ["layer"] + columns
        ref_df = ref_df[new_column_order]
        ref_df["dataflow_str"] = ref_df["Data_flow_mechanism"].apply(
            change_dataflow_name)
        ref_df["dataflow_config"] = ref_df["Data_flow_mechanism"].apply(
            parse_config)
        final_df = pd.concat([final_df, ref_df])
    # Normalize column names for downstream reporting.
    final_df = final_df.rename(index=str, columns={
        "dataflow_str": "dataflow",
        "Energy": "energy_theirs",
        "energy_from_dMazeRunner": "energy_dMazeRunner",
        "energy_best": "energy_optimal",
        "cycle_from_dMazeRunner": "cycle_dMazeRunner",
        "edp_from_dMazeRunner": "edp_dMazeRunner",
        "energy_diff_percent": "energy_diff_%",
    })
    # Per-component percentage differences, relative to dMazeRunner's value.
    final_df["energy_RF_diff_%"] = (final_df["Energy_RF"] -
                                    final_df["energy_RF_dMazeRunner"]
                                    ) / final_df["energy_RF_dMazeRunner"] * 100
    final_df["energy_NOC_diff_%"] = (
        final_df["Energy_NoC"] - final_df["energy_NOC_dMazeRunner"]
    ) / final_df["energy_NOC_dMazeRunner"] * 100
    final_df["energy_SPM_diff_%"] = (
        final_df["Energy_SPM"] - final_df["energy_SPM_dMazeRunner"]
    ) / final_df["energy_SPM_dMazeRunner"] * 100
    final_df["energy_DRAM_diff_%"] = (
        final_df["Energy_DRAM"] - final_df["energy_DRAM_dMazeRunner"]
    ) / final_df["energy_DRAM_dMazeRunner"] * 100
    return final_df