def create_stimuli(root, ksize, stride, bitwidth_data_in, bitwidth_data_out,
                   bitwidth_weights, channel_in, channel_out, width, height):
    a_rand = random_fixed_array((1, channel_in, height, width),
                                bitwidth_data_in)
    a_in = v_to_fixedint(a_rand)
    np.savetxt(join(root, "gen", f"input_{ksize}_{stride}_{channel_in}.csv"),
               flatten(a_in), delimiter=", ", fmt="%3d")

    a_weights_rand = random_fixed_array(
        (channel_out, channel_in, ksize, ksize), bitwidth_weights)
    a_bias_rand = random_fixed_array((channel_out, ), bitwidth_weights)

    # weights and bias to txt
    weights_to_files(a_weights_rand, a_bias_rand,
                     f"conv_{ksize}_{stride}_{channel_in}", join(root, "gen"))

    # assign the outputs
    conv_out = v_to_fixedint(
        conv(a_rand, a_weights_rand, a_bias_rand, (ksize, stride),
             bitwidth_data_out.as_tuple))
    filename = join(root, "gen", f"output_{ksize}_{stride}_{channel_in}.csv")
    with open(filename, "w") as outfile:
        np.savetxt(outfile, flatten(conv_out), delimiter=", ", fmt="%3d")

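# --- Sketch only (not part of the repository) -----------------------------
# The stimuli generators in this section rely on a few fixed-point helpers
# (Bitwidth, random_fixed_array, to_fixedint, v_to_fixedint, flatten).
# Below is a minimal sketch of how they could look on top of numpy and
# fpbinary; the Bitwidth field names, the value ranges and the flattening
# order are assumptions, not the project's actual implementation.
from collections import namedtuple

import numpy as np
from fpbinary import FpBinary

# assumed layout: total bit count, integer bits, fractional bits
Bitwidth = namedtuple("Bitwidth", ["total", "int_bits", "frac_bits"])


def random_fixed_array(shape, bitwidth, signed=True):
    """Array of random FpBinary values covering the representable range."""
    low = -2 ** (bitwidth.total - 1) if signed else 0
    high = 2 ** (bitwidth.total - 1) if signed else 2 ** bitwidth.total
    a_raw = np.random.randint(low, high, shape)
    return np.vectorize(
        lambda raw: FpBinary(int_bits=bitwidth.int_bits,
                             frac_bits=bitwidth.frac_bits, signed=signed,
                             value=float(raw) / 2 ** bitwidth.frac_bits))(a_raw)


def to_fixedint(value):
    """Raw bit pattern of a fixed-point value, i.e. value * 2**frac_bits."""
    return int(round(float(value) * 2 ** value.format[1]))


# vectorized variant for whole arrays
v_to_fixedint = np.vectorize(to_fixedint)


def flatten(array):
    """Reduce an N-d array to 1-d so np.savetxt can write it as CSV."""
    return array.reshape(-1)
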
def create_stimuli(root, model_name):
    model = onnx.load(join(root, model_name))
    shape = cnn_onnx.parse_param.get_input_shape(model)

    a_rand = random_fixed_array(shape, Bitwidth(8, 8, 0), signed=False)
    a_in = v_to_fixedint(a_rand)
    a_out = v_to_fixedint(cnn_onnx.inference.numpy_inference(model, a_rand))

    np.savetxt(join(root, "input.csv"), flatten(a_in),
               delimiter=", ", fmt="%3d")
    np.savetxt(join(root, "output.csv"), a_out, delimiter=", ", fmt="%3d")

def create_stimuli(root, ksize, stride, bitwidth, shape):
    a_rand = random_fixed_array(shape, bitwidth)
    a_in = v_to_fixedint(a_rand)
    np.savetxt(join(root, "src", "input_%d_%d.csv" % (ksize, stride)),
               flatten(a_in), delimiter=", ", fmt="%3d")

    # assign the outputs
    filename = join(root, "src", "output_%d_%d.csv" % (ksize, stride))
    max_out = v_to_fixedint(max_pool(a_rand, ksize, stride))
    with open(filename, "w") as outfile:
        np.savetxt(outfile, flatten(max_out), delimiter=", ", fmt="%3d")

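# --- Sketch only (not part of the repository) -----------------------------
# Generators like the one above are typically wired into a VUnit run script
# through a pre_config hook, so the CSV files exist before the simulation
# starts. The testbench name, generic names, path, fixed-point format and
# input shape below are assumptions for illustration, not the project's
# actual run.py.
from vunit import VUnit


def make_pre_config(root, ksize, stride, bitwidth, shape):
    """Build a pre_config callback that writes the stimuli for one config."""
    def pre_config(output_path):  # output_path is unused here
        create_stimuli(root, ksize, stride, bitwidth, shape)
        return True  # VUnit treats a non-True return value as a failure
    return pre_config


prj = VUnit.from_argv()
lib = prj.add_library("sim")
lib.add_source_files("src/*.vhd")

tb_pool = lib.test_bench("tb_max_pool")
for ksize, stride in [(2, 2), (3, 1)]:
    tb_pool.add_config(
        name="ksize=%d,stride=%d" % (ksize, stride),
        generics={"C_KSIZE": ksize, "C_STRIDE": stride},
        pre_config=make_pre_config("testbench/max_pool", ksize, stride,
                                   Bitwidth(8, 4, 4), (1, 8, 8)))

prj.main()
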
def create_stimuli(root, shape, bitwidth):
    a_rand = random_fixed_array(shape, bitwidth)
    a_in = v_to_fixedint(a_rand)
    np.savetxt(join(root, "src", "input.csv"), flatten(a_in),
               delimiter=", ", fmt="%3d")

    a_out = v_to_fixedint(avg_pool(a_rand))
    np.savetxt(join(root, "src", "output.csv"), a_out,
               delimiter=", ", fmt="%3d")

def create_stimuli(root, pool_dim, bitwidth):
    a_rand = random_fixed_array((pool_dim, pool_dim), bitwidth)
    a_in = v_to_fixedint(a_rand)
    np.savetxt(join(root, "src", "input%d.csv" % pool_dim), a_in,
               delimiter=", ", fmt="%3d")

    # use atleast_1d to fulfill 1d requirement of savetxt
    a_out = np.atleast_1d(to_fixedint(np.max(a_rand)))
    np.savetxt(join(root, "src", "output%d.csv" % pool_dim), a_out,
               delimiter=", ", fmt="%3d")

def create_stimuli(root, bitwidth, leaky, sample_cnt: int = 1):
    a_rand = random_fixed_array((sample_cnt, ), bitwidth)
    a_in = v_to_fixedint(a_rand)
    np.savetxt(join(root, "src", "input_" + "leaky" * leaky + ".csv"),
               a_in, delimiter=", ", fmt="%3d")

    a_out = (relu(a_rand) if not leaky else leaky_relu(
        a_rand, FpBinary(int_bits=0, frac_bits=3, value=0.125)))
    np.savetxt(join(root, "src", "output_" + "leaky" * leaky + ".csv"),
               v_to_fixedint(a_out), delimiter=", ", fmt="%3d")

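# --- Sketch only (not part of the repository) -----------------------------
# Possible invocation, generating CSV pairs for both the plain and the
# leaky variant of the activation testbench. The path, data format and
# sample count are assumptions:
for use_leaky in (False, True):
    create_stimuli("testbench/relu", Bitwidth(8, 4, 4), use_leaky,
                   sample_cnt=100)
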
def create_stimuli(root, stage, ksize, bitwidth_data, bitwidth_weights):
    # vunit import from csv can only handle datatype integer.
    # Therefore the random fixed point values have to be converted to
    # corresponding integer values.
    a_rand = random_fixed_array((ksize, ) * 2, bitwidth_data,
                                signed=stage != 1)
    # manually extend the bitwidth to implicitly create unsigned values
    sign_bit = 1 if stage == 1 else 0

    a_in = v_to_fixedint(a_rand)
    name = "input_data%s.csv" % ("_stage1" if stage == 1 else str(ksize))
    np.savetxt(join(root, "src", name), a_in, delimiter=", ", fmt="%3d")

    a_weights_rand = random_fixed_array((ksize, ksize), bitwidth_weights)
    a_weights_in = v_to_fixedint(a_weights_rand)
    name = "input_weights%s.csv" % ("_stage1" if stage == 1 else str(ksize))
    np.savetxt(join(root, "src", name), a_weights_in,
               delimiter=", ", fmt="%3d")

    product = a_rand * a_weights_rand
    additions = 0 if ksize == 1 else int(math.log2(ksize - 1) * 2)
    # TODO: replace for loop
    for value in product.flat:
        # No rounding needed for resize.
        # The range is covered by "additions + 1 + sign_bit".
        value.resize(
            (value.format[0] + additions + 1 + sign_bit, value.format[1]),
            OverflowEnum.excep)
    sum_ = np.sum(product)

    # use atleast_1d to fulfill 1d requirement of savetxt
    a_out = np.atleast_1d(to_fixedint(sum_))
    name = "output%s.csv" % ("_stage1" if stage == 1 else str(ksize))
    np.savetxt(join(root, "src", name), a_out, delimiter=", ", fmt="%d")

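# --- Sketch only (not part of the repository) -----------------------------
# A quick demonstration of how fpbinary grows the fixed-point format on
# arithmetic, which is the behaviour the explicit resize above has to work
# with: multiplying two (4, 4) values yields an (8, 8) product, and a
# single addition adds one integer bit. The values here are arbitrary.
from fpbinary import FpBinary

a = FpBinary(int_bits=4, frac_bits=4, signed=True, value=1.5)
b = FpBinary(int_bits=4, frac_bits=4, signed=True, value=-2.25)
print((a * b).format)  # (8, 8)
print((a + b).format)  # (5, 4)
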
def create_stimuli(root, model_name):
    model = onnx.load(join(root, model_name))
    shape = cnn_onnx.parse_param.get_input_shape(model)

    a_rand = random_fixed_array(shape, Bitwidth(8, 8, 0), signed=False)
    a_in = v_to_fixedint(a_rand)
    a_out = v_to_fixedint(cnn_onnx.inference.numpy_inference(model, a_rand))

    # ONNX runtime prediction, TODO: doesn't work right now
    # https://github.com/microsoft/onnxruntime/issues/2964
    # sess = rt.InferenceSession(join(root, model_name))
    # input_name = sess.get_inputs()[0].name
    # pred_onnx = sess.run(None, {input_name: in_.astype(np.float32)})[0]
    # print(pred_onnx)

    np.savetxt(join(root, "input.csv"), flatten(a_in),
               delimiter=", ", fmt="%3d")
    np.savetxt(join(root, "output.csv"), a_out, delimiter=", ", fmt="%3d")

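# --- Sketch only (not part of the repository) -----------------------------
# Possible invocation for a full-model testbench; the directory and model
# file name are assumptions:
create_stimuli(root="testbench/cnn_onnx", model_name="cnn_model.onnx")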