def msame_run(self, msame_dir):
    """
    Function Description:
        run msame project
    Parameter:
        msame_dir: msame project directory
    Return Value:
        npu dump data path
    Exception Description:
        when invalid npu dump data path throw exception
    """
    self._compare_shape_vs_bin_file()
    # all NPU dump output goes under <out_path>/<NPU_DUMP_DATA_BASE_PATH>
    npu_data_output_dir = os.path.join(self.arguments.out_path,
                                       NPU_DUMP_DATA_BASE_PATH)
    utils.create_directory(npu_data_output_dir)
    model_name, extension = utils.get_model_name_and_extension(
        self.arguments.offline_model_path)
    acl_json_path = os.path.join(msame_dir, ACL_JSON_PATH)
    if not os.path.exists(acl_json_path):
        # NOTE(review): os.mknod is Linux-specific — confirm this tool only
        # targets Linux. 0o600 keeps acl.json owner-only read/write.
        os.mknod(acl_json_path, mode=0o600)
    self._write_content_to_acl_json(acl_json_path, model_name,
                                    npu_data_output_dir)
    # assemble the msame command line; the binary lives at MSAME_COMMAND_PATH
    # relative to the msame "out" directory we chdir into below
    msame_cmd = [
        "./" + MSAME_COMMAND_PATH, "--model",
        self.arguments.offline_model_path, "--input",
        self.arguments.input_path, "--device", self.arguments.device,
        "--output", npu_data_output_dir
    ]
    self._make_msame_cmd_for_shape_range(msame_cmd)
    # msame must be invoked from its own out/ directory ("./<binary>")
    os.chdir(os.path.join(msame_dir, OUT_PATH))
    # do msame command
    utils.print_info_log(
        "Run command line: cd %s && %s" %
        (os.path.join(msame_dir, OUT_PATH), " ".join(msame_cmd)))
    utils.execute_command(msame_cmd)
    # verify the per-operator dump data was produced
    npu_dump_data_path, file_is_exist = utils.get_dump_data_path(
        npu_data_output_dir)
    if not file_is_exist:
        utils.print_error_log("The path {} dump data is not exist.".format(
            npu_dump_data_path))
        raise AccuracyCompareException(
            utils.ACCURACY_COMPARISON_INVALID_PATH_ERROR)
    # net output data path
    npu_net_output_data_path, file_is_exist = utils.get_dump_data_path(
        npu_data_output_dir, True)
    if not file_is_exist:
        utils.print_error_log(
            "The path {} net output data is not exist.".format(
                npu_net_output_data_path))
        raise AccuracyCompareException(
            utils.ACCURACY_COMPARISON_INVALID_PATH_ERROR)
    # convert raw net output files to .npy for later comparison
    self._convert_net_output_to_numpy(npu_net_output_data_path)
    return npu_dump_data_path, npu_net_output_data_path
def save(self, export_path):
    """Persist the GAN's constructor arguments to <export_path>/params.pkl."""
    create_directory(export_path)
    # The number and order of constructor arguments must match.
    constructor_args = [
        self.input_dim,
        self.z_dim,
        self.d_conv_filters,
        self.d_conv_kernel_size,
        self.d_conv_strides,
        self.d_batch_norm_momentum,
        self.d_dropout_rate,
        self.g_initial_dense_layer_size,
        self.g_upsample,
        self.g_conv_filters,
        self.g_conv_kernel_size,
        self.g_conv_strides,
        self.g_batch_norm_momentum,
        self.g_dropout_rate,
    ]
    params_file = os.path.join(export_path, 'params.pkl')
    with open(params_file, 'wb') as handle:
        pickle.dump(constructor_args, handle)
def main(args):
    """Re-window every CQT entity from one stash into a fresh output stash.

    Parameters:
        args: namespace with `data_file` (input stash path), `output_file`
            (destination stash path), and `length` / `stride` windowing
            parameters passed through to `wrap_cqt_for_key`.
    """
    in_stash = biggie.Stash(args.data_file)
    utils.create_directory(os.path.dirname(args.output_file))
    # Start from a clean output file so stale entities don't linger.
    if os.path.exists(args.output_file):
        os.remove(args.output_file)
    out_stash = biggie.Stash(args.output_file)
    STATUS['total'] = len(in_stash)
    # Fix: the original used enumerate() but never used the index —
    # iterate the keys directly.
    for key in in_stash.keys():
        new_entity = wrap_cqt_for_key(in_stash.get(key), args.length,
                                      args.stride)
        out_stash.add(key, new_entity)
    out_stash.close()
def _save(self):
    """Write the autoencoder's constructor arguments to params.pkl."""
    create_directory(self.export_path)
    # The number and order of constructor arguments must match.
    ctor_args = [
        self.input_dim,
        self.z_dim,
        self.encoder_conv_filters,
        self.encoder_conv_kernel_size,
        self.encoder_conv_strides,
        self.decoder_conv_t_filters,
        self.decoder_conv_t_kernel_size,
        self.decoder_conv_t_strides,
        self.use_batch_norm,
        self.use_dropout,
        self.export_path,
    ]
    params_path = os.path.join(self.export_path, 'params.pkl')
    with open(params_path, 'wb') as handle:
        pickle.dump(ctor_args, handle)
def _create_dir(self):
    """Create the working directories: input, dump_data/tf, and tmp."""
    # same three directories as before, created in the same order
    for directory in (self.data_dir, self.tf_dump_data_dir, self.tmp_dir):
        utils.create_directory(directory)
def _create_dir(self):
    """Create the input, dump_data/onnx, and model directories.

    Returns:
        Tuple (data_dir, onnx_dump_data_dir, model_dir) — the three
        directory paths, all rooted at self.args.out_path.
    """
    # build the three paths under out_path, then create each one
    data_dir, onnx_dump_data_dir, model_dir = (
        os.path.join(self.args.out_path, sub)
        for sub in ("input", "dump_data/onnx", "model"))
    for directory in (data_dir, onnx_dump_data_dir, model_dir):
        utils.create_directory(directory)
    return data_dir, onnx_dump_data_dir, model_dir