def fatal_error(msg):
    '''Print an error message and terminate the process.

    :param msg: error message, forwarded to stderr via ``print_stderr``
    :return: does not return; exits the interpreter with status -1
    '''
    print_stderr(msg)
    exit(-1)
def convert(def_path, caffemodel_path, data_output_path, phase):
    """Convert a Caffe model to NumPy weights plus generated Python source.

    ``data_output_path`` must contain ``.npy``; the generated source file is
    written beside it with the ``npy`` part replaced by ``py``.  Output
    directories are created on demand.
    """
    assert '.npy' in data_output_path
    code_output_path = data_output_path.replace('npy', 'py')
    try:
        transformer = TensorFlowTransformer(def_path, caffemodel_path, phase=phase)
        print_stderr('Converting data...')
        if caffemodel_path is not None:
            weights = transformer.transform_data()
            print_stderr('Saving data...')
            # Make sure the target directory exists before writing.
            target_dir = os.path.dirname(data_output_path)
            if target_dir != '' and not os.path.exists(target_dir):
                os.makedirs(target_dir)
            with open(data_output_path, 'wb') as weights_file:
                np.save(weights_file, weights)
        if code_output_path:
            print_stderr('Saving source...')
            # Same on-demand directory creation for the generated source.
            target_dir = os.path.dirname(code_output_path)
            if target_dir != '' and not os.path.exists(target_dir):
                os.makedirs(target_dir)
            with open(code_output_path, 'wb') as source_file:
                source_file.write(transformer.transform_source())
        print_stderr('Done.')
    except KaffeError as err:
        fatal_error('Error encountered: {}'.format(err))
def convert(def_path, caffemodel_path, data_output_path, code_output_path, phase):
    '''Load a Caffe model and convert it to TensorFlow artifacts.

    :param def_path: Caffe network definition file (prototxt format)
    :param caffemodel_path: trained Caffe weights file (caffemodel format)
    :param data_output_path: destination for the converted weight data
    :param code_output_path: destination for the generated TensorFlow source
    :param phase: network phase, 'test' or 'train'
    :return: None
    '''
    try:
        # Parse the network structure and load the trained parameters.
        transformer = TensorFlowTransformer(def_path, caffemodel_path, phase=phase)
        print_stderr('Converting data...')
        if caffemodel_path is not None:
            # Convert weights/biases and persist them.
            converted = transformer.transform_data()
            print_stderr('Saving data...')
            with open(data_output_path, 'wb') as out_file:
                np.save(out_file, converted)
        if code_output_path:
            # Emit the equivalent TensorFlow network source code.
            print_stderr('Saving source...')
            with open(code_output_path, 'wb') as out_file:
                out_file.write(transformer.transform_source())
        print_stderr('Done.')
    except KaffeError as err:
        # Report conversion failures and abort.
        fatal_error('Error encountered: {}'.format(err))
def convert(def_path, caffemodel_path, data_output_path, phase):
    """Convert a Caffe model's trained weights to a NumPy ``.npy`` file.

    This variant converts weight data only; source-code generation was
    disabled (previously present as commented-out code, now removed).

    :param def_path: Caffe network definition file (prototxt)
    :param caffemodel_path: trained Caffe weights file; skipped when None
    :param data_output_path: destination path for the converted weights
    :param phase: network phase passed through to the transformer
    """
    try:
        transformer = TensorFlowTransformer(def_path, caffemodel_path, phase=phase)
        print_stderr('Converting data...')
        if caffemodel_path is not None:
            data = transformer.transform_data()
            print_stderr('Saving data...')
            with open(data_output_path, 'wb') as data_out:
                np.save(data_out, data)
    except KaffeError as err:
        fatal_error('Error encountered: {}'.format(err))
def convert(class_name, def_path, caffemodel_path, data_output_path,
            code_output_path, phase, num_classes):
    """Convert a Caffe model into NumPy weights and generated source code.

    :param class_name: name used for the generated network class
    :param def_path: Caffe network definition file
    :param caffemodel_path: trained weights file; data conversion is skipped
        when None
    :param data_output_path: destination for the converted weight data
    :param code_output_path: destination for the generated source (optional)
    :param phase: network phase passed through to the transformer
    :param num_classes: number of output classes for the generated network
    """
    transformer = TensorFlowTransformer(class_name, def_path, caffemodel_path,
                                        num_classes, phase=phase)
    print_stderr('Converting data...')
    if caffemodel_path is not None:
        weights = transformer.transform_data()
        print_stderr('Saving data...')
        with open(data_output_path, 'wb') as out_file:
            np.save(out_file, weights)
    if code_output_path:
        print_stderr('Saving source...')
        with open(code_output_path, 'wb') as out_file:
            out_file.write(transformer.transform_source())
    print_stderr('Done.')
def convert(def_path, caffemodel_path, data_output_path, code_output_path,
            input_list_path, input_shape_list_path, output_list_path, phase):
    """Convert a Caffe model and optionally export graph metadata as JSON.

    :param def_path: Caffe network definition file
    :param caffemodel_path: trained weights file; data conversion is skipped
        when None
    :param data_output_path: destination for the converted weights (.npy)
    :param code_output_path: destination for the generated source (optional)
    :param input_list_path: optional JSON output listing input node names
    :param input_shape_list_path: optional JSON output listing input shapes
        as [width, height, channels]
    :param output_list_path: optional JSON output listing output node names
    :param phase: network phase passed through to the transformer
    """
    try:
        transformer = TensorFlowTransformer(def_path, caffemodel_path, phase=phase)
        print_stderr('Converting data...')
        if caffemodel_path is not None:
            data = transformer.transform_data()
            print_stderr('Saving data...')
            with open(data_output_path, 'wb') as data_out:
                np.save(data_out, data)
        if code_output_path:
            print_stderr('Saving source...')
            with open(code_output_path, 'wb') as src_out:
                source = transformer.transform_source()
                src_out.write(source)
        if input_list_path or input_shape_list_path or output_list_path:
            mapper = TensorFlowMapper(transformer.graph)
            chains = mapper.map()
            inputs = [x.name for x in transformer.graph.get_input_nodes()]
            input_shapes = [[
                node.output_shape.width, node.output_shape.height,
                node.output_shape.channels
            ] for node in transformer.graph.get_input_nodes()]
            outputs = [x.name for x in transformer.graph.get_output_nodes()]
            # BUGFIX: json.dump writes str, so the JSON files must be opened
            # in text mode ('w'); 'wb' raises TypeError on Python 3.
            if input_list_path:
                with open(input_list_path, 'w') as input_list_out:
                    json.dump(inputs, input_list_out)
            if input_shape_list_path:
                with open(input_shape_list_path, 'w') as input_shape_list_out:
                    json.dump(input_shapes, input_shape_list_out)
            if output_list_path:
                with open(output_list_path, 'w') as output_list_out:
                    json.dump(outputs, output_list_out)
        print_stderr('Done.')
    except KaffeError as err:
        fatal_error('Error encountered: {}'.format(err))
def convert(def_path, caffemodel_path, data_output_path, code_output_path,
            phase, framework):
    """Convert a Caffe model to the requested target framework.

    :param def_path: Caffe network definition file
    :param caffemodel_path: trained weights file; data conversion is skipped
        when None
    :param data_output_path: destination for the converted weights (.npy)
    :param code_output_path: destination for the generated source (optional)
    :param phase: network phase passed through to the transformer
    :param framework: one of 'tensorflow', 'theano', 'tf_nchw'
    :raises NotImplementedError: for any other ``framework`` value
    """
    try:
        # Import lazily so only the selected backend is required.
        if framework == 'tensorflow':
            from kaffe.tensorflow import TensorFlowTransformer
            transformer = TensorFlowTransformer(def_path, caffemodel_path, phase=phase)
        elif framework == 'theano':
            from kaffe.theano import TheanoTransformer
            transformer = TheanoTransformer(def_path, caffemodel_path, phase=phase)
        elif framework == 'tf_nchw':
            from kaffe.tf_nchw import TF_NCHW_Transformer
            transformer = TF_NCHW_Transformer(def_path, caffemodel_path, phase=phase)
        else:
            raise NotImplementedError('Not implemented target to convert')
        print_stderr('Converting data...')
        if caffemodel_path is not None:
            data = transformer.transform_data()
            print_stderr('Saving data...')
            with open(data_output_path, 'wb') as data_out:
                np.save(data_out, data)
        if code_output_path:
            print_stderr('Saving source...')
            with open(code_output_path, 'w') as src_out:
                src_out.write(transformer.transform_source())
        print_stderr('Done.')
    # NotImplementedError is not a KaffeError, so it propagates naturally;
    # the former `except NotImplementedError as err: raise` clause was
    # redundant (and bound an unused name) and has been removed.
    except KaffeError as err:
        fatal_error('Error encountered: {}'.format(err))
def convert(def_path, caffemodel_path, data_output_path, code_output_path,
            standalone_output_path, phase):
    """Convert a Caffe model and optionally freeze it to a standalone graph.

    :param def_path: Caffe network definition file
    :param caffemodel_path: trained weights file
    :param data_output_path: destination for converted weights; when None and
        a standalone graph is requested, a temp .npy is generated instead
    :param code_output_path: destination for generated source; when None and
        a standalone graph is requested, a temp .py is generated instead
    :param standalone_output_path: destination for a frozen protobuf graph;
        falsy to skip the freeze step
    :param phase: network phase passed through to the transformer
    """
    try:
        sess = tf.InteractiveSession()
        transformer = TensorFlowTransformer(def_path, caffemodel_path, phase=phase)
        print_stderr('Converting data...')
        if data_output_path is not None:
            data = transformer.transform_data()
            print_stderr('Saving data...')
            with open(data_output_path, 'wb') as data_out:
                np.save(data_out, data)
        if code_output_path is not None:
            print_stderr('Saving source...')
            with open(code_output_path, 'wb') as src_out:
                src_out.write(transformer.transform_source())
        if standalone_output_path:
            filename, _ = os.path.splitext(
                os.path.basename(standalone_output_path))
            temp_folder = os.path.join(os.path.dirname(standalone_output_path),
                                       '.tmp')
            # BUGFIX: was a bare `except: pass`, which silently swallowed
            # every exception (including KeyboardInterrupt). Only tolerate
            # filesystem errors such as "directory already exists".
            try:
                os.makedirs(temp_folder)
            except OSError:
                pass
            # Fall back to temp files when the caller did not ask for the
            # intermediate artifacts explicitly.
            if data_output_path is None:
                data = transformer.transform_data()
                print_stderr('Saving data...')
                data_output_path = os.path.join(temp_folder, filename) + '.npy'
                with open(data_output_path, 'wb') as data_out:
                    np.save(data_out, data)
            if code_output_path is None:
                print_stderr('Saving source...')
                code_output_path = os.path.join(temp_folder, filename) + '.py'
                with open(code_output_path, 'wb') as src_out:
                    src_out.write(transformer.transform_source())
            checkpoint_path = os.path.join(temp_folder, filename + '.ckpt')
            graph_name = os.path.basename(standalone_output_path)
            graph_folder = os.path.dirname(standalone_output_path)
            # First/last graph nodes are taken as the network input/output.
            input_node = transformer.graph.nodes[0].name
            output_node = transformer.graph.nodes[-1].name
            tensor_shape = transformer.graph.get_node(input_node).output_shape
            tensor_shape_list = [
                tensor_shape.batch_size, tensor_shape.height,
                tensor_shape.width, tensor_shape.channels
            ]
            # Import the just-generated network class dynamically.
            sys.path.append(os.path.dirname(code_output_path))
            module = os.path.splitext(os.path.basename(code_output_path))[0]
            class_name = transformer.graph.name
            KaffeNet = getattr(__import__(module), class_name)
            data_placeholder = tf.placeholder(tf.float32, tensor_shape_list,
                                              name=input_node)
            net = KaffeNet({input_node: data_placeholder})
            # load weights stored in numpy format
            net.load(data_output_path, sess)
            print_stderr('Saving checkpoint...')
            saver = tf.train.Saver()
            saver.save(sess, checkpoint_path)
            print_stderr('Saving graph definition as protobuf...')
            tf.train.write_graph(sess.graph.as_graph_def(), graph_folder,
                                 graph_name, False)
            input_graph_path = standalone_output_path
            input_saver_def_path = ""
            input_binary = True
            input_checkpoint_path = checkpoint_path
            output_node_names = output_node
            restore_op_name = 'save/restore_all'
            filename_tensor_name = 'save/Const:0'
            output_graph_path = standalone_output_path
            clear_devices = True
            print_stderr('Saving standalone model...')
            # Freeze variables into constants, producing one self-contained
            # protobuf at standalone_output_path.
            freeze_graph(input_graph_path, input_saver_def_path, input_binary,
                         input_checkpoint_path, output_node_names,
                         restore_op_name, filename_tensor_name,
                         output_graph_path, clear_devices, '')
            shutil.rmtree(temp_folder)
        print_stderr('Done.')
    except KaffeError as err:
        fatal_error('Error encountered: {}'.format(err))
def fatal_error(msg):
    """Print *msg* to stderr and abort the process with exit code -1."""
    print_stderr(msg)
    exit(-1)
def convert(def_path, caffemodel_path, data_output_path, code_output_path,
            phase):
    """ convert caffe model to tf/paddle models

    Returns 0 on success; on KaffeError the error is reported and the
    process exits via ``fatal_error``.
    """
    try:
        transformer = Transformer(def_path, caffemodel_path, phase=phase)
        print_stderr('Converting data...')
        if caffemodel_path is not None:
            weights = transformer.transform_data()
            print_stderr('Saving data...')
            with open(data_output_path, 'wb') as weights_file:
                np.save(weights_file, weights)
        if code_output_path:
            print_stderr('Saving source...')
            with open(code_output_path, 'wb') as source_file:
                source_file.write(transformer.transform_source())
        # Tell the user how to make the custom-layer package importable.
        print_stderr('set env variable before using converted model '
                     'if used custom_layers:')
        custom_pk_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'kaffe')
        print_stderr('export CAFFE2FLUID_CUSTOM_LAYERS=%s' % (custom_pk_path))
        print_stderr('Done.')
        return 0
    except KaffeError as err:
        fatal_error('Error encountered: {}'.format(err))
        return 1
def fatal_error(msg):
    """ fatal error encountered

    Print *msg* to stderr and terminate the process with exit code -1.
    """
    print_stderr(msg)
    exit(-1)
def convert(def_path, caffemodel_path, data_output_path, code_output_path,
            standalone_output_path, phase, freeze):
    """Convert a Caffe model and optionally freeze it (TF 1.x-compat API).

    :param def_path: Caffe network definition file
    :param caffemodel_path: trained weights file
    :param data_output_path: destination for converted weights (pickled);
        when None and a standalone graph is requested, a temp file is used
    :param code_output_path: destination for generated source; when None and
        a standalone graph is requested, a temp file is used
    :param standalone_output_path: destination for the frozen graph; falsy
        to skip the freeze step
    :param phase: network phase passed through to the transformer
    :param freeze: freezing strategy, 'freeze_graph' or
        'optimize_for_inference'
    """
    try:
        sess = tf.InteractiveSession()
        transformer = TensorFlowTransformer(def_path, caffemodel_path, phase=phase)
        print_stderr('Converting data...')
        if data_output_path is not None:
            data = transformer.transform_data()
            print_stderr('Saving data...')
            # Weights are pickled here (other variants use np.save).
            with open(data_output_path, 'wb') as handle:
                pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL)
        if code_output_path is not None:
            print_stderr('Saving source...')
            with open(code_output_path, 'wb') as src_out:
                src_out.write(transformer.transform_source())
        if standalone_output_path:
            filename, _ = os.path.splitext(
                os.path.basename(standalone_output_path))
            temp_folder = os.path.join(
                os.path.dirname(standalone_output_path), '.tmp')
            if not os.path.exists(temp_folder):
                os.makedirs(temp_folder)
            # NOTE(review): this removes the temp folder right after ensuring
            # it exists; later writes into temp_folder would then fail unless
            # data/code paths were supplied by the caller — looks buggy,
            # confirm intent before relying on the temp-file fallbacks.
            shutil.rmtree(temp_folder)  # Delete old graphs
            if data_output_path is None:
                data = transformer.transform_data()
                print_stderr('Saving data...')
                data_output_path = os.path.join(temp_folder, filename) + '.npy'
                with open(data_output_path, 'wb') as handle:
                    pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL)
            if code_output_path is None:
                print_stderr('Saving source...')
                code_output_path = os.path.join(temp_folder, filename) + '.py'
                with open(code_output_path, 'wb') as src_out:
                    src_out.write(transformer.transform_source())
            checkpoint_path = os.path.join(temp_folder, filename + '.ckpt')
            graph_name = os.path.basename(standalone_output_path)
            graph_folder = os.path.dirname(standalone_output_path)
            # First/last graph nodes are taken as the network input/output.
            input_node = transformer.graph.nodes[0].name
            output_node = transformer.graph.nodes[-1].name
            tensor_shape = transformer.graph.get_node(input_node).output_shape
            tensor_shape_list = [tensor_shape.batch_size, tensor_shape.height,
                                 tensor_shape.width, tensor_shape.channels]
            # Import the just-generated network class dynamically.
            sys.path.append(os.path.dirname(code_output_path))
            module = os.path.splitext(os.path.basename(code_output_path))[0]
            class_name = transformer.graph.name
            KaffeNet = getattr(__import__(module), class_name)
            data_placeholder = tf.compat.v1.placeholder(
                tf.float32, tensor_shape_list, name=input_node)
            net = KaffeNet({input_node: data_placeholder})
            # load weights stored in numpy format
            net.load(data_output_path, sess)
            print_stderr('Saving checkpoint...')
            saver = tf.compat.v1.train.Saver()
            saver.save(sess, checkpoint_path)
            print_stderr('Saving graph definition as protobuf...')
            tf.io.write_graph(sess.graph.as_graph_def(), graph_folder,
                              graph_name, False)
            # Emit a TensorBoard event file for inspection.
            writer = tf.compat.v1.summary.FileWriter('.tmp', sess.graph)
            writer.close()
            input_graph_path = standalone_output_path
            input_saver_def_path = ""
            input_binary = True
            input_checkpoint_path = checkpoint_path
            output_node_names = output_node
            restore_op_name = 'save/restore_all'
            filename_tensor_name = 'save/Const:0'
            output_graph_path = standalone_output_path
            clear_devices = True
            print_stderr('Saving standalone model...')
            # Qualify the output node as 'scope/name' — presumably matching
            # how the generated KaffeNet scopes its ops; verify against the
            # generated source.
            output_node_names = '{0}/{0}'.format(output_node_names)
            if freeze == 'freeze_graph':
                # Freeze variables into constants in-place at the output path.
                freeze_graph(input_graph_path, input_saver_def_path,
                             input_binary, input_checkpoint_path,
                             output_node_names, restore_op_name,
                             filename_tensor_name, output_graph_path,
                             clear_devices, '')
            elif freeze == 'optimize_for_inference':
                graph_def = sess.graph.as_graph_def()
                graph_def = tf.graph_util.convert_variables_to_constants(
                    sess, graph_def, [output_node_names])
                # NOTE(review): input node name is hard-coded as 'data' here,
                # unlike input_node above — confirm this matches the model.
                graph_def_f32 = optimize_for_inference_lib.optimize_for_inference(
                    graph_def, ['data'], [output_node_names],
                    tf.float32.as_datatype_enum)
                tf.train.write_graph(
                    graph_def_f32, "",
                    standalone_output_path.rsplit('.', 1)[0] + '.pb',
                    as_text=False)
                tf.train.write_graph(
                    graph_def_f32, "",
                    standalone_output_path.rsplit('.', 1)[0] + '.pbtxt',
                    as_text=True)
            # Temp-folder cleanup intentionally disabled (kept for debugging?).
            #f = shutil.rmtree(temp_folder)
            writer = tf.compat.v1.summary.FileWriter('.tmp', sess.graph)
            writer.close()
        print_stderr('Done.')
    except KaffeError as err:
        fatal_error('Error encountered: {}'.format(err))