def to_proto(*tops):
    """Build a NetParameter message containing every layer needed to compute *tops*."""
    layer_map = OrderedDict()
    name_counts = Counter()
    # Each top's function recursively registers its producing layers.
    for top in tops:
        top.fn._to_proto(layer_map, {}, name_counts)
    proto = caffe_pb2.NetParameter()
    proto.layer.extend(layer_map.values())
    return proto
def load_using_pb(self):
    """Read the binary caffemodel, record per-layer sizes via read_size,
    then dump self.json_dict as JSON to an 'offset_*.json' file."""
    data = caffe_pb2.NetParameter()
    print("Loading the caffemodel. This takes a couple of minutes.")
    # Context manager closes the handle; the original leaked it.
    with open(self.data_path, 'rb') as model_file:
        data.MergeFromString(model_file.read())
    print("Done reading")
    # Prefer the legacy `layers` field, falling back to the newer `layer`.
    layers = data.layers or data.layer
    # The original used map(), which is lazy on Python 3, so read_size()
    # was never actually invoked there. Use an explicit loop so its side
    # effects (presumably populating self.json_dict — confirm) always run.
    for layer in layers:
        if layer.blobs:
            self.read_size(layer)
    # json.dumps returns str, so the file must be opened in text mode
    # (the original 'wb' raises TypeError on Python 3).
    with open('offset_' + self.data_path[:-11] + '.json', 'w') as f:
        f.write(json.dumps(self.json_dict))
    print("Done writing.")
def get_network_visualization(self, desc):
    """Return an HTML <image> tag embedding a PNG rendering of the network."""
    net = caffe_pb2.NetParameter()
    text_format.Merge(desc, net)
    # draw_net raises when the net has no name, so supply a default.
    if not net.name:
        net.name = 'Network'
    # NOTE(review): .encode('base64') is Python-2-only — confirm runtime.
    png_b64 = caffe.draw.draw_net(net, 'UD').encode('base64')
    return '<image src="data:image/png;base64,' + png_b64 + '" style="max-width:100%" />'
def models_visualize_network():
    """Render the posted custom network as an HTML <image> tag with inline PNG data."""
    net = caffe_pb2.NetParameter()
    text_format.Merge(flask.request.form['custom_network'], net)
    # draw_net raises when the net has no name, so supply a default.
    if not net.name:
        net.name = 'Network'
    # NOTE(review): .encode('base64') is Python-2-only — confirm runtime.
    png_b64 = caffe.draw.draw_net(net, 'UD').encode('base64')
    return '<image src="data:image/png;base64,' + png_b64 + '" style="max-width:100%" />'
def get_network_from_previous(self, previous_network):
    """Copy *previous_network* and rename its last InnerProduct layer for retraining."""
    network = caffe_pb2.NetParameter()
    network.CopyFrom(previous_network)
    # Rename the final layer.
    # XXX making some assumptions about network architecture here
    inner_products = [layer for layer in network.layer
                      if layer.type == 'InnerProduct']
    if inner_products:
        final = inner_products[-1]
        final.name = '%s_retrain' % final.name
    return network
def prototxt_to_forward(prototxt):
    """Parse a prototxt file and return the list of parse_layer() results."""
    net = caffe_pb2.NetParameter()
    with open(prototxt, 'r') as handle:
        Merge(handle.read(), net)
    # Newer prototxts populate `layer`; fall back to the legacy `layers`.
    source = net.layer if net.layer else net.layers
    return [parse_layer(entry) for entry in source]
def main():
    """Parse CLI arguments, load the net prototxt, and render it to an image file."""
    args = parse_args()
    net = caffe_pb2.NetParameter()
    # Context manager guarantees the prototxt handle is closed
    # (the original leaked it).
    with open(args.input_net_proto_file) as f:
        text_format.Merge(f.read(), net)
    print('Drawing net to %s' % args.output_image_file)
    # Map the CLI phase string to the caffe constant; "ALL" means no filter.
    phase_by_name = {"TRAIN": caffe.TRAIN, "TEST": caffe.TEST, "ALL": None}
    if args.phase not in phase_by_name:
        raise ValueError("Unknown phase: " + args.phase)
    phase = phase_by_name[args.phase]
    draw_net_to_file(net, args.output_image_file, args.rankdir, phase)
def get_network_visualization(self, **kwargs):
    """Return an HTML <image> tag embedding a PNG rendering of the network in kwargs['desc']."""
    net = caffe_pb2.NetParameter()
    text_format.Merge(kwargs['desc'], net)
    # draw_net raises when the net has no name, so supply a default.
    if not net.name:
        net.name = 'Network'
    png = str(caffe.draw.draw_net(net, 'UD'))
    return ('<image src="data:image/png;base64,' + png +
            '" style="max-width:100%" />')
def create_header(prototxtFile):
    """Generate C header/source files describing the network in *prototxtFile*.

    Writes layer-count and input-geometry #defines plus an array of layer
    structs via a DnnHeaderCreater, using module-level names
    (headerFileName, sourceFileName, startString, docString, endString,
    structName, arrayName) for output locations and boilerplate.
    """
    # Create the Caffe network parameter object
    net = caffe_pb2.NetParameter()
    # merge the given network with the object. This will read the text file and initialize all
    # parameters of the object with those from the text file.
    # NOTE(review): the prototxt handle opened here is never closed.
    Merge((open(prototxtFile, 'r').read()), net)
    hfile = open(headerFileName, 'w')
    sfile = open(sourceFileName, 'w')
    hfile.write(startString)
    hfile.write(docString)
    sfile.write(docString)
    sfile.write("#include " + '"' + headerFileName + '"' + "\n\n")
    # find the total number of layers in the network. Input layer is not counted as it is a data layer.
    # hence -1
    NumCnnLayers = len(net.layer)
    hfile.write("#define NO_DEEP_LAYERS " + str(NumCnnLayers - 1) + "\n")
    # create header file writer object
    hw = DnnHeaderCreater(net, hfile, sfile)
    hw.parse_params()
    # make sure the structures are arranged in the layer connection format
    hw.order_structures()
    hfile.write("#define INPUT_IMG_WIDTH " + str(hw.inputWidth) + "\n")
    hfile.write("#define INPUT_IMG_HEIGHT " + str(hw.inputHeight) + "\n")
    hfile.write("#define NO_INPUT_MAPS " + str(hw.noInputMaps) + "\n")
    # write general struct definition to the header file
    hw.write_struct_definition(structName)
    # declare the array of structure
    hfile.write("extern " + "const " + structName + " " + arrayName +
                "[NO_DEEP_LAYERS];\n\n")
    hw.write_struct_array(structName)
    # write the header end string
    hfile.write(endString)
    hfile.write('\n')
    # print approx no of operations present in the network
    #hw.compute_no_ops()
    hfile.close()
    sfile.close()
    cprint('Generated source files successfully', 'green')
    cprint(
        'Copy {:s} and {:s} to the main project src and inc directories respectively.'
        .format(sourceFileName, headerFileName), 'green')
def create_softmax_layer(self, add_net_layer_name):
    """Return a NetParameter containing one Softmax layer per name in *add_net_layer_name*."""
    net_proto = caffe_pb2.NetParameter()
    for bottom_name in add_net_layer_name:
        # NOTE(review): `bottom` is a repeated field — passing a plain str
        # here may be split into characters by protobuf; confirm intent.
        softmax = caffe_pb2.LayerParameter(
            name="{}-softmax".format(bottom_name),
            type="Softmax",
            bottom=bottom_name)
        net_proto.layer.extend([softmax])
    return net_proto
def fillList(self):
    """Populate the list widget with one checkable item per layer of the
    prototxt referenced by the net at self.index in netData.prototxt."""
    # NOTE(review): both file handles opened here are never closed.
    self.data = open(root + '/net/netData.prototxt').read()
    self.listWidget.clear()
    self.netHandler = netConfig_pb2.Param()
    self.protoHandler = caffe_pb2.NetParameter()
    text_format.Merge(self.data, self.netHandler)
    text_format.Merge(
        open(self.netHandler.net[self.index].protopath).read(),
        self.protoHandler)
    # Iterates only the legacy `layers` field; nets using the newer
    # `layer` field would produce no items — confirm intended.
    for elem in self.protoHandler.layers:
        item = QtGui.QListWidgetItem(elem.name)
        item.setFlags(item.flags() | QtCore.Qt.ItemIsUserCheckable)
        item.setCheckState(QtCore.Qt.Unchecked)
        self.listWidget.addItem(item)
class Result:
    """Read-only view over a parsed binary .caffemodel (NetParameter) file."""

    # Retained for backward compatibility with any code that references
    # Result.param directly; instances now carry their own parsed copy.
    param = caffe_pb2.NetParameter()

    def __init__(self, infile):
        """Parse the binary caffemodel at *infile* into this instance.

        The message is stored per-instance: the original kept a single
        shared class attribute, so constructing a second Result silently
        overwrote the first one's data.
        """
        self.param = caffe_pb2.NetParameter()
        # 'rb' + context manager: binary protobuf, deterministic close
        # (the original leaked the handle and shadowed the `input` builtin).
        with open(infile, 'rb') as handle:
            self.param.ParseFromString(handle.read())

    def getlayer(self, param_name):
        """Return the first layer named *param_name*, or None if absent."""
        for layer in self.param.layer:
            if layer.name == param_name:
                return layer

    def getdata(self, param_name, n):
        """Return the blobs of the layer named *param_name*, or None.

        NOTE(review): `n` is accepted but unused, exactly as in the
        original; kept for interface compatibility.
        """
        for layer in self.param.layer:
            if layer.name == param_name:
                return layer.blobs
def read_deploy_into_proto_delete_slience(deploy_file, net_proto_layer):
    """Parse *deploy_file*, drop its final (silence) layer, and append the
    remaining layers to *net_proto_layer*; returns *net_proto_layer*."""
    parsed = caffe_pb2.NetParameter()
    with open(deploy_file, 'r') as handle:
        text_format.Merge(handle.read(), parsed)
    # #delete data layer
    # del parsed.layer[0]
    # delete slience layer
    del parsed.layer[-1]
    net_proto_layer.layer.extend(parsed.layer)
    return net_proto_layer
def insertLayer(self, handle, pos, data):
    """Insert the layers described by prototxt text *data* into
    handle.layer at index *pos*.

    Strategy: append one empty slot per new layer, shift the existing
    tail down to fill the end, then copy the parsed layers into the gap.
    """
    #Step 1: Assign Net Parameter
    c = caffe_pb2.NetParameter()
    text_format.Merge(data, c)
    # Grow handle.layer by len(c.layer) empty slots.
    for idx in range(len(c.layer)):
        handle.layer.add()
    #Step 2:
    length = len(handle.layer)
    # Shift existing layers toward the end, iterating backwards so each
    # source slot is read before anything overwrites it. The text-format
    # round-trip yields an independent copy of the layer message.
    for idx in range(length - 1, pos - 1, -1):
        h = caffe_pb2.LayerParameter()
        text_format.Merge(handle.layer[idx - len(c.layer)].__str__(), h)
        handle.layer[idx].CopyFrom(h)
    # Fill the freed gap [pos, pos+len(c.layer)) with the new layers.
    for idx in range(pos, pos + len(c.layer)):
        handle.layer[idx].CopyFrom(c.layer[idx - pos])
def parse_caffemodel(filepath):
    '''
    parses the trained .caffemodel file

    filepath: /path/to/trained-model.caffemodel

    returns: layers
    '''
    netparam = caffe_pb2.NetParameter()
    # .caffemodel files are binary protobuf: open in 'rb' (the original's
    # text mode fails on Python 3 and corrupts bytes on Windows), and use
    # a context manager so the handle is always closed.
    with open(filepath, 'rb') as f:
        netparam.ParseFromString(f.read())
    return find_layers(netparam)
def change_input(src_deploy_file, dst_deploy_file, height, width):
    """Copy a deploy prototxt, rewriting its input height and width dims."""
    print(height, width)
    net_proto = caffe_pb2.NetParameter()
    # load net — context managers replace the original open/close pairs
    # so the handles are released even if parsing/printing raises.
    with open(src_deploy_file, 'r') as f:
        text_format.Merge(f.read(), net_proto)
    # input_dim layout is (num, channels, height, width).
    net_proto.input_dim[2] = height
    net_proto.input_dim[3] = width
    # save changed model
    with open(dst_deploy_file, 'w') as f:
        print(net_proto, file=f)
def main():
    """CLI entry: load a caffemodel and convert it to AMD NNIR format."""
    if len(sys.argv) < 4:
        print(
            "Usage : python caffe_to_nnir.py <caffeModel> <nnirOutputFolder> --input-dims n,c,h,w [--verbose 0|1] [--node_type_append 0/1 (optional: appends node type to output tensor name)]"
        )
        sys.exit(1)
    caffeFileName = sys.argv[1]
    outputFolder = sys.argv[2]
    # argv[3] is the '--input-dims' flag itself; argv[4] carries "n,c,h,w".
    input_dims = sys.argv[4].split(',')
    verbose = 0
    #appends node type to output tensor name.
    node_type_append = 0
    pos = 5
    while pos < len(sys.argv) and sys.argv[pos][:2] == '--':
        if sys.argv[pos] == '--node_type_append':
            node_type_append = int(sys.argv[pos + 1])
            pos = pos + 2
        elif sys.argv[pos] == '--verbose':
            verbose = int(sys.argv[pos + 1])
            pos = pos + 2
        else:
            # The original never advanced `pos` for an unrecognized flag,
            # spinning forever; warn and skip it instead.
            print("WARNING: ignoring unknown option " + sys.argv[pos])
            pos = pos + 1
    if (verbose):
        print("OK: verbose enabled.")
    print("OK: loading caffemodel from %s ..." % (caffeFileName))
    net_parameter = caffe_pb2.NetParameter()
    if not os.path.isfile(caffeFileName):
        print("ERROR: unable to open : " + caffeFileName)
        sys.exit(1)
    if (verbose):
        print("parsing the caffemodel from : " + str(caffeFileName))
    # Binary protobuf read; handle closed deterministically.
    with open(caffeFileName, 'rb') as model_file:
        net_parameter.ParseFromString(model_file.read())
    print("OK: caffemodel read successful")
    print("converting to AMD NNIR format in %s folder ... " % (outputFolder))
    if (verbose):
        print("input parameters obtained are : " + str(input_dims[0]) + " " +
              str(input_dims[1]) + " " + str(input_dims[2]) + " " +
              str(input_dims[3]))
    caffe2ir(net_parameter, input_dims, outputFolder, verbose,
             node_type_append)
def __init__(self, proto, inputs=dict(), input_format="NC*"): """Create network using structure from Caffe proto file. Args: proto: The plaintext network prototxt file on disk. Type `str`. inputs: Dictionary mapping input variable `top` strings to Tensors. Use this to connect Tensors directly to the network, instead of using `tf.placeholder` type variables that are created by default for all layers of type `Input`. `tf.placeholder` instances will be created for any `top` values missing in the dictionary. Defaults to empty `dict`. input_format: The order of inputs specified in the prototxt file. The TensorFlow program is expected to provide inputs in the order `N*C`, where the dimension `N` denotes batch, `C` denotes the number of channels, and `*` denotes the rest of the dimensions. Valid arguments are `N*C`, `NC*`, `*NC`, `CN*`, `C*N`, and `*CN`. The network will permute the order of dimensions specified in prototxt file for the InputParameter in the order `N*C`. Defaults to `NC*`. """ # Layer names mapped to outputs. self._name_outputs = defaultdict(list) # Layer top names mapped to outputs. self._top_outputs = defaultdict(list) # Layer name to list of variables. self._name_vars = defaultdict(list) # Protobuf LayerParameter messages for each layer, loaded in the # order presented in the proto file. self._layer_params = [] # List of variables. self._vars = [] # List of trainable variables. self._tvars = [] # Layer name to (name, var) mapping. self._layer_vars = defaultdict(list) # The NetParameter protobuf message. self._net_param = cpb.NetParameter() self._input_format = input_format self._inputs = dict(inputs) # Load and parse the prototxt file. self._parse_proto(proto) # Create network, based on the parse. self._make_network()
def save_deploy_file_classification(network, path, num_categories,
                                    crop_size=None, feature_dims=None,
                                    logger=None):
    """
    Save deploy_file to disk

    Builds a deploy NetParameter from the cleaned-up classification
    network, writes it as text format to <path>/<CAFFE_DEPLOY_FILE>,
    then sanity-checks it. Raises AssertionError when no Softmax layer
    is present.
    """
    network = cleanedUpClassificationNetwork(network, num_categories)
    _, _, deploy_layers = filterLayersByState(network)
    deploy_network = caffe_pb2.NetParameter()
    deploy_file = CAFFE_DEPLOY_FILE
    # Input
    deploy_network.input.append('data')
    shape = deploy_network.input_shape.add()
    shape.dim.append(1)
    # Dims appended as (1, feature_dims[2], feature_dims[0], feature_dims[1]);
    # presumably feature_dims is (H, W, C) yielding NCHW — confirm with callers.
    shape.dim.append(feature_dims[2])
    # TODO - Implement crop_size
    shape.dim.append(feature_dims[0])
    shape.dim.append(feature_dims[1])
    # Layers
    deploy_network.MergeFrom(deploy_layers)
    # Write to file
    with open(path + "/" + deploy_file, 'w') as outfile:
        text_format.PrintMessage(deploy_network, outfile)
    # network sanity checks
    if logger:
        logger.debug("Network sanity check - deploy")
    net_sanity_check(deploy_network, caffe_pb2.TEST)
    found_softmax = False
    for layer in deploy_network.layer:
        if layer.type == 'Softmax':
            found_softmax = True
            break
    assert found_softmax, 'Your deploy network is missing a Softmax layer! Read the documentation for custom networks and/or look at the standard networks for examples.'
def onNetSelectionChangedSlot(self, index=0):
    """Refresh the layer list widget for the net currently selected in the combo box."""
    self.currentNet = netConfig_pb2.NetParam()
    # Find the registry entry whose name matches the combo-box selection.
    for elem in self.netHandler.net:
        if elem.name == self.comboBox_2.currentText().__str__():
            self.currentNet.CopyFrom(elem)
    #if str(self.currentNet)=="":self.currentNet.
    self.listWidget.clear()
    handle = caffe_pb2.NetParameter()
    # Nothing matched: leave the list empty.
    if (self.currentNet.__str__() == ""):
        return
    text_format.Merge(open(self.currentNet.protopath).read(), handle)
    # Show both the new-style `layer` and legacy `layers` fields (at most
    # one is populated in practice). The original duplicated this loop
    # body verbatim for each field; deduplicated here.
    for elem in list(handle.layer) + list(handle.layers):
        item = QtGui.QListWidgetItem(elem.name)
        item.setFlags(item.flags() | QtCore.Qt.ItemIsUserCheckable)
        item.setCheckState(QtCore.Qt.Unchecked)
        self.listWidget.addItem(item)
def setupClass(cls):
    """Create mocked dataset/model jobs and register them with the
    scheduler so the model-create view has data to render."""
    super(TestCreate, cls).setupClass()
    with app.test_request_context():
        cls.url = flask.url_for('image_classification_model_create')
    dj = mock.Mock(spec=digits.dataset.ImageClassificationDatasetJob)
    dj.status.is_running.return_value = True
    dj.id.return_value = 'dataset'
    dj.name.return_value = ''
    mj = mock.Mock(spec=digits.model.ImageClassificationModelJob)
    mj.id.return_value = 'model'
    mj.name.return_value = ''
    # Instead of using a dummy hardcoded value as snapshot path, a real
    # temp file path is used to avoid the file-doesn't-exist exception
    # raised in views.py.
    _, cls.temp_snapshot_path = tempfile.mkstemp()
    mj.train_task.return_value.snapshots = [(cls.temp_snapshot_path, 1)]
    mj.train_task.return_value.network = caffe_pb2.NetParameter()
    digits.webapp.scheduler.jobs = [dj, mj]
def read_deploy_into_proto_config_delete_num(deploy_file, net_proto_layer,
                                             bn_use_global_stats, delete_num):
    """Parse *deploy_file*, set every BatchNorm layer's use_global_stats,
    trim *delete_num* trailing layers, and append the rest to
    *net_proto_layer*; returns *net_proto_layer*."""
    parsed = caffe_pb2.NetParameter()
    with open(deploy_file, 'r') as handle:
        text_format.Merge(handle.read(), parsed)
    # reset all batchnorm use_global_stats to the requested value
    for layer in parsed.layer:
        if layer.type == "BatchNorm":
            layer.batch_norm_param.use_global_stats = bn_use_global_stats
    # trim the requested number of trailing layers
    for _ in range(delete_num):
        del parsed.layer[-1]
    net_proto_layer.layer.extend(parsed.layer)
    return net_proto_layer
def __init__(self, netName, netList, batchSize, dataName, saveName):
    """Load the net registry (netData.prototxt) and the prototxt of the
    net named *netName*; stash the remaining run parameters on self."""
    self.batchSize = batchSize
    self.netHandler = netConfig_pb2.Param()
    # NOTE(review): file handles opened inline here are never closed.
    text_format.Merge(
        open(root + '/net/netData.prototxt').read(), self.netHandler)
    self.currentNet = netConfig_pb2.NetParam()
    # Pick the registry entry matching the requested net name
    # (the last match wins if names repeat).
    for elem in self.netHandler.net:
        print elem.name, netName
        if elem.name == netName:
            self.currentNet.CopyFrom(elem)
    self.protoHandler = caffe_pb2.NetParameter()
    text_format.Merge(
        open(self.currentNet.protopath).read(), self.protoHandler)
    self.dataName = dataName
    self.saveName = saveName
    self.netList = netList
    self.netName = netName
def convert_caffemodel(fname, output):
    """Rewrite the weights of every ELU/PReLU layer in the caffemodel at
    *fname* and save the result as '<output>.caffemodel'."""
    proto = caffe_pb2.NetParameter()
    # caffemodel files are binary protobuf: must be opened in 'rb'
    # (the original's "r" text mode fails on Python 3). The context
    # manager also closes the handle deterministically.
    with open(fname, "rb") as f:
        proto.ParseFromString(f.read())
    count_elu = 0
    count_prelu = 0
    for layer in proto.layer:
        if layer.type == "ELU":
            count_elu += 1
            modify_elu_weight(layer, count_elu)
        elif layer.type == "PReLU":
            count_prelu += 1
            modify_prelu_weight(layer, count_prelu)
    with open(output + '.caffemodel', 'wb') as f:
        f.write(proto.SerializeToString())
def parse_caffemodel(filepath):
    '''
    parses the trained .caffemodel file

    filepath: /path/to/trained-model.caffemodel

    returns: a dict mapping layer name -> layer blobs
    '''
    netparam = caffe_pb2.NetParameter()
    # Binary protobuf: open in 'rb' (the original's text mode breaks on
    # Python 3) and let the context manager close the handle.
    with open(filepath, 'rb') as f:
        netparam.ParseFromString(f.read())
    # goes name -> parameter. TODO: something else?
    return {layer.name: layer.blobs for layer in find_layers(netparam)}
def modify_caffemodel(layers_to_modify=['conv4b'], save_data=True):
    """Dump selected layer weights to .bin files (save_data=True) or load
    modified weights back into the model and re-save it.

    NOTE(review): mutable default argument layers_to_modify=['conv4b'] is
    shared across calls — confirm intentional.
    """
    # NOTE(review): binary protobuf opened in text mode 'r' — works on
    # POSIX Python 2 only; 'rb' would be portable.
    with open('final_c3d_ucf101_finetune_whole_iter_100', 'r') as f:
        cq2 = caffe_pb2.NetParameter()
        cq2.ParseFromString(f.read())
    layers = cq2.layers
    save_model = target_model
    for lc in layers:
        name = lc.name
        if (name == 'pre_pool4'):
            print lc
        print name
        for layer_name in layer_names:
            if (name == layer_name):
                weight = np.float32(np.array(lc.blobs[0].data))
                # NOTE(review): bias also reads blobs[0]; Caffe convention
                # stores bias in blobs[1] — looks like a copy/paste bug,
                # confirm before relying on the dumped _bias.bin files.
                bias = np.float32(np.array(lc.blobs[0].data))
                if (save_data == True):
                    weight.tofile(
                        os.path.join(origin_weight_folder,
                                     name + '_weight.bin'))
                    bias.tofile(
                        os.path.join(origin_weight_folder,
                                     name + '_bias.bin'))
                    print 'Finish save data to layer ' + name
                else:
                    for layer in layers_to_modify:
                        if (name == 'pre_pool4'):
                            print lc
                            save_model += '_' + name
                            # Shrink the kernel to 1x1 before loading the
                            # replacement weights.
                            lc.blobs[0].width = 1
                            lc.blobs[0].height = 1
                            lc.convolution_param.kernel_size = 1
                            data = read_data(
                                os.path.join(modify_weight_folder,
                                             name + '_weight.bin'))
                            lc.blobs[0].data[:] = data
                            print 'Finish assign new data to layer ' + name
    print 'Save all parameters to file ' + save_model
    with open(save_model, 'wb') as f:
        f.write(cq2.SerializeToString())
def load_from_proto(self, prototxt):
    """Parse *prototxt*, wrap each layer that is active in the current
    phase, append the wrappers to self.layers, and return the net message."""
    net = caffe_pb2.NetParameter()
    with open(prototxt, 'r') as f:
        Merge(f.read(), net)

    def xor(a, b):
        return (a and (not b)) or ((not a) and b)

    # A net must use exactly one of the new (`layer`) or legacy
    # (`layers`) fields.
    assert xor(len(net.layer) > 0, len(net.layers) > 0), \
        "Net cannot have both new and old layer types."
    if net.layer:
        net_layers = net.layer
    else:
        net_layers = net.layers
    for layer in net_layers:
        # The original used map() here; on Python 3 the resulting
        # iterator has no len() and `not in` would consume it, so build
        # a real list instead.
        include_phase_list = [inc.phase for inc in layer.include]
        # Skip layers whose include rules exclude the active phase.
        if len(include_phase_list) > 0 and self.phase_map[
                self.phase] not in include_phase_list:
            continue
        new_layer = layers.Unknown({})
        new_layer.p = layer
        self.layers.append(new_layer)
    return net
def eventFilter(self, source, event):
    """Qt event filter: on editor Close, merge the edited layer prototxt
    back into the net (insert a new layer or replace the selected one);
    on MouseButtonPress, prompt for a prototxt save path."""
    import sys
    if event.type() == QtCore.QEvent.Close:
        print 'Closed'
        print self.treeWidget.currentIndex().row()
        #self.lineEditSavePath.setText(self.newWidget.toPlainText())
        self.layerhandler = caffe_pb2.NetParameter()
        #print self.newWidget.toPlainText()
        if (self.isNewLayer == True and self.newWidget.isSubmitted == True):
            # New layer: parse the editor text and insert it at the
            # currently selected tree row.
            text_format.Merge(
                self.newWidget.textEdit.toPlainText().__str__(),
                self.layerhandler)
            self.textEdit.setText(str(self.protohandler))
            self.insertLayer(self.protohandler,
                             self.treeWidget.currentIndex().row(),
                             self.layerhandler.__str__())
            print '--------------------------------------------------------------'
            print self.protohandler
            self.textEdit.setText(str(self.protohandler))
            self.loadTreeWidget()
            self.isNewLayer = False
        else:
            # NOTE(review): returns None here although eventFilter is
            # expected to return a bool — confirm Qt tolerates this.
            if (self.newWidget.isSubmitted == False):
                return
            # Edit of an existing layer: replace it in place (row - 1
            # maps the tree index to the layer index).
            text_format.Merge(
                self.newWidget.textEdit.toPlainText().__str__(),
                self.layerhandler)
            self.protohandler.layer[self.treeWidget.currentIndex().row() -
                                    1].CopyFrom(self.layerhandler.layer[0])
            self.textEdit.setText(str(self.protohandler))
            self.loadTreeWidget()
    if event.type() == QtCore.QEvent.MouseButtonPress:
        self.lineEditSavePath.setText(
            QtGui.QFileDialog.getSaveFileName(self, self.tr("Open File"),
                                              str(self.root + '/net/data'),
                                              '*.prototxt'))
    return QtGui.QWidget.eventFilter(self, source, event)
def read_deploy_into_proto(deploy_file, net_proto_layer):
    """Parse *deploy_file*, drop the trailing silence and normalize layers,
    force every BatchNorm layer out of global-stats mode, and append the
    remaining layers to *net_proto_layer*; returns *net_proto_layer*."""
    parsed = caffe_pb2.NetParameter()
    with open(deploy_file, 'r') as handle:
        text_format.Merge(handle.read(), parsed)
    # #delete data layer
    # del parsed.layer[0]
    # delete slience layer
    del parsed.layer[-1]
    # delete normalize layer
    del parsed.layer[-1]
    # set use_global_stats to false on every batchnorm layer
    for layer in parsed.layer:
        if layer.type.find('BatchNorm') >= 0:
            layer.batch_norm_param.use_global_stats = False
    net_proto_layer.layer.extend(parsed.layer)
    return net_proto_layer
def read_deploy_into_proto_changedp(deploy_file, net_proto_layer,
                                    bn_use_global_stats):
    """Parse *deploy_file*, remove Softmax layers, set every BatchNorm's
    use_global_stats, retype depthwise ("dw") Convolution layers, and
    append the result to *net_proto_layer*; returns *net_proto_layer*."""
    net_proto = caffe_pb2.NetParameter()
    with open(deploy_file, 'r') as f:
        text_format.Merge(f.read(), net_proto)
    # Remove Softmax layers. Iterate indices in reverse: the original
    # deleted while iterating forward with enumerate, which skips the
    # element following each removal (and could miss Softmax layers).
    for index in reversed(range(len(net_proto.layer))):
        if net_proto.layer[index].type == "Softmax":
            del net_proto.layer[index]
    # reset all batchnorm use_global_stats to the requested value:
    for elem in net_proto.layer:
        if elem.type == "BatchNorm":
            elem.batch_norm_param.use_global_stats = bn_use_global_stats
        # Convolutions named like depthwise layers get the dedicated type.
        if elem.name.find("dw") >= 0 and elem.type == "Convolution":
            elem.type = "DepthwiseConvolution"
    net_proto_layer.layer.extend(net_proto.layer)
    return net_proto_layer