def make_channel_pruning_model(input_file, output_file, bottom_layer, rank):
    """Shrink num_output of `bottom_layer` to `rank` and write the pruned prototxt."""
    with open(input_file, 'r') as fp:
        nett = NetParameter()
        text_format.Parse(fp.read(), nett)

    def _create_new(name, layer):
        new_ = LayerParameter()
        new_.CopyFrom(layer)
        new_.name = name
        new_.convolution_param.ClearField('num_output')
        return new_

    new_layer = []
    for layer in nett.layer:
        if layer.name != bottom_layer:
            new_layer.append(layer)
        else:
            newConv = _create_new(bottom_layer, layer)
            conv_param = newConv.convolution_param
            conv_param.num_output = rank
            new_layer.append(newConv)

    new_net = NetParameter()
    new_net.CopyFrom(nett)
    del new_net.layer[:]
    new_net.layer.extend(new_layer)
    with open(output_file, 'w') as fp:
        fp.write(text_format.MessageToString(new_net))
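
# Example usage (illustrative sketch; the prototxt paths, layer name, and rank
# are hypothetical). The imports below are the ones make_channel_pruning_model
# relies on for NetParameter, LayerParameter, and text_format.
from caffe.proto.caffe_pb2 import NetParameter, LayerParameter
from google.protobuf import text_format

make_channel_pruning_model('train_val.prototxt',         # hypothetical input
                           'train_val_pruned.prototxt',  # hypothetical output
                           bottom_layer='conv3_1',       # convolution to shrink
                           rank=128)                     # new num_output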
def resize_network(netdef, name2num, verbose=True):
    """Change the number of output channels of convolution layers.

    netdef: network parameters (NetParameter)
    name2num: maps a layer name to its new number of output channels
    verbose: if True, print the changes
    """
    new_layers = []
    for l in netdef.layer:
        newl = LayerParameter()
        newl.CopyFrom(l)
        if l.name in name2num:
            if l.type == 'Convolution':
                if verbose:
                    print(l.name + ': \tChanging num_output from ' +
                          str(l.convolution_param.num_output) + ' to ' +
                          str(name2num[l.name]))
                newl.convolution_param.num_output = name2num[l.name]
                # Keep grouped convolutions consistent with the new channel count.
                if newl.convolution_param.group > 1:
                    newl.convolution_param.group = name2num[l.name]
            else:
                if verbose:
                    print('Layer ' + l.name + ' is not a convolution, skipping')
        new_layers.append(newl)

    new_pnet = NetParameter()
    new_pnet.CopyFrom(netdef)
    del new_pnet.layer[:]
    new_pnet.layer.extend(new_layers)
    return new_pnet
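
# Example usage (illustrative sketch; the file names and channel counts are
# hypothetical). resize_network operates on an in-memory NetParameter, so the
# caller parses the prototxt and serializes the result itself:
from caffe.proto.caffe_pb2 import NetParameter, LayerParameter
from google.protobuf import text_format

netdef = NetParameter()
with open('deploy.prototxt', 'r') as fp:                    # hypothetical input
    text_format.Merge(fp.read(), netdef)
slim = resize_network(netdef, {'conv1': 32, 'conv2': 64})   # new num_output per layer
with open('deploy_slim.prototxt', 'w') as fp:               # hypothetical output
    fp.write(text_format.MessageToString(slim))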
def make_lowrank_model(input_file, conf, output_file):
    with open(input_file, 'r') as fp:
        net = NetParameter()
        pb.text_format.Parse(fp.read(), net)

    new_layers = []
    for layer in net.layer:
        if layer.name not in conf:
            new_layers.append(layer)
            continue
        v, h = vh_decompose(layer, conf[layer.name])
        new_layers.extend([v, h])

    new_net = NetParameter()
    new_net.CopyFrom(net)
    del new_net.layer[:]
    new_net.layer.extend(new_layers)
    with open(output_file, 'w') as fp:
        fp.write(pb.text_format.MessageToString(new_net))
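
# Example usage (illustrative sketch; the paths and ranks are hypothetical).
# conf maps a layer name to the rank passed to vh_decompose, whose two
# returned layers (v, h) replace the original convolution:
lowrank_conf = {
    'conv2_1': 24,   # hypothetical rank
    'conv3_1': 48,   # hypothetical rank
}
make_lowrank_model('train_val.prototxt',           # hypothetical input
                   lowrank_conf,
                   'train_val_lowrank.prototxt')   # hypothetical output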
def create_approx_netdef(input_file, output_file, btd_config):
    with open(input_file, 'r') as fp:
        net = NetParameter()
        txtf.Merge(fp.read(), net)

    new_layers = []
    for layer in net.layer:
        if layer.name not in btd_config:
            new_layers.append(layer)
            continue
        s, t, r = btd_config[layer.name]
        a, b, c = decompose2abc(layer, s, t, r)
        new_layers.extend([a, b, c])

    new_net = NetParameter()
    new_net.CopyFrom(net)
    del new_net.layer[:]
    new_net.layer.extend(new_layers)
    with open(output_file, 'w') as fp:
        fp.write(txtf.MessageToString(new_net))
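
# Example usage (illustrative sketch; the paths and the (s, t, r) settings are
# hypothetical). btd_config maps a layer name to the tuple forwarded to
# decompose2abc, whose three returned layers replace the original one:
btd_config = {
    'conv4_1': (4, 4, 8),   # hypothetical (s, t, r) for this convolution
}
create_approx_netdef('deploy.prototxt',      # hypothetical input
                     'deploy_btd.prototxt',  # hypothetical output
                     btd_config)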
def decompose_layer_fc(txt_idx, orig_net, priv_conf, net, priv_net, orig_file,
                       orig_weight, output_file, output_weight, i, log,
                       net_type, check_set, layer_list):
    filename = 'stage1_buf_{}.txt'.format(txt_idx)
    # NOTE: new_conf aliases priv_conf, so the rank updates below also mutate
    # the caller's configuration.
    new_conf = priv_conf
    acc = {}
    loss = {}
    for num in range(len(check_set)):
        new_layers = []
        buf_f = open(filename, 'a+')
        for layer in net.layer:
            if layer.name in priv_conf["vbmf"]["cv"].keys():
                idx = layer_list.index(layer.name)
                g = layer.convolution_param.group
                bottom = layer.bottom[0]
                weight_a = priv_net.params[layer.name + '_v'][0].data
                weight_b = priv_net.params[layer.name + '_h'][0].data
                k = check_set[num][idx] * g
                if bottom == 'data':
                    a, b = cv_channel(layer, k)
                else:
                    a, b = cv_spatial(layer, k)
                new_layers.extend([a, b])
                new_conf["vbmf"]["cv"][layer.name] = k
            elif layer.name in priv_conf["vbmf"]["fc"].keys():
                idx = layer_list.index(layer.name)
                weight_a = priv_net.params[layer.name + '_v'][0].data
                weight_b = priv_net.params[layer.name + '_h'][0].data
                k = check_set[num][idx]
                a, b = fc_decomp_2d(layer, k)
                new_layers.extend([a, b])
                new_conf["vbmf"]["fc"][layer.name] = k
            else:
                new_layers.append(layer)

        new_net = NetParameter()
        new_net.CopyFrom(net)
        del new_net.layer[:]
        new_net.layer.extend(new_layers)

        # File write - prototxt
        out = os.path.splitext(
            os.path.basename(output_file))[0] + '_{}_{}.prototxt'.format(i, num)
        out_dir = os.path.dirname(output_file)
        comp_file = '{}/{}'.format(out_dir, out)
        with open(comp_file, 'w') as fp:
            fp.write(pb.text_format.MessageToString(new_net))
        # print('[Layer] Wrote compressed prototxt to: {:s}'.format(comp_file))

        out = os.path.splitext(
            os.path.basename(output_weight))[0] + '_{}_{}.caffemodel'.format(i, num)
        out_dir = os.path.dirname(output_weight)
        comp_weight = '{}/{}'.format(out_dir, out)

        # Decomposition
        decomp_weights(orig_net, num, net.layer, orig_file, orig_weight,
                       new_conf, comp_file, comp_weight, priv_net, check_set,
                       layer_list)

        # Accuracy & loss test
        out = os.path.splitext(
            os.path.basename(log))[0] + '_{}_{}.log'.format(i, num)
        out_dir = os.path.dirname(log)
        comp_log = '{}/{}'.format(out_dir, out)
        tmp_acc_loss = call_caffe_test(comp_file, comp_weight, comp_log,
                                       txt_idx, net_type)
        acc[num] = tmp_acc_loss[0][0]
        loss[num] = tmp_acc_loss[0][1]
        buf_f.write('{} {} {} {}\n'.format(i, num, acc[num], loss[num]))
        buf_f.close()

    os.system('./remove_model.sh {} {}'.format(txt_idx + 1, net_type))
    return acc, loss
def test_duplicate(self):
    fpath = os.path.join(os.path.dirname(ROOT_PKG_PATH), TEST_DATA_DIRNAME,
                         TEST_NET_FILENAME)
    n1 = Parser().from_net_params_file(fpath)
    n2 = Parser().from_net_params_file(fpath)
    n1_tmp = NetParameter()
    n1_tmp.CopyFrom(n1)
    n2_tmp = NetParameter()
    n2_tmp.CopyFrom(n2)
    s = mrg.merge_indep_net_spec([n1_tmp, n2_tmp])
    assert_is_not_none(s)
    assert_is_instance(s, str)
    assert_greater(len(s), 0)
    n = NetParameter()
    text_format.Merge(s, n)
    assert_is_not_none(n)

    # Data layer from first network
    for l in n.layer:
        if l.type.lower() == 'data':
            for l1 in n1.layer:
                if l1.type.lower() == 'data':
                    dat_phase = [x.phase for x in l.include]
                    # compare test with test and train with train
                    if dat_phase == [x.phase for x in l1.include]:
                        assert_is_not(l.top, l1.top)
                        assert_list_equal(list(l.top), list(l1.top))
                        assert_equal(l.data_param.source, l1.data_param.source)
                        assert_equal(l.data_param.backend, l1.data_param.backend)
                        assert_equal(l.data_param.batch_size,
                                     l1.data_param.batch_size)
                        assert_equal(l.transform_param.scale,
                                     l1.transform_param.scale)

    # For non-data layers
    # back up merged net
    for ni in [n1, n2]:
        for l1 in ni.layer:
            found = False
            if l1.type.lower() != 'data':
                for l in n.layer:
                    if l.type.lower() == l1.type.lower() and \
                            [t.split('_nidx')[0] for t in l.top] == list(l1.top) and \
                            [b.split('_nidx')[0] for b in l.bottom] == list(l1.bottom):
                        assert_true(l.name.startswith(l1.name))
                        fnames1 = [f.name for f in l1.DESCRIPTOR.fields]
                        fnames = [f.name for f in l.DESCRIPTOR.fields]
                        assert_list_equal(fnames, fnames1)
                        l.ClearField('name')
                        l.ClearField('top')
                        l.ClearField('bottom')
                        l1.ClearField('name')
                        l1.ClearField('top')
                        l1.ClearField('bottom')
                        assert_equal(text_format.MessageToString(l),
                                     text_format.MessageToString(l1))
                        found = True
                    else:
                        continue
                # skip for data layers
                assert_true(found,
                            "Failed to find %s in merged network!" % (l1.name,))
new_layers = []
for l in net_par.layer:
    newl = LayerParameter()
    newl.CopyFrom(l)
    if l.name in {'mbox_loc', 'mbox_conf', 'mbox_priorbox'}:
        # Drop bottoms that come from the pruned blocks 14-17.
        newbot = [e for e in l.bottom
                  if ('14' not in e) and ('15' not in e)
                  and ('16' not in e) and ('17' not in e)]
        del newl.bottom[:]
        newl.bottom.extend(newbot)
        new_layers.append(newl)
    elif (('14' not in l.name) and ('15' not in l.name)
          and ('16' not in l.name) and ('17' not in l.name)):
        new_layers.append(newl)

newnet_par = NetParameter()
newnet_par.CopyFrom(net_par)
del newnet_par.layer[:]
newnet_par.layer.extend(new_layers)
with open('models/ssd_face_pruned/face_' + mode + '.prototxt', 'w') as f:
    f.write(txtf.MessageToString(newnet_par))

new_net = caffe.Net('models/ssd_face_pruned/face_test.prototxt',
                    'models/ssd_face/best_bn_full.caffemodel', caffe.TEST)
# save pruned net parameters
new_net.save('models/ssd_face_pruned/short_init.caffemodel')
print('\nDeleting layers 14-17')
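
# Quick sanity check (illustrative sketch): after the surgery above, none of
# the remaining parameterized layers should reference the pruned blocks 14-17.
assert all(('14' not in name) and ('15' not in name) and
           ('16' not in name) and ('17' not in name)
           for name in new_net.params.keys())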
def make_decomp_file(input_file, conf, output_file, sim_type, weight,
                     output_weight, args):
    with open(input_file, 'r') as fp:
        net = NetParameter()
        pbt.Parse(fp.read(), net)

    filename = '../../base_models/{}/eigenvalue.conf'.format(args.net_type)
    f = open(filename, 'w')
    f.write('{\n\t"layer":{\n')
    i = 1
    idx = 0
    new_layers = []
    layer_dic = {}
    for layer in net.layer:
        if layer.name in conf["vbmf"]["cv"].keys():
            print('layer.name = {}'.format(layer.name))
            layer_dic[layer.name] = idx
            bottom = layer.bottom[0]
            k = conf["vbmf"]["cv"][layer.name]
            if bottom == 'data':
                a, b = cv_channel(layer, k)
                new_layers.extend([a, b])
            else:
                g = layer.convolution_param.group
                a, b = cv_spatial(layer, k * g)
                new_layers.extend([a, b])
            idx += 1
        elif layer.name in conf["vbmf"]["fc"].keys():
            layer_dic[layer.name] = idx
            k = conf["vbmf"]["fc"][layer.name]
            a, b = fc_decomp_2d(layer, k)
            new_layers.extend([a, b])
        else:
            new_layers.append(layer)

    new_net = NetParameter()
    new_net.CopyFrom(net)
    del new_net.layer[:]
    new_net.layer.extend(new_layers)

    # File write - eigenvalue.conf
    sorted_dic = sorted(layer_dic.items(), key=operator.itemgetter(1))
    for j in range(len(sorted_dic) - 1):
        f.write('"{}": {},\n'.format(sorted_dic[j][0], sorted_dic[j][1]))
    f.write('"{}": {}\n {}'.format(sorted_dic[-1][0], sorted_dic[-1][1], '},'))
    f.close()

    # File write - prototxt
    out = os.path.splitext(
        os.path.basename(output_file))[0] + '_{}.prototxt'.format(i)
    out_dir = os.path.dirname(output_file)
    filename = '{}/{}'.format(out_dir, out)
    with open(filename, 'w') as fp:
        fp.write(pb.text_format.MessageToString(new_net))
    print('[Total] Wrote compressed prototxt to: {:s}'.format(filename))

    if sim_type == 'weight':
        out = os.path.splitext(
            os.path.basename(output_weight))[0] + '_{}.caffemodel'.format(i)
        out_dir = os.path.dirname(output_weight)
        out_weight = '{}/{}'.format(out_dir, out)
        decomp_weights(net.layer, input_file, weight, conf, filename,
                       out_weight, i, args)
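
# Example configuration and call (illustrative sketch; every path, layer name,
# and rank below is hypothetical). conf follows the structure the function
# reads: a "vbmf" entry with per-layer ranks for convolutions ("cv") and
# fully connected layers ("fc").
from argparse import Namespace

example_conf = {
    "vbmf": {
        "cv": {"conv2": 16, "conv3": 32},   # hypothetical convolution ranks
        "fc": {"fc6": 128},                 # hypothetical fully-connected rank
    }
}
make_decomp_file('train_val.prototxt',         # hypothetical input prototxt
                 example_conf,
                 'compressed.prototxt',        # hypothetical output prototxt
                 'weight',                     # also decompose the weights
                 'weights.caffemodel',         # hypothetical original weights
                 'compressed.caffemodel',      # hypothetical output weights
                 Namespace(net_type='vgg16'))  # hypothetical args.net_type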