def forward(net, input_data, deploy=False):
    """Defines and creates the dish-classification network given the net,
    input data and configurations."""
    net.clear_forward()
    image = np.array(input_data["image"])
    if not deploy:
        label = np.array(input_data["label"])
        net.f(NumpyData("label", data=label))
    net.f(NumpyData("image", data=image))
    generate_decapitated_alexnet(net)
    net.f(InnerProduct(name="fc8_dish",
                       bottoms=["fc7"],
                       param_lr_mults=[1.0 * 10, 2.0 * 10],
                       param_decay_mults=[1.0, 0.0],
                       weight_filler=Filler("gaussian", 0.01),
                       bias_filler=Filler("constant", 0.0),
                       num_output=128))
    net.f(Softmax("dish_probs", bottoms=["fc8_dish"]))
    if not deploy:
        net.f(SoftmaxWithLoss(name="loss", bottoms=["fc8_dish", "label"]))
        # net.f(Accuracy(name="dish_accuracy", bottoms=["fc8_dish_23", "label"]))
    if deploy:
        probs = np.array(net.blobs["dish_probs"].data)
        return probs
    return None
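
# Training-loop sketch (an assumption, not part of the original code): the
# forward() above follows apollocaffe's ApolloNet API, so training pairs it
# with backward() and update(). train_dish_net, data_generator and the
# learning rate are hypothetical placeholders.
def train_dish_net(net, data_generator, num_iters=1000):
    for _ in range(num_iters):
        input_data = next(data_generator)  # yields {"image": ..., "label": ...}
        forward(net, input_data, deploy=False)
        net.backward()                     # backprop through the whole net
        net.update(lr=0.01)                # plain SGD step
    return net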
def forward(net, sentence_batches):
    # batch_size, dimension, vocab_size and zero_symbol are module-level
    # globals.
    net.clear_forward()
    batch = next(sentence_batches)
    sentence_batch = pad_batch(batch)
    # Unroll the LSTM for at most 100 time steps.
    length = min(sentence_batch.shape[1], 100)
    assert length > 0
    net.f(NumpyData('lstm_seed', np.zeros((batch_size, dimension))))
    for step in range(length):
        if step == 0:
            # Step 0 is seeded with zero hidden/memory states and a zero
            # word acting as the start symbol.
            prev_hidden = 'lstm_seed'
            prev_mem = 'lstm_seed'
            word = np.zeros(sentence_batch[:, 0].shape)
        else:
            prev_hidden = 'lstm%d_hidden' % (step - 1)
            prev_mem = 'lstm%d_mem' % (step - 1)
            word = sentence_batch[:, step - 1]
        net.f(NumpyData('word%d' % step, word))
        net.f(Wordvec('wordvec%d' % step, dimension, vocab_size,
                      bottoms=['word%d' % step],
                      param_names=['wordvec_param']))
        net.f(Concat('lstm_concat%d' % step,
                     bottoms=[prev_hidden, 'wordvec%d' % step]))
        net.f(LstmUnit('lstm%d' % step,
                       bottoms=['lstm_concat%d' % step, prev_mem],
                       param_names=['lstm_input_value', 'lstm_input_gate',
                                    'lstm_forget_gate', 'lstm_output_gate'],
                       tops=['lstm%d_hidden' % step, 'lstm%d_mem' % step],
                       num_cells=dimension))
        net.f(Dropout('dropout%d' % step, 0.16,
                      bottoms=['lstm%d_hidden' % step]))
        net.f(NumpyData('label%d' % step, sentence_batch[:, step]))
        net.f(InnerProduct('ip%d' % step, vocab_size,
                           bottoms=['dropout%d' % step],
                           param_names=['softmax_ip_weights',
                                        'softmax_ip_bias']))
        net.f(SoftmaxWithLoss('softmax_loss%d' % step,
                              ignore_label=zero_symbol,
                              bottoms=['ip%d' % step, 'label%d' % step]))
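
# pad_batch is not shown here; below is a minimal sketch of what forward()
# above assumes (an illustration, not the original helper): each sentence is
# right-padded with zero_symbol to form a rectangular array, which is why
# the per-step losses pass ignore_label=zero_symbol.
def pad_batch(batch):
    max_len = max(len(sentence) for sentence in batch)
    padded = np.full((len(batch), max_len), zero_symbol, dtype=np.int32)
    for row, sentence in enumerate(batch):
        padded[row, :len(sentence)] = sentence
    return padded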
def generate_losses(net, net_config):
    """Generates the two losses used for ReInspect: the hungarian loss and
    the final box_loss, which represents the final softmax confidence
    loss."""
    net.f("""
        name: "hungarian"
        type: "HungarianLoss"
        bottom: "bbox_concat"
        bottom: "boxes"
        bottom: "box_flags"
        top: "hungarian"
        top: "box_confidences"
        top: "box_assignments"
        loss_weight: %s
        hungarian_loss_param {
          match_ratio: 0.5
          permute_matches: true
        }""" % net_config["hungarian_loss_weight"])
    net.f(SoftmaxWithLoss("box_loss",
                          bottoms=["score_concat", "box_confidences"]))
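
# Usage sketch (an assumption, not part of the original code): in the
# ReInspect forward pass, generate_losses would be called last, after the
# network body has produced the "bbox_concat" and "score_concat" blobs and
# the ground-truth "boxes" and "box_flags" blobs have been loaded; the loss
# weight below is a placeholder value.
#
#     generate_losses(net, {"hungarian_loss_weight": 0.03})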
def forward(net, input_data, net_config, deploy=False):
    """Defines and creates the word-prediction LSTM network given the net,
    input data and configurations."""
    net.clear_forward()
    net.f(NumpyData("wordvec_layer",
                    data=np.array(input_data["wordvec_layer"])))  # 128*38*100*1
    net.f(NumpyData("target_words",
                    data=np.array(input_data["target_words"])))  # 128*100*1*1

    tops = []
    slice_point = []
    for i in range(net_config['max_len']):
        tops.append('label%d' % i)
        if i != 0:
            slice_point.append(i)
    net.f(Slice("label_slice_layer", slice_dim=1,
                bottoms=["target_words"], tops=tops,
                slice_point=slice_point))

    tops = []
    slice_point = []
    for i in range(net_config['max_len']):
        tops.append('target_wordvec%d_4d' % i)
        if i != 0:
            slice_point.append(i)
    net.f(Slice("wordvec_slice_layer", slice_dim=2,
                bottoms=['wordvec_layer'], tops=tops,
                slice_point=slice_point))

    for i in range(net_config["max_len"]):
        # 128*38*1*1 -> 128*38
        net.f("""
            name: "target_wordvec%d"
            type: "Reshape"
            bottom: "target_wordvec%d_4d"
            top: "target_wordvec%d"
            reshape_param {
              shape {
                dim: 0  # copy the dimension from below
                dim: -1
              }
            }
            """ % (i, i, i))
        # net.f(Reshape('target_wordvec%d' % i,
        #               bottoms=['target_wordvec%d_4d' % i], shape=[0, -1]))

    filler = Filler("uniform", net_config["init_range"])
    for i in range(net_config['max_len']):
        if i == 0:
            net.f(NumpyData("dummy_layer",
                            np.zeros((net_config["batch_size"],
                                      net_config["lstm_num_cells"]))))
            net.f(NumpyData("dummy_mem_cell",
                            np.zeros((net_config["batch_size"],
                                      net_config["lstm_num_cells"]))))
        for j in range(net_config['lstm_num_stacks']):
            bottoms = []
            if j == 0:
                bottoms.append('target_wordvec%d' % i)
            if j >= 1:
                bottoms.append('dropout%d_%d' % (j - 1, i))
            if i == 0:
                bottoms.append("dummy_layer")
            else:
                bottoms.append('lstm%d_hidden%d' % (j, i - 1))
            net.f(Concat('concat%d_layer%d' % (j, i), bottoms=bottoms))
            param_names = []
            for k in range(4):
                param_names.append('lstm%d_param_%d' % (j, k))
            bottoms = ['concat%d_layer%d' % (j, i)]
            if i == 0:
                bottoms.append('dummy_mem_cell')
            else:
                bottoms.append('lstm%d_mem_cell%d' % (j, i - 1))
            net.f(LstmUnit('lstm%d_layer%d' % (j, i),
                           net_config["lstm_num_cells"],
                           weight_filler=filler,
                           param_names=param_names,
                           bottoms=bottoms,
                           tops=['lstm%d_hidden%d' % (j, i),
                                 'lstm%d_mem_cell%d' % (j, i)]))
            net.f(Dropout('dropout%d_%d' % (j, i),
                          net_config["dropout_ratio"],
                          bottoms=['lstm%d_hidden%d' % (j, i)]))

    bottoms = []
    for i in range(net_config['max_len']):
        bottoms.append('dropout%d_%d' % (net_config['lstm_num_stacks'] - 1, i))
    net.f(Concat('hidden_concat', bottoms=bottoms, concat_dim=0))
    net.f(InnerProduct("inner_product", net_config['vocab_size'],
                       bottoms=["hidden_concat"], weight_filler=filler))
    bottoms = []
    for i in range(net_config['max_len']):
        bottoms.append('label%d' % i)
    net.f(Concat('label_concat', bottoms=bottoms, concat_dim=0))

    if deploy:
        net.f(Softmax("word_probs", bottoms=["inner_product"]))
    else:
        net.f(SoftmaxWithLoss("word_loss",
                              bottoms=["inner_product", "label_concat"],
                              ignore_label=net_config['zero_symbol']))
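
# Deploy-time sketch (an assumption, not part of the original code): since
# hidden_concat stacks the max_len time steps along the batch axis
# (concat_dim=0), row t * batch_size + b of "word_probs" holds the softmax
# for time step t of batch item b. get_word_probs is a hypothetical helper.
def get_word_probs(net, input_data, net_config):
    forward(net, input_data, net_config, deploy=True)
    probs = np.array(net.blobs["word_probs"].data)
    return probs.reshape((net_config['max_len'],
                          net_config['batch_size'],
                          net_config['vocab_size']))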
def forward(net, input_data, net_config, phase='train', deploy=False):
    """Defines and creates the word-prediction LSTM network given the net,
    input data and configurations. In the 'test' phase, each step's argmax
    prediction is fed back in as the next input."""
    net.clear_forward()
    batch_ws_i = input_data["ws_i"]
    batch_stop_i = [net_config['max_len']] * net_config['batch_size']
    wordvec_layer = input_data["wordvec_layer"]  # 128*38*100*1
    net.f(NumpyData("target_words",
                    data=np.array(input_data["target_words"])))  # 128*100*1*1

    tops = []
    slice_point = []
    for i in range(net_config['max_len']):
        tops.append('label%d' % i)
        if i != 0:
            slice_point.append(i)
    net.f(Slice("label_slice_layer", slice_dim=1,
                bottoms=["target_words"], tops=tops,
                slice_point=slice_point))

    # Start symbol, 128*38.
    net.f(NumpyData("target_wordvec%d" % 0, data=wordvec_layer[:, :, 0, 0]))

    filler = Filler("uniform", net_config["init_range"])
    for i in range(net_config['max_len']):
        if i == 0:
            net.f(NumpyData("dummy_layer",
                            np.zeros((net_config["batch_size"],
                                      net_config["lstm_num_cells"]))))
            net.f(NumpyData("dummy_mem_cell",
                            np.zeros((net_config["batch_size"],
                                      net_config["lstm_num_cells"]))))
        for j in range(net_config['lstm_num_stacks']):
            bottoms = []
            if j == 0:
                bottoms.append('target_wordvec%d' % i)
            if j >= 1:
                bottoms.append('dropout%d_%d' % (j - 1, i))
            if i == 0:
                bottoms.append("dummy_layer")
            else:
                bottoms.append('lstm%d_hidden%d' % (j, i - 1))
            net.f(Concat('concat%d_layer%d' % (j, i), bottoms=bottoms))
            param_names = []
            for k in range(4):
                param_names.append('lstm%d_param_%d' % (j, k))
            bottoms = ['concat%d_layer%d' % (j, i)]
            if i == 0:
                bottoms.append('dummy_mem_cell')
            else:
                bottoms.append('lstm%d_mem_cell%d' % (j, i - 1))
            net.f(LstmUnit('lstm%d_layer%d' % (j, i),
                           net_config["lstm_num_cells"],
                           weight_filler=filler,
                           param_names=param_names,
                           bottoms=bottoms,
                           tops=['lstm%d_hidden%d' % (j, i),
                                 'lstm%d_mem_cell%d' % (j, i)]))
            net.f(Dropout('dropout%d_%d' % (j, i),
                          net_config["dropout_ratio"],
                          bottoms=['lstm%d_hidden%d' % (j, i)]))
        net.f(InnerProduct("ip%d" % i, net_config['vocab_size'],
                           bottoms=['dropout%d_%d'
                                    % (net_config['lstm_num_stacks'] - 1, i)],
                           weight_filler=filler))
        if i < net_config['max_len'] - 1:
            tar_wordvec = np.array(wordvec_layer[:, :, i + 1, 0])  # 128*38
            if phase == 'test':
                # Replace the teacher-forced input with the network's own
                # one-hot argmax prediction for rows past their ws_i.
                net.f(Softmax("word_probs%d" % i, bottoms=["ip%d" % i]))
                probs = net.blobs["word_probs%d" % i].data
                for bi in range(net_config['batch_size']):
                    if batch_ws_i[bi] <= i < batch_stop_i[bi]:
                        vec = [0] * net_config["vocab_size"]
                        peak_index = np.argmax(probs[bi, :])
                        if peak_index == net_config['whitespace_symbol']:
                            batch_stop_i[bi] = i + 1
                        vec[peak_index] = 1
                        tar_wordvec[bi, :] = vec
            net.f(NumpyData("target_wordvec%d" % (i + 1), data=tar_wordvec))

    bottoms = []
    for i in range(net_config['max_len']):
        bottoms.append("ip%d" % i)
    net.f(Concat('ip_concat', bottoms=bottoms, concat_dim=0))
    bottoms = []
    for i in range(net_config['max_len']):
        bottoms.append('label%d' % i)
    net.f(Concat('label_concat', bottoms=bottoms, concat_dim=0))

    if deploy:
        net.f(Softmax("word_probs", bottoms=["ip_concat"]))
    net.f(SoftmaxWithLoss("word_loss",
                          bottoms=["ip_concat", "label_concat"],
                          ignore_label=net_config['zero_symbol']))
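
# Test-time decoding sketch (an assumption, not part of the original code):
# with phase='test' the unroll above feeds each argmax back in as the next
# input, so the per-step "word_probs%d" blobs hold the free-running
# predictions. decode_batch is a hypothetical helper name.
def decode_batch(net, input_data, net_config):
    forward(net, input_data, net_config, phase='test')
    predictions = []
    for i in range(net_config['max_len'] - 1):
        probs = np.array(net.blobs['word_probs%d' % i].data)
        # Flatten any trailing singleton blob dims before taking the argmax.
        predictions.append(np.argmax(probs.reshape(probs.shape[0], -1), axis=1))
    return np.array(predictions).T  # shape: batch_size x (max_len - 1)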