Example #1
def run_spinn(self, example, use_internal_parser=False, validate_transitions=True):
    # Run the shared SPINN encoder; the returned states hold the premise
    # and hypothesis halves of the batch back to back.
    state_both, transition_acc, transition_loss = super(SentencePairModel, self).run_spinn(
        example, use_internal_parser, validate_transitions)
    batch_size = len(state_both) // 2  # integer division so the slices below stay valid
    h_premise = get_h(torch.cat(state_both[:batch_size], 0), self.hidden_dim)
    h_hypothesis = get_h(torch.cat(state_both[batch_size:], 0), self.hidden_dim)
    return [h_premise, h_hypothesis], transition_acc, transition_loss
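The get_h helper is not shown in these snippets. A minimal sketch of what it could look like, assuming each stack entry stores the LSTM h and c states concatenated along the last dimension so that the hidden part is the first hidden_dim columns:

import torch

def get_h(state, hidden_dim):
    # Assumed layout: state has shape (batch, 2 * hidden_dim) holding [h; c].
    # Return only the h half, shape (batch, hidden_dim).
    return state[:, :hidden_dim]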
Example #2
def run_spinn(self, example, use_internal_parser=False, validate_transitions=True):
    # Single-sentence variant: run the SPINN encoder once and extract the
    # hidden state for each example in the batch.
    state, transition_acc, transition_loss = super(SentenceModel, self).run_spinn(
        example, use_internal_parser, validate_transitions)
    h = get_h(torch.cat(state, 0), self.hidden_dim)
    return [h], transition_acc, transition_loss
Example #3
def reduce_phase_hook(self, lefts, rights, trackings, reduce_stacks, r_idxs=None):
    # After each REDUCE, record the hidden state now on top of every affected
    # stack. izip is Python 2's itertools.izip; use zip on Python 3.
    for idx, stack in izip(r_idxs, reduce_stacks):
        h = get_h(stack[-1], self.hidden_dim)
        assert h.size() == torch.Size([1, self.hidden_dim]), 'hsize: {}'.format(h.size())
        self.states[idx].append(h)
Example #4
def shift_phase_hook(self, tops, trackings, stacks, idxs):
    # After each SHIFT, record the hidden state now on top of every affected
    # stack (same bookkeeping as the reduce hook above).
    for idx, stack in izip(idxs, stacks):
        h = get_h(stack[-1], self.hidden_dim)
        assert h.size() == torch.Size([1, self.hidden_dim]), 'hsize: {}'.format(h.size())
        self.states[idx].append(h)
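For context, a minimal sketch of a shift-reduce step that would invoke the hooks in Examples #3 and #4. This is an assumption for illustration, not code from the original source: the transition encoding (0 = SHIFT, 1 = REDUCE), the compose method, and the unbatched composition are all simplifications.

def spinn_step(model, transitions, buffers, stacks, trackings):
    # Partition the batch into SHIFT and REDUCE examples for this step.
    s_idxs, r_idxs = [], []
    tops, lefts, rights = [], [], []
    for i, t in enumerate(transitions):
        if t == 0:
            # SHIFT: push the next buffer element onto this example's stack.
            top = buffers[i].pop()
            stacks[i].append(top)
            tops.append(top)
            s_idxs.append(i)
        else:
            # REDUCE: pop two children and push their composition.
            right, left = stacks[i].pop(), stacks[i].pop()
            stacks[i].append(model.compose(left, right))
            lefts.append(left)
            rights.append(right)
            r_idxs.append(i)
    # Notify the model so it can record per-step states, as in the hooks above.
    if s_idxs:
        model.shift_phase_hook(tops, [trackings[i] for i in s_idxs],
                               [stacks[i] for i in s_idxs], s_idxs)
    if r_idxs:
        model.reduce_phase_hook(lefts, rights, [trackings[i] for i in r_idxs],
                                [stacks[i] for i in r_idxs], r_idxs=r_idxs)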