Example #1
0
    def _lstm_test(self, layers, bidirectional, initial_state,
                   packed_sequence, dropout):
        """Exercise LSTM ONNX export via run_model_test.

        Builds an LstmFlatteningResult model (optionally wrapped for
        packed-sequence input), generates one padded random batch, and
        optionally adds an (h0, c0) initial state and the sequence
        lengths to the model inputs.
        """
        lstm = LstmFlatteningResult(
            RNN_INPUT_SIZE, RNN_HIDDEN_SIZE, layers,
            bidirectional=bidirectional, dropout=dropout)
        # Packed-sequence models additionally consume the lengths tensor.
        model = RnnModelWithPackedSequence(lstm) if packed_sequence else lstm

        # Descending lengths are what pack_padded_sequence expects.
        lengths = sorted(
            (int(n) for n in
             np.random.randint(1, RNN_SEQUENCE_LENGTH + 1, size=RNN_BATCH_SIZE)),
            reverse=True)
        padded = rnn_utils.pad_sequence(
            [Variable(torch.randn(length, RNN_INPUT_SIZE)) for length in lengths])
        model_args = [padded]

        num_directions = 2 if bidirectional else 1

        if initial_state:
            state_shape = (num_directions * layers, RNN_BATCH_SIZE, RNN_HIDDEN_SIZE)
            model_args.append((Variable(torch.randn(*state_shape)),
                               Variable(torch.randn(*state_shape))))
        if packed_sequence:
            model_args.append(Variable(torch.IntTensor(lengths)))
        # A single arg is passed bare; multiple args are passed as a tuple.
        input = model_args[0] if len(model_args) == 1 else tuple(model_args)
        self.run_model_test(model, train=False, batch_size=RNN_BATCH_SIZE,
                            input=input, use_gpu=False)
    def _lstm_test(self, layers, bidirectional, initial_state, packed_sequence,
                   dropout):
        """Exercise LSTM ONNX export, then re-run the exported graph.

        packed_sequence selects the input mode: 0 = plain padded tensor,
        1 = packed sequence, 2 = packed sequence with batch-first layout.
        After the initial run_model_test pass, the exported ONNX IR is
        executed again with a different batch size.
        """
        model = LstmFlatteningResult(RNN_INPUT_SIZE, RNN_HIDDEN_SIZE, layers,
                                     bidirectional=bidirectional,
                                     dropout=dropout)
        if packed_sequence == 1:
            model = RnnModelWithPackedSequence(model, False)
        elif packed_sequence == 2:
            model = RnnModelWithPackedSequence(model, True)

        def make_input(batch_size):
            # Descending lengths are what pack_padded_sequence expects.
            lengths = np.random.randint(1, RNN_SEQUENCE_LENGTH + 1,
                                        size=batch_size)
            lengths = sorted(map(int, lengths), reverse=True)
            padded = rnn_utils.pad_sequence(
                [Variable(torch.randn(n, RNN_INPUT_SIZE)) for n in lengths])
            if packed_sequence == 2:
                # Batch-first mode expects (batch, seq, feature) layout.
                padded = padded.transpose(0, 1)
            args = [padded]

            num_directions = 2 if bidirectional else 1

            if initial_state:
                shape = (num_directions * layers, batch_size, RNN_HIDDEN_SIZE)
                args.append((Variable(torch.randn(*shape)),
                             Variable(torch.randn(*shape))))
            if packed_sequence != 0:
                args.append(Variable(torch.IntTensor(lengths)))
            # A single arg is passed bare; multiple args as a tuple.
            return args[0] if len(args) == 1 else tuple(args)

        input = make_input(RNN_BATCH_SIZE)
        self.run_model_test(model, train=False, batch_size=RNN_BATCH_SIZE,
                            input=input, use_gpu=False)

        # The exported graph must also accept a different batch size.
        onnxir, _ = do_export(model, input)
        other_input = make_input(RNN_BATCH_SIZE + 1)
        _ = run_embed_params(onnxir, model, other_input, use_gpu=False)
Example #3
0
    def _lstm_test(self, layers, bidirectional, initial_state, packed_sequence,
                   dropout):
        """Exercise LSTM ONNX export at two batch sizes via run_test.

        packed_sequence selects the input mode: 0 = plain padded tensor,
        1 = packed sequence, 2 = packed sequence with batch-first layout.
        """
        # Batch-first tensor layout only when testing batch-first packing.
        batch_first = packed_sequence == 2
        model = LstmFlatteningResult(RNN_INPUT_SIZE, RNN_HIDDEN_SIZE, layers,
                                     bidirectional=bidirectional,
                                     dropout=dropout,
                                     batch_first=batch_first)
        if packed_sequence == 1:
            model = RnnModelWithPackedSequence(model, False)
        elif packed_sequence == 2:
            model = RnnModelWithPackedSequence(model, True)

        def make_input(batch_size):
            # Descending lengths are what pack_padded_sequence expects.
            lengths = sorted(
                map(int, np.random.randint(1, RNN_SEQUENCE_LENGTH + 1,
                                           size=batch_size)),
                reverse=True)
            padded = rnn_utils.pad_sequence(
                [torch.randn(n, RNN_INPUT_SIZE) for n in lengths],
                batch_first=batch_first)
            args = [padded]

            num_directions = 2 if bidirectional else 1

            if initial_state:
                shape = (num_directions * layers, batch_size, RNN_HIDDEN_SIZE)
                args.append((torch.randn(*shape), torch.randn(*shape)))
            if packed_sequence != 0:
                args.append(torch.IntTensor(lengths))
            # A single arg is passed bare; multiple args as a tuple.
            return args[0] if len(args) == 1 else tuple(args)

        self.run_test(model, make_input(RNN_BATCH_SIZE),
                      batch_size=RNN_BATCH_SIZE)

        # test that the model still runs with a different batch size
        self.run_test(model, make_input(RNN_BATCH_SIZE + 1),
                      batch_size=RNN_BATCH_SIZE + 1)