Example #1
    def __init__(
        self,
        encoder_params,
        linear_params,
        batch_size=32,
        seed=1,
    ):
        """Neural network-based classifier

        Arguments:
            encoder_params {Dict[str, Any]} -- encoder parameters
            linear_params {Dict[str, Any]} -- dense layer parameters

        Keyword Arguments:
            batch_size {int} -- batch size (default: {32})
            seed {int} -- random seed (default: {1})
        """
        super(Model, self).__init__()
        random.seed(seed)
        torch.manual_seed(seed)
        self.util = Util()
        self.batch_size = batch_size
        self.use_cuda = self.util.use_cuda
        self.encoders = self._init_encoders(encoder_params)
        self.linears = self._init_linears(linear_params)
        self.optimizer = optim.SGD(self.parameters(), lr=0.01)
        self.criterion = nn.NLLLoss()
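
A minimal construction sketch may be useful here. The key names inside encoder_params and linear_params below are assumptions inferred from the constructor, not the actual msnc configuration schema.

    # Hypothetical configuration; the dict keys are illustrative guesses.
    encoder_params = [{'xdim': 10000, 'edim': 100, 'hdim': 128, 'lnum': 1}]
    linear_params = [{'indim': 256, 'outdim': 2}]

    model = Model(encoder_params, linear_params, batch_size=64, seed=42)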
Example #2
    def __init__(self, examples, x_to_index=None, isregression=False):
        self.util = Util()
        self.pad_index = self.util.PAD_INDEX
        self.unk_index = self.util.UNK_INDEX

        X_sets = [[example['Xs'][i] for example in examples]
                  for i in range(len(examples[0]['Xs']))]

        self.x_to_index = x_to_index
        if x_to_index is None:
            self.x_to_index = []
            for i in range(len(examples[0]['Xs'])):
                xs = [x for X in X_sets[i] for x in X]
                self.x_to_index.append(self._make_index(xs))

        self.Xs = []
        self.raw_Xs = []  # for debug
        for i in range(len(examples[0]['Xs'])):
            self.Xs.append(self._degitize(X_sets[i], self.x_to_index[i]))
            self.raw_Xs.append(X_sets[i])

        # indices
        self.indices = [example['index'] for example in examples]

        if isregression:
            self.ys = [math.log10(example['y']) for example in examples]
        else:
            self.ys = [example['y'] for example in examples]
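
The attribute accesses in this constructor imply a specific input layout: each example is a dict with 'Xs' (one token sequence per input field), an 'index', and a label 'y'. A sketch of that layout under those assumptions (the class name is not shown in the excerpt, so Dataset below is a placeholder):

    # Assumed input layout, inferred from the dict accesses above.
    examples = [
        {
            'Xs': [['a', 'cat', 'sat'], ['det', 'noun', 'verb']],  # one sequence per field
            'index': 0,  # original position, kept in self.indices
            'y': 1,      # class label; must be positive when isregression=True (log10 is applied)
        },
    ]
    dataset = Dataset(examples)  # placeholder name; only __init__ is shown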
Example #3
    def __init__(self,
                 xdim,
                 edim,
                 hdim,
                 lnum,
                 use_bidirectional=True,
                 use_lstm=True,
                 dropout=0.2,
                 **kwargs):
        """RNN encoder

        Arguments:
            xdim {int} -- input feature dimension
            edim {int} -- embedding dimension
            hdim {int} -- hidden vector dimension
            lnum {int} -- number of stacked RNN layers

        Keyword Arguments:
            use_bidirectional {bool} -- if True, use a bidirectional RNN (default: {True})  # NOQA
            use_lstm {bool} -- if True, use an LSTM (default: {True})
            dropout {float} -- dropout ratio (default: {0.2})
        """
        super(RecurrentEncoder, self).__init__()
        self.util = Util()
        self.pad_index = self.util.PAD_INDEX
        self.xdim = xdim
        self.edim = edim
        self.hdim = hdim
        self.lnum = lnum
        self.use_bidirectional = use_bidirectional
        self.use_lstm = use_lstm
        self.dropout = dropout
        self.use_cuda = self.util.use_cuda
        self.embedding = self._init_embedding()
        self.rnn = self._init_rnn()
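
A minimal construction sketch; the dimension values below are illustrative, not taken from the msnc repository.

    encoder = RecurrentEncoder(
        xdim=10000,  # input feature dimension
        edim=100,    # embedding dimension
        hdim=128,    # hidden vector dimension
        lnum=2,      # number of stacked RNN layers
        use_bidirectional=True,
        use_lstm=True,
        dropout=0.2,
    )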
Example #4
    def __init__(self, xdim, edim, dropout=0.2, **kwargs):
        """Averaging word vectors encoder

        Arguments:
            xdim {int} -- input feature dimension
            edim {int} -- embedding dimension

        Keyword Arguments:
            dropout {float} -- dropout ratio (default: {0.2})
        """
        super(AverageEncoder, self).__init__()
        self.util = Util()
        self.pad_index = self.util.PAD_INDEX
        self.xdim = xdim
        self.edim = edim
        self.dropout = dropout
        self.use_cuda = self.util.use_cuda
        self.embedding = self._init_embedding()
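
The forward pass is not part of this excerpt, but an encoder that averages word vectors typically takes a masked mean over the embedded tokens, excluding padding. A sketch under that assumption, not msnc's actual code:

    import torch

    def masked_average(embedding, pad_index, x):
        # x: LongTensor of token indices, shape (batch, seq_len);
        # positions equal to pad_index are excluded from the mean.
        e = embedding(x)                               # (batch, seq_len, edim)
        mask = (x != pad_index).unsqueeze(-1).float()  # (batch, seq_len, 1)
        summed = (e * mask).sum(dim=1)                 # (batch, edim)
        counts = mask.sum(dim=1).clamp(min=1.0)        # avoid division by zero
        return summed / counts                         # (batch, edim)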
Example #5
    def __init__(
        self,
        xdim,
        edim,
        **kwargs
    ):
        """Averaging word vectors encoder

        Arguments:
            xdim {int} -- input feature dimension
            edim {int} -- embedding dimension
        """
        super(AverageEncoder, self).__init__()
        self.util = Util()
        self.pad_index = self.util.PAD_INDEX
        self.xdim = xdim
        self.edim = edim
        self.use_cuda = self.util.use_cuda
        self.embedding = self._init_embedding()
Example #6
File: model.py Project: himkt/msnc
    def __init__(
        self,
        encoder_params,
        linear_params,
        epoch_num=100,
        checkpoint_interval=10,
        batch_size=32,
        seed=1,
        save_best_model=True,
    ):
        """Neural Network based classifier

        Arguments:
            encoder_params {Dict[str, Any]} -- encoder parameters
            linear_params {Dict[str, Any]} -- dense layer parameters

        Keyword Arguments:
            epoch_num {int} -- number of epochs (default: {100})
            checkpoint_interval {int} -- save a checkpoint every {checkpoint_interval} epochs (default: {10})  # NOQA
            batch_size {int} -- batch size (default: {32})
            seed {int} -- random seed (default: {1})
            save_best_model {bool} -- if True, keep the model that performs best on the dev set (default: {True})  # NOQA
        """
        super(Model, self).__init__()
        random.seed(seed)
        torch.manual_seed(seed)
        self.util = Util()
        self.epoch_num = epoch_num
        self.checkpoint_interval = checkpoint_interval
        self.batch_size = batch_size
        self.use_cuda = self.util.use_cuda
        self.encoders = self._init_encoders(encoder_params)
        self.linears = self._init_linears(linear_params)
        self.optimizer = optim.SGD(self.parameters(), lr=0.01)
        self.criterion = nn.NLLLoss()

        self._best_dev_accuracy = None
        self._best_epoch = None
        self._log = None
        self._save_best_model = save_best_model
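
Compared with Example #1, this variant adds training bookkeeping: an epoch count, a checkpoint interval, and best-model tracking. The training loop itself is not shown; a sketch of how these attributes are typically used, with hypothetical helper names:

    import torch

    def _run_training(self):  # hypothetical method of Model
        for epoch in range(self.epoch_num):
            self._train_one_epoch()             # hypothetical helper
            accuracy = self._evaluate_on_dev()  # hypothetical helper
            if self._best_dev_accuracy is None or accuracy > self._best_dev_accuracy:
                self._best_dev_accuracy = accuracy
                self._best_epoch = epoch
                if self._save_best_model:
                    torch.save(self.state_dict(), 'best.pt')
            if (epoch + 1) % self.checkpoint_interval == 0:
                torch.save(self.state_dict(), 'checkpoint_%d.pt' % (epoch + 1))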
Example #7
    def __init__(
        self,
        xdim,
        edim,
        hdim,
        lnum,
        use_bidirectional=True,
        use_lstm=True,
        dropout=0.2,
        **kwargs
    ):
        """RNN encoder

        Arguments:
            xdim {int} -- Size of a vocabulary
            edim {int} -- Dimension of an embedding layer
            hdim {int} -- Dimension of a hidden layer
            lnum {int} -- Number of stacked RNN layers

        Keyword Arguments:
            use_bidirectional {bool} -- Use bidirectional RNN (default: {True})
            use_lstm {bool} -- If True, use LSTM, else GRU (default: {True})
            dropout {float} -- dropout ratio (default: {0.2})
        """
        super(RecurrentEncoder, self).__init__()
        self.util = Util()
        self.pad_index = self.util.PAD_INDEX
        self.xdim = xdim
        self.edim = edim
        self.hdim = hdim
        self.lnum = lnum
        self.use_bidirectional = use_bidirectional
        self.use_lstm = use_lstm
        self.dropout = dropout
        self.use_cuda = self.util.use_cuda
        self.embedding = self._init_embedding()
        self.rnn = self._init_rnn()
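
The docstring states that use_lstm chooses between an LSTM and a GRU. _init_rnn is not included in the excerpt; a minimal sketch consistent with the attributes stored above might look like this:

    import torch.nn as nn

    def _init_rnn(self):  # sketch only, not the actual msnc implementation
        rnn_cls = nn.LSTM if self.use_lstm else nn.GRU
        return rnn_cls(
            input_size=self.edim,
            hidden_size=self.hdim,
            num_layers=self.lnum,
            dropout=self.dropout if self.lnum > 1 else 0.0,  # PyTorch warns otherwise
            bidirectional=self.use_bidirectional,
            batch_first=True,
        )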