Example #1
0
def random_seed(seed):
    """Execute code inside this with-block using the specified random seed.

    Sets the seed for random, numpy.random and torch (CPU).

    WARNING: torch GPU seeds are NOT set!

    Does not affect the state of random number generators outside this block.
    Not thread-safe.

    NOTE(review): this is a generator meant to be used as a context manager;
    confirm a @contextlib.contextmanager decorator is applied at the
    definition site (it is not visible in this chunk).

    Args:
        seed (int)
    """
    # Snapshot the current global RNG state (RandomState is a project helper).
    state = RandomState()
    random.seed(seed)  # alter state
    np.random.seed(seed)
    torch.manual_seed(seed)
    try:
        yield
    finally:
        # Restore the outer RNG state even if the with-block raises;
        # without this, an exception would leave the global RNGs reseeded,
        # violating the "does not affect state outside this block" contract.
        state.set_global()
Example #2
0
    def __init__(self, model, optim, src_dict, trg_dict, model_params=None, gpu_ids=None, random_seed=None):
        """Set up the trainer state and default training hyper-parameters.

        Args:
            model: the NMT model to train.
            optim: the optimizer wrapper used during training.
            src_dict: source-language vocabulary.
            trg_dict: target-language vocabulary.
            model_params: optional model hyper-parameters (stored as-is).
            gpu_ids: optional list of GPU ids to train on.
            random_seed (int, optional): if given, seeds the torch RNGs
                (CPU and all GPUs) for reproducibility.
        """
        self._logger = logging.getLogger('nmmt.NMTEngineTrainer')
        self._log_level = logging.INFO

        self._model = model
        self._optim = optim
        self._src_dict = src_dict
        self._trg_dict = trg_dict
        self._model_params = model_params
        self._gpu_ids = gpu_ids

        if random_seed is not None:
            torch.manual_seed(random_seed)
            # BUG FIX: the original called random.manual_seed_all(), but the
            # stdlib `random` module has no such attribute — it raised
            # AttributeError whenever a seed was supplied. The intended call
            # is torch.cuda.manual_seed_all, which seeds every GPU RNG
            # (and is a safe no-op when CUDA is unavailable).
            torch.cuda.manual_seed_all(random_seed)

        # Public-editable options
        self.log_interval = 50  # Log status every 'log_interval' updates
        self.batch_size = 64  # Maximum batch size
        self.max_generator_batches = 32  # Maximum batches of words in a seq to run the generator on in parallel.
        self.max_epochs = 40  # Maximum number of training epochs
        self.min_epochs = 10  # Minimum number of training epochs
        self.start_epoch = 1  # The epoch from which to start
        self.min_perplexity_decrement = .02  # If perplexity decrement is lower than this percentage, stop training
Example #3
0
# load mosi
# Build the multi-modal dataset; MultiModalDataset is a project class and
# `dataset`, `task`, `approach` are presumably defined earlier in the file.
mm_dset = MultiModalDataset(dataset, task, approach)

# get dataset length
# NOTE(review): `l` is an ambiguous single-letter name (PEP 8 E741); it is
# module-level and may be used later in the file, so it is left unchanged here.
l = len(mm_dset)

# split dataset to train-valid-test (80% train / 20% valid+test... actually:
# valid and test each get 20%, train gets the remaining 60%)
test_size = int(0.2 * l)
train_size = l - 2 * test_size

# reproducibility: make cuDNN deterministic on the GPU
# NOTE(review): this only triggers for the literal device "cuda:1" —
# confirm that is the intended training device; other devices stay
# non-deterministic.
if DEVICE == "cuda:1":
    torch.backends.cudnn.deterministic = True

# Fixed seed so random_split yields the same partition on every run.
torch.manual_seed(64)
mm_train, mm_valid, mm_test = random_split(mm_dset,
                                           [train_size, test_size, test_size])
# use dataloaders wrappers
# Only the training loader shuffles; valid/test loaders use the default
# batch size of 1 and no shuffling.
train_loader = DataLoader(mm_train, batch_size=BATCH_SIZE, shuffle=True)
valid_loader = DataLoader(mm_valid)
test_loader = DataLoader(mm_test)

#######################################
#### text rnn hyperparams       ######
######################################
text_hyperparameters = []
input_size = 300  # glove size
hidden_size = 64 * 2  # hidden state size
num_layers = 1  # how many stacked rnn's
bidirectional = True