def __init__(self, vocab_size, n_bits, n_hidden):
    """Set up embeddings and a two-layer head for the reinforced receiver.

    Args:
        vocab_size: number of discrete symbols in the message vocabulary.
        n_bits: width of the bit-vector input (and of the output layer).
        n_hidden: dimensionality of the message/column embeddings.
    """
    super().__init__()
    # Project the discrete message and the n_bits column into a common
    # n_hidden-dimensional space; their concatenation feeds the MLP head.
    self.emb_message = core.RelaxedEmbedding(vocab_size, n_hidden)
    self.emb_column = nn.Linear(n_bits, n_hidden)
    joint_dim = 2 * n_hidden
    self.fc1 = nn.Linear(joint_dim, joint_dim)
    self.fc2 = nn.Linear(joint_dim, n_bits)
def __init__(self, vocab_size, n_classes, deeper):
    """Build a receiver that maps a message embedding to class logits.

    Args:
        vocab_size: number of discrete symbols in the message vocabulary.
        n_classes: number of output classes.
        deeper: if truthy, use a two-layer (400 -> 400 -> n_classes) head
            instead of a single linear layer.
    """
    super().__init__()
    self.message_inp = core.RelaxedEmbedding(vocab_size, 400)
    # Choose head depth up front; `deeper` is kept so forward() can branch.
    if deeper:
        self.fc1 = nn.Linear(400, 400)
        self.fc2 = nn.Linear(400, n_classes)
    else:
        self.fc = nn.Linear(400, n_classes)
    self.deeper = deeper
def __init__(self, vocab_size, n_classes, n_hidden=0):
    """Build a receiver with a configurable-depth hidden stack.

    Args:
        vocab_size: number of discrete symbols in the message vocabulary.
        n_classes: number of output classes.
        n_hidden: how many (LeakyReLU, Linear(400, 400)) pairs to insert
            between the embedding and the output layer; 0 means none.
    """
    super().__init__()
    self.message_inp = core.RelaxedEmbedding(vocab_size, 400)
    # Each hidden "layer" is an activation followed by a 400->400 linear map.
    layers = []
    for _ in range(n_hidden):
        layers.append(nn.LeakyReLU())
        layers.append(nn.Linear(400, 400))
    self.hidden = nn.Sequential(*layers)
    self.fc = nn.Linear(400, n_classes)
def __init__(self, vocab_size, n_classes):
    """Build a minimal receiver: message embedding followed by one linear layer.

    Args:
        vocab_size: number of discrete symbols in the message vocabulary.
        n_classes: number of output classes.
    """
    super().__init__()
    # 400 is the fixed embedding width used by the Receiver variants here.
    self.message_inp = core.RelaxedEmbedding(vocab_size, 400)
    self.fc = nn.Linear(400, n_classes)
def __init__(self, other_agent, vocab_size1, vocab_size2):
    """Wrap another agent behind a vocabulary-translation embedding.

    Args:
        other_agent: the downstream agent the proxy forwards to.
        vocab_size1: size of the incoming (proxy-side) vocabulary.
        vocab_size2: size of the vocabulary expected by `other_agent`.
    """
    super().__init__()
    # Maps symbols from vocabulary 1 into the space of vocabulary 2.
    self.inp = core.RelaxedEmbedding(vocab_size1, vocab_size2)
    self.other_agent = other_agent