Example #1
import numpy as np
import torch
from torch import nn

# Config and enc are project-local helpers; their definitions are not shown here.
class NTMMemory(nn.Module):
    def __init__(self, N, M):
        """Initialize the NTM Memory matrix.

        The memory's dimensions are (batch_size x N x M).
        Each batch has its own memory matrix.

        :param N: Number of rows in the memory.
        :param M: Number of columns/features in the memory.
        """
        super(NTMMemory, self).__init__()

        self.N = N
        self.M = M
        # The memory bias allows the heads to learn how to initially address
        # memory locations by content
        self.register_buffer('mem_bias', torch.Tensor(N, M))
        self.memory = None
        self.init_mem = None
        # Initialize memory bias
        stdev = 1 / (np.sqrt(N + M))
        nn.init.uniform_(self.mem_bias, -stdev, stdev)

        # Build a fixed initial memory from the configured output range,
        # encoding each integer in the range as a fixed-length vector.
        conf = Config()
        lower_bound, upper_bound = conf.output_range()
        threshold = conf.get_threshold()
        length = conf.get_encoding_length()
        mem = []
        for num in range(lower_bound, upper_bound + 1):
            mem.append(enc(num, threshold, length))
        self.memory = torch.stack(mem, dim=0).float()
        self.init_mem = torch.stack(mem, dim=0).float()
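
A minimal usage sketch, assuming the class above is importable and that N and M are chosen to match the memory built from Config (this pairing is an assumption; the snippet itself does not enforce it):

conf = Config()
lower_bound, upper_bound = conf.output_range()
# Assumed pairing: N = number of encoded values, M = encoding length.
memory = NTMMemory(N=upper_bound - lower_bound + 1,
                   M=conf.get_encoding_length())
print(memory.memory.shape)  # (upper_bound - lower_bound + 1, encoding length)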
Example #2
import torch

# Config and encoder are project-local helpers; their definitions are not shown here.
def encode(inarr):
    # Encode each element of inarr as a fixed-length vector and stack the
    # results into a single 2-D tensor of shape (len(inarr), encoding_length).
    conf = Config()
    result = []
    for element in inarr:
        result.append(
            encoder(element, conf.get_threshold(), conf.get_encoding_length()))
    return torch.stack(result, dim=0)
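
A usage sketch for encode, assuming inarr is an iterable of integers that encoder accepts (the input values below are hypothetical):

batch = encode([1, 2, 3])   # hypothetical input values
print(batch.shape)          # (3, Config().get_encoding_length())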