Example no. 1
0
 def test_bidi_rnn_layer_y_summarize(self):
     """
     A bidirectional RNN summarizing over the y-dimension should collapse
     the height axis to 1 and double the hidden size in the channel axis.
     """
     layer = layers.TransposedSummarizingRNN(10, 2, 'b', True, True)
     inp = torch.randn(1, 10, 32, 64)
     out = layer(inp)
     self.assertEqual(out.shape, (1, 4, 1, 64))
Example no. 2
0
 def test_forward_rnn_layer_x_summarize(self):
     """
     A unidirectional (forward) RNN summarizing over the x-dimension
     should collapse the width axis to 1 while keeping height intact.
     """
     layer = layers.TransposedSummarizingRNN(10, 2, 'f', False, True)
     inp = torch.randn(1, 10, 32, 64)
     out = layer(inp)
     self.assertEqual(out.shape, (1, 2, 32, 1))
Example no. 3
0
 def test_bidi_rnn_layer_x(self):
     """
     A non-summarizing bidirectional RNN over the x-dimension should keep
     the spatial shape and double the hidden size in the channel axis.
     """
     layer = layers.TransposedSummarizingRNN(10, 2, 'b', False, False)
     out = layer(torch.randn(1, 10, 32, 64))
     self.assertEqual(out[0].shape, (1, 4, 32, 64))
Example no. 4
0
 def build_rnn(
     self, input: Tuple[int, int, int, int], block: str
 ) -> Union[Tuple[None, None, None], Tuple[Tuple[int, int, int, int], str,
                                           Callable]]:
     """
     Builds an LSTM/GRU layer returning number of outputs and layer.

     Args:
         input: 4-tuple input shape; input[1] is fed to the layer as the
             input feature size (presumably NCHW — confirm with caller).
         block: single VGSL block spec, e.g. 'Lbys10' or 'Gfx{name}64'.

     Returns:
         (None, None, None) if `block` does not match the RNN spec
         pattern, otherwise (output shape, layer name, layer callable).
     """
     pattern = re.compile(
         r'(?P<type>L|G)(?P<dir>f|r|b)(?P<dim>x|y)(?P<sum>s)?(?P<legacy>c|o)?(?P<name>{\w+})?(?P<out>\d+)'
     )
     m = pattern.match(block)
     if not m:
         return None, None, None
     # Renamed from `type` to avoid shadowing the builtin.
     cell_type = m.group('type')
     direction = m.group('dir')
     # True means the layer operates over the y-dimension (transposed).
     dim = m.group('dim') == 'y'
     summarize = m.group('sum') == 's'
     legacy = None
     if m.group('legacy') == 'c':
         legacy = 'clstm'
     elif m.group('legacy') == 'o':
         legacy = 'ocropy'
     # Use the named group rather than the fragile positional index 7,
     # consistent with every other group access in this method.
     hidden = int(m.group('out'))
     fn = layers.TransposedSummarizingRNN(input[1], hidden, direction, dim,
                                          summarize, legacy)
     logger.debug(
         '{}\t\trnn\tdirection {} transposed {} summarize {} out {} legacy {}'
         .format(self.idx + 1, direction, dim, summarize, hidden, legacy))
     return fn.get_shape(input), self.get_layer_name(cell_type,
                                                     m.group('name')), fn
Example no. 5
0
 def build_rnn(
     self, input: Tuple[int, int, int, int], blocks: List[str], idx: int
 ) -> Union[Tuple[None, None, None], Tuple[Tuple[int, int, int, int], str,
                                           Callable]]:
     """
     Builds an LSTM/GRU layer returning number of outputs and layer.

     Args:
         input: 4-tuple input shape; input[1] is fed to the layer as the
             input feature size (presumably NCHW — confirm with caller).
         blocks: list of VGSL block specs for the whole network.
         idx: index of the block to build within `blocks`.

     Returns:
         (None, None, None) if the block does not match the RNN spec
         pattern, otherwise (output shape, [VGSLBlock], layer callable).
         Increments self.idx as a side effect on success.
     """
     pattern = re.compile(
         r'(?P<type>L|G)(?P<dir>f|r|b)(?P<dim>x|y)(?P<sum>s)?(?P<legacy>c|o)?(?P<name>{\w+})?(?P<out>\d+)'
     )
     m = pattern.match(blocks[idx])
     if not m:
         return None, None, None
     # Renamed from `type` to avoid shadowing the builtin.
     cell_type = m.group('type')
     direction = m.group('dir')
     # True means the layer operates over the y-dimension (transposed).
     dim = m.group('dim') == 'y'
     summarize = m.group('sum') == 's'
     legacy = None
     if m.group('legacy') == 'c':
         legacy = 'clstm'
     elif m.group('legacy') == 'o':
         legacy = 'ocropy'
     # Use the named group rather than the fragile positional index 7,
     # consistent with every other group access in this method.
     hidden = int(m.group('out'))
     fn = layers.TransposedSummarizingRNN(input[1], hidden, direction, dim,
                                          summarize, legacy)
     self.idx += 1
     logger.debug(
         f'{self.idx}\t\trnn\tdirection {direction} transposed {dim} '
         f'summarize {summarize} out {hidden} legacy {legacy}')
     return fn.get_shape(input), [
         VGSLBlock(blocks[idx], cell_type, m.group('name'), self.idx)
     ], fn