Example #1
 # Method of torch.Tensor; assumes a module-level `from torch._six import imap`
 # (itertools.imap on Python 2, plain map on Python 3).
 def __iter__(self):
     # NB: we use 'imap' and not 'map' here, so that in Python 2 we get a
     # generator and don't eagerly perform all the indexes.  This could
     # save us work, and also helps keep trace ordering deterministic
     # (e.g., if you zip(*hiddens), the eager map will force all the
     # indexes of hiddens[0] before hiddens[1], while the generator
     # map will interleave them.)
     return iter(imap(lambda i: self[i], range(self.size(0))))
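The comment above is about evaluation order: because iteration is lazy, zipping two tensors interleaves their indexing instead of materializing every row of the first tensor up front. A minimal sketch of that behavior (the `hiddens` shapes are made up for illustration):

    import torch

    hiddens = (torch.zeros(3, 4), torch.ones(3, 4))
    # zip(*hiddens) calls __iter__ on each tensor; each zip step lazily
    # triggers hiddens[0][i] and then hiddens[1][i] in turn, rather than
    # forcing all indexes of hiddens[0] before touching hiddens[1].
    for h0_row, h1_row in zip(*hiddens):
        print(h0_row.shape, h1_row.shape)  # torch.Size([4]) torch.Size([4])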
Example #2
 # Method of torch.Tensor; assumes module-level `import torch`, `import warnings`,
 # and `from torch._six import imap` (itertools.imap on Python 2, map on Python 3).
 def __iter__(self):
     # NB: we use 'imap' and not 'map' here, so that in Python 2 we get a
     # generator and don't eagerly perform all the indexes.  This could
     # save us work, and also helps keep trace ordering deterministic
     # (e.g., if you zip(*hiddens), the eager map will force all the
     # indexes of hiddens[0] before hiddens[1], while the generator
     # map will interleave them.)
     if self.dim() == 0:
         raise TypeError('iteration over a 0-d tensor')
     if torch._C._get_tracing_state():
         warnings.warn('Iterating over a tensor might cause the trace to be incorrect. '
                       'Passing a tensor of different shape won\'t change the number of '
                       'iterations executed (and might lead to errors or silently give '
                       'incorrect results).', category=RuntimeWarning)
     return iter(imap(lambda i: self[i], range(self.size(0))))
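This later revision adds two guards: iterating a 0-d tensor now raises a clear TypeError, and iterating while the JIT tracer is active emits a RuntimeWarning, since the trace bakes in a fixed iteration count. A quick sketch of the 0-d guard (standalone script, not part of the original source):

    import torch

    scalar = torch.tensor(3.14)      # 0-d tensor: dim() == 0
    try:
        iter(scalar)
    except TypeError as err:
        print(err)                   # iteration over a 0-d tensor

    vec = torch.arange(3)            # 1-d tensors iterate fine
    print(list(iter(vec)))           # [tensor(0), tensor(1), tensor(2)]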