Example #1
0
    def read_dynamic(hf: h5py.File,
                     key: str,
                     begin: int,
                     end: int,
                     *,
                     shared: bool = False) -> TensorList:
        """Read rows [begin, end) of the variable-length list stored under `key`.

        The list is persisted as two datasets: "<key>_offsets" (row
        boundaries) and "<key>_data" (concatenated values). If either
        dataset is missing, the slice is treated as `end - begin` empty rows.
        With `shared=True`, tensors are backed by shared memory.
        """
        num_rows = end - begin
        try:
            offsets_ds = hf[f"{key}_offsets"]
            data_ds = hf[f"{key}_data"]
        except LookupError:
            # Datasets were never written: every requested row is empty.
            return TensorList.empty(num_tensors=num_rows)

        alloc = allocate_shared_tensor if shared else torch.empty

        # One extra offset is needed to delimit the last row.
        offsets = alloc((num_rows + 1, ), dtype=torch.long)
        offsets_ds.read_direct(offsets.numpy(),
                               source_sel=np.s_[begin:end + 1])

        first = offsets[0].item()
        last = offsets[-1].item()
        data = alloc((last - first, ), dtype=torch.long)
        # h5py rejects reads with an empty selection; skip them.
        # See https://github.com/h5py/h5py/issues/870.
        if last - first > 0:
            data_ds.read_direct(data.numpy(),
                                source_sel=np.s_[first:last])

        # Rebase offsets so they index into the freshly read `data` slice.
        offsets -= int(offsets[0])

        return TensorList(offsets, data)
 def test_from_tensor(self):
     """from_tensor pairs the id tensor with one empty TensorList per entry."""
     ids = torch.tensor([3, 4], dtype=torch.long)
     actual = EntityList.from_tensor(ids)
     expected = EntityList(
         torch.tensor([3, 4], dtype=torch.long),
         TensorList.empty(num_tensors=2),
     )
     self.assertEqual(actual, expected)
 def test_empty(self):
     """empty() equals an EntityList built from zero ids and an empty TensorList."""
     expected = EntityList(
         torch.empty((0, ), dtype=torch.long),
         TensorList.empty(),
     )
     self.assertEqual(EntityList.empty(), expected)
Example #4
0
 def from_tensor(cls, tensor: LongTensorType) -> 'EntityList':
     if tensor.dim() != 1:
         raise ValueError("Expected 1D tensor, got %dD" % tensor.dim())
     tensor_list = TensorList.empty(num_tensors=tensor.shape[0])
     return cls(tensor, tensor_list)
Example #5
0
 def empty(cls) -> 'EntityList':
     """Return an EntityList containing no entities."""
     no_ids = torch.empty((0, ), dtype=torch.long)
     return cls(no_ids, TensorList.empty())