Example #1
import numpy as np


def reduce_window_shape_tuple(operand_shape, window_dimensions, window_strides,
                              padding):
  # Give the leading and trailing dimensions a size-1 window with stride 1.
  window_dimensions = (1,) + window_dimensions + (1,)
  window_strides = (1,) + window_strides + (1,)
  # padtype_to_pads (defined elsewhere in the module) returns a (lo, hi)
  # padding pair for every dimension of the operand.
  pads = padtype_to_pads(operand_shape, window_dimensions, window_strides, padding)
  operand_padded = np.add(operand_shape, np.add(*zip(*pads)))
  # Sliding-window output size: floor((padded - window) / stride) + 1.
  t = np.floor_divide(
      np.subtract(operand_padded, window_dimensions), window_strides) + 1
  return tuple(t)
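
A minimal usage sketch of the function above. The padtype_to_pads stub here is a hypothetical stand-in for the module's own helper and only handles 'VALID' padding:

# Hypothetical stand-in for the real padtype_to_pads helper: 'VALID' padding
# adds nothing, so every dimension gets a (0, 0) pair.
def padtype_to_pads(operand_shape, window_dimensions, window_strides, padding):
  assert padding == 'VALID'
  return [(0, 0)] * len(operand_shape)

# A (1, 6, 6, 1) operand with a 2x2 window and stride 2 in the two middle
# dimensions produces a (1, 3, 3, 1) output shape.
print(reduce_window_shape_tuple((1, 6, 6, 1), (2, 2), (2, 2), 'VALID'))
# (1, 3, 3, 1)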
Example #2
    def testDatasetInterop(self):
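        # NOTE: `np` in this test presumably refers to TensorFlow's NumPy API
        # (tf.experimental.numpy), so values yielded by tf.data datasets
        # interoperate with it as ndarray values.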
        values = [1, 2, 3, 4, 5, 6]
        values_as_array = np.asarray(values)

        # Tensor dataset
        dataset = tf.data.Dataset.from_tensors(values_as_array)

        for value, value_from_dataset in zip([values_as_array], dataset):
            self.assertIsInstance(value_from_dataset, np.ndarray)
            self.assertAllEqual(value_from_dataset, value)

        # Tensor slice dataset
        dataset = tf.data.Dataset.from_tensor_slices(values_as_array)

        for value, value_from_dataset in zip(values, dataset):
            self.assertIsInstance(value_from_dataset, np.ndarray)
            self.assertAllEqual(value_from_dataset, value)

        # # TODO(nareshmodi): as_numpy_iterator() doesn't work.
        # items = list(dataset.as_numpy_iterator())

        # Map over a dataset.
        dataset = dataset.map(lambda x: np.add(x, 1))

        for value, value_from_dataset in zip(values, dataset):
            self.assertIsInstance(value_from_dataset, np.ndarray)
            self.assertAllEqual(value_from_dataset, value + 1)

        # Batch a dataset.
        dataset = tf.data.Dataset.from_tensor_slices(values_as_array).batch(2)

        for value, value_from_dataset in zip([[1, 2], [3, 4], [5, 6]],
                                             dataset):
            self.assertIsInstance(value_from_dataset, np.ndarray)
            self.assertAllEqual(value_from_dataset, value)