Example #1
import ctypes
import _ctypes

from tensorflow_io import _load_library


def load_dependency_and_library(p):
  for library in p:
    # First, try to load all dependencies with RTLD_LOCAL
    entries = []
    for dependency in p[library]:
      try:
        entries.append(ctypes.CDLL(dependency))
      except OSError:
        pass
    if len(entries) == len(p[library]):
      # Dependencies have been satisfied; load them again with RTLD_GLOBAL, no error is expected
      for dependency in p[library]:
        ctypes.CDLL(dependency, mode=ctypes.RTLD_GLOBAL)
      # Load video_op
      return _load_library(library)
    # Otherwise we dlclose and retry
    entries.reverse()
    for entry in entries:
      _ctypes.dlclose(entry._handle)
  raise NotImplementedError("could not find ffmpeg after searching through %s" % (p,))
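A sketch of how this loader might be invoked; the mapping below from each candidate library to its shared-object dependencies is hypothetical, with file names depending on the local ffmpeg installation:

```python
# Hypothetical mapping: candidate video-op library -> its ffmpeg dependencies.
p = {
    '_video_ops_ffmpeg_3.4.so': ['libavformat.so.57', 'libavcodec.so.57'],
    '_video_ops_ffmpeg_2.8.so': ['libavformat.so.56', 'libavcodec.so.56'],
}
video_ops = load_dependency_and_library(p)
```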
Example #2
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SequenceFile Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
hadoop_ops = _load_library('_hadoop_ops.so')


class SequenceFileDataset(data.Dataset):
    """A Sequence File Dataset that reads the sequence file."""
    def __init__(self, filenames):
        """Create a `SequenceFileDataset`.

    `SequenceFileDataset` allows a user to read data from a hadoop sequence
    file. A sequence file consists of (key, value) pairs stored sequentially.
    At the moment, `org.apache.hadoop.io.Text` is the only supported
    serialization type, and there is no compression support.

    For example:

    ```python
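    # A plausible continuation of this truncated example; the path is
    # hypothetical.
    tf.enable_eager_execution()
    dataset = SequenceFileDataset("/tmp/file.seq")
    for key, value in dataset:
      print(key, value)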
Example #3
For background on Cloud BigQuery, see: https://cloud.google.com/bigquery .
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.data.experimental.ops import interleave_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import structure
from tensorflow.python.framework import dtypes
from tensorflow_io import _load_library


_bigquery_so = _load_library("_bigquery.so")


class BigQueryClient(object):
  """BigQueryClient is the entrypoint for interacting with Cloud BigQuery in TF.

  BigQueryClient encapsulates a connection to Cloud BigQuery, and exposes the
  `readSession` method to initiate a BigQuery read session.
  """

  def __init__(self, client_resource=None):
    """Creates a BigQueryClient to start BigQuery read sessions.

    Args:
      client_resource: client resource (optional).
    """
Example #4
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""OSS File System Support

OSS is an Object Storage Service provided by Alibaba Cloud. This module
implements a filesystem on top of it.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_io import _load_library

_load_library("_oss_ops.so")
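Loading `_oss_ops.so` registers the OSS filesystem plugin, after which OSS objects should be reachable through TensorFlow's standard file APIs. A minimal sketch with a hypothetical bucket and path; credential configuration is elided and follows the plugin's documentation:

```python
import tensorflow as tf

# Hypothetical oss:// path; credentials are configured per the OSS plugin docs.
with tf.io.gfile.GFile("oss://my-bucket/path/to/file.txt", "r") as f:
    print(f.read())
```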
Example #5
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""CIFAR Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
cifar_ops = _load_library('_cifar_ops.so')


class _CIFAR10Dataset(data.Dataset):
    """A CIFAR File Dataset that reads the cifar file."""
    def __init__(self, filename, filters, batch=None):
        """Create a `CIFARDataset`.

    Args:
      filename: A `tf.string` tensor containing one or more filenames.
    """
        self._data_input = cifar_ops.cifar10_input(filename, filters)
        self._batch = 0 if batch is None else batch
        super(_CIFAR10Dataset, self).__init__()

    def _inputs(self):
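The class is private, so a public wrapper presumably selects between CIFAR variants; still, a direct-use sketch mirroring the constructor above, with a hypothetical filename and filter list:

```python
# Hypothetical direct use of the private class shown above; the meaning of
# `filters` is assumed to be a selection of record files inside the archive.
dataset = _CIFAR10Dataset("/tmp/cifar-10-binary.tar.gz",
                          ["data_batch_1.bin"], batch=32)
```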
Example #6
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""LMDBDataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
lmdb_ops = _load_library('_lmdb_ops.so')


class LMDBDataset(data.Dataset):
    """A LMDB Dataset that reads the lmdb file."""
    def __init__(self, filenames):
        """Create a `LMDBDataset`.

    `LMDBDataset` allows a user to read data from an mdb file as
    (key, value) pairs sequentially.

    For example:
    ```python
    tf.enable_eager_execution()
    dataset = LMDBDataset("/foo/bar.mdb")
    # Prints the (key, value) pairs inside a lmdb file.
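    # A plausible continuation: print each pair.
    for key, value in dataset:
      print(key, value)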
Example #7
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Parquet Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
parquet_ops = _load_library('_parquet_ops.so')

if hasattr(tensorflow, "nest"):
    from tensorflow import nest  # pylint: disable=ungrouped-imports
else:
    from tensorflow.python.data.util import nest  # pylint: disable=ungrouped-imports


class ParquetDataset(data.Dataset):
    """A Parquet Dataset that reads the parquet file."""
    def __init__(self, filenames, columns, output_types):
        """Create a `ParquetDataset`.

    `ParquetDataset` allows a user to read data from a parquet file.
    For example:
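Following the constructor signature above, a usage sketch might look like this; the file path, column indices, and dtypes are hypothetical:

```python
import tensorflow as tf

# Hypothetical parquet file with three columns of known types.
dataset = ParquetDataset(["/tmp/data.parquet"],
                         columns=[0, 1, 2],
                         output_types=(tf.int64, tf.float64, tf.string))
```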
Example #8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from six import iteritems
from six import string_types

from tensorflow.python.data.experimental.ops import interleave_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.data.util import structure
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow_io import _load_library

_bigtable_so = _load_library("_bigtable.so")


class BigtableClient(object):  # pylint: disable=useless-object-inheritance
    """BigtableClient is the entrypoint for interacting with Cloud Bigtable in TF.

  BigtableClient encapsulates a connection to Cloud Bigtable, and exposes the
  `table` method to open a Bigtable table.
  """
    def __init__(self,
                 project_id,
                 instance_id,
                 connection_pool_size=None,
                 max_receive_message_size=None):
        """Creates a BigtableClient that can be used to open connections to tables.
Example #9
File: text_ops.py Project: caszkgui/io
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TextInput/TextOutput."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
text_ops = _load_library('_text_ops.so')


class TextDataset(data.Dataset):
    """A Text Dataset
  """
    def __init__(self, filename):
        """Create a Text Reader.

    Args:
      filename: A `tf.string` tensor containing one or more filenames.
    """
        self._data_input = text_ops.text_input(filename, ["none", "gz"])
        super(TextDataset, self).__init__()

    def _inputs(self):
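The `["none", "gz"]` filter list passed to `text_input` suggests both plain and gzip-compressed files are accepted, so a usage sketch (with a hypothetical path) is simply:

```python
# Hypothetical usage: read records from a gzip-compressed text file.
dataset = TextDataset("/tmp/log.txt.gz")
```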
Example #10
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""BigQuery reading support for TensorFlow."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.framework import ops
from tensorflow.python.ops import io_ops
from tensorflow_io import _load_library

_bigquery_reader_so = _load_library("_bigquery_reader_ops.so")


class BigQueryReader(io_ops.ReaderBase):
    """A Reader that outputs keys and tf.Example values from a BigQuery table.

  Example use:
    ```python
    # Assume a BigQuery table has the following schema,
    #     name      STRING,
    #     age       INT,
    #     state     STRING

    # Create the parse_examples list of features.
    features = dict(
      name=tf.FixedLenFeature([1], tf.string),
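      # Completing the dict per the schema above (age INT, state STRING).
      age=tf.FixedLenFeature([1], tf.int64),
      state=tf.FixedLenFeature([1], tf.string))

    # A plausible continuation; the reader arguments are hypothetical.
    reader = BigQueryReader(project_id="project",
                            dataset_id="dataset",
                            table_id="table",
                            timestamp_millis=1480000000000,
                            num_partitions=4,
                            features=features)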
Example #11
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""PubSub Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
pubsub_ops = _load_library('_pubsub_ops.so')


class PubSubDataset(data.Dataset):
    """A PubSub Dataset that consumes the message.
  """
    def __init__(self, subscriptions, server=None, eof=False, timeout=1000):
        """Create a PubSubDataset.

    Args:
      subscriptions: A `tf.string` tensor containing one or more subscriptions.
      server: The pubsub server.
      eof: If True, the pubsub reader will stop on EOF.
      timeout: The timeout value for PubSub to wait
               (in milliseconds).
    """
Example #12
"""LibSVM Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow import sparse
from tensorflow_io import _load_library
gen_libsvm_ops = _load_library('_libsvm_ops.so')


def decode_libsvm(content, num_features, dtype=None, label_dtype=None):
    """Convert Libsvm records to a tensor of label and a tensor of feature.
  Args:
    content: A `Tensor` of type `string`. Each string is a record/row in
      the Libsvm format.
    num_features: The number of features.
    dtype: The type of the output feature tensor. Default to tf.float32.
    label_dtype: The type of the output label tensor. Default to tf.int64.
  Returns:
    features: A `SparseTensor` of the shape `[input_shape, num_features]`.
    labels: A `Tensor` of the same shape as content.
  """
    labels, indices, values, shape = gen_libsvm_ops.decode_libsvm(
        content, num_features, dtype=dtype, label_dtype=label_dtype)
    return sparse.SparseTensor(indices, values, shape), labels


def make_libsvm_dataset(file_names,
                        num_features,
                        dtype=None,
                        label_dtype=None,
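A usage sketch for `decode_libsvm` above; the record is a minimal hypothetical example (LibSVM rows are `label index:value` pairs):

```python
import tensorflow as tf

# One hypothetical LibSVM record: label 1, feature 3 set to 4.5.
content = tf.constant(["1 3:4.5"])
features, labels = decode_libsvm(content, num_features=10)
```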
Example #13
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ignite File System for checkpointing and communication with TensorBoard.

Apache Ignite is a memory-centric distributed database, caching, and
processing platform for transactional, analytical, and streaming workloads,
delivering in-memory speeds at petabyte scale. In addition to database
functionality Apache Ignite provides a distributed file system called
IGFS (https://ignite.apache.org/features/igfs.html). IGFS delivers
functionality similar to Hadoop HDFS, but in-memory. In fact, in addition to
its own APIs, IGFS implements the Hadoop FileSystem API and can be
transparently plugged into Hadoop or Spark deployments. This contrib package
contains an integration between IGFS and TensorFlow.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_io import _load_library
_load_library("_ignite_ops.so", "file_system")
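With the file system registered, IGFS paths should be usable through TensorFlow's standard file APIs. A minimal sketch, assuming a reachable Ignite node and a hypothetical path:

```python
import tensorflow as tf

# Hypothetical IGFS path; the URI layout follows the igfs:// scheme.
with tf.io.gfile.GFile("igfs:///dir/file.txt", "r") as f:
    print(f.read())
```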
Example #14
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Audio Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
audio_ops = _load_library('_audio_ops.so')


class WAVDataset(data.Dataset):
    """A WAV Dataset
  """
    def __init__(self, filenames, batch=None):
        """Create a WAVDataset.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
    """
        self._data_input = audio_ops.wav_input(filenames)
        self._batch = 0 if batch is None else batch
        super(WAVDataset, self).__init__()
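A usage sketch for the WAV reader above; the filename is hypothetical and `batch` follows the optional constructor argument:

```python
# Hypothetical usage: read samples from a WAV file, 16 records at a time.
dataset = WAVDataset(["/tmp/sample.wav"], batch=16)
```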
Example #15
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Image Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library

image_ops = _load_library('_image_ops.so')


class WebPDataset(data.Dataset):
    """A WebP Image File Dataset that reads the WebP file."""
    def __init__(self, filenames):
        """Create a `WebPDataset`.

      filenames: A `tf.string` tensor containing one or more filenames.
    """
        self._filenames = tensorflow.convert_to_tensor(filenames,
                                                       dtype=dtypes.string,
                                                       name="filenames")
        super(WebPDataset, self).__init__()

    def _inputs(self):
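A usage sketch; each element presumably decodes one WebP file into an image tensor (paths are hypothetical):

```python
# Hypothetical usage with two WebP files.
dataset = WebPDataset(["/tmp/image1.webp", "/tmp/image2.webp"])
```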
Example #16
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""MNIST Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
mnist_ops = _load_library('_mnist_ops.so')


class _MNISTBaseDataset(data.Dataset):
    """A MNIST Dataset
  """
    def __init__(self, mnist_op_class, filenames, compression_type=None):
        """Create a MNISTReader.

    Args:
      mnist_op_class: The op of the dataset, either
          mnist_ops.mnist_image_dataset or mnist_ops.mnist_label_dataset.
      filenames: A `tf.string` tensor containing one or more filenames.
      compression_type: (Optional.) A `tf.string` scalar evaluating to one of
        `""` (no compression), `"ZLIB"`, or `"GZIP"`.
    """
Example #17
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""GRPCInput."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
grpc_ops = _load_library('_grpc_ops.so')

class GRPCDataset(data.Dataset):
  """A GRPC Dataset
  """

  def __init__(self, endpoint, shape, dtype, batch=None):
    """Create a GRPC Reader.

    Args:
      endpoint: A `tf.string` tensor containing one or more endpoints.
    """
    self._data_input = grpc_ops.grpc_input(endpoint)
    self._batch = 0 if batch is None else batch
    shape[0] = None
    self._output_shapes = tuple([
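The constructor forces the leading dimension of `shape` to `None`, i.e. a variable batch dimension. A usage sketch with a hypothetical endpoint and record shape:

```python
import tensorflow as tf

# Hypothetical endpoint serving 28x28 float32 records.
dataset = GRPCDataset("localhost:50051", shape=[None, 28, 28],
                      dtype=tf.float32, batch=32)
```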
Example #18
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""PcapDataset"""
import tensorflow as tf
from tensorflow_io.core.python.ops import data_ops
from tensorflow_io import _load_library
pcap_ops = _load_library('_pcap_ops.so')


class PcapDataset(data_ops.Dataset):
  """A pcap Dataset. Pcap is a popular file format for capturing network packets.
  """

  def __init__(self, filenames, batch=None):
    """Create a pcap Reader.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
    """
    batch = 0 if batch is None else batch
    dtypes = [tf.float64, tf.string]
    shapes = [
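Given the `tf.float64`/`tf.string` dtypes above, each element is presumably a (timestamp, packet data) pair. A usage sketch with a hypothetical capture file:

```python
# Hypothetical usage: iterate (timestamp, packet_data) pairs.
dataset = PcapDataset(["/tmp/capture.pcap"])
```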
Example #19
File: video_dataset_ops.py Project: lc0/io
"""Video Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os

import tensorflow
from tensorflow import dtypes
from tensorflow import errors
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
video_ops = None
for f in ['_video_ops_ffmpeg_3.4.so', '_video_ops_ffmpeg_2.8.so', '_video_ops_libav_9.20.so']:
  try:
    video_ops = _load_library(f)
    break
  except NotImplementedError as e:
    print(e)


class VideoDataset(data.Dataset):
  """A Video File Dataset that reads the video file."""

  def __init__(self, filenames):
    """Create a `VideoDataset`.

    `VideoDataset` allows a user to read data from a video file with
    ffmpeg. The output of VideoDataset is a sequence of (height, width, 3)
    tensors in rgb24 format.
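A usage sketch under eager execution; the video path is hypothetical:

```python
import tensorflow as tf

tf.compat.v1.enable_eager_execution()
# Hypothetical usage: iterate decoded rgb24 frames of a video file.
dataset = VideoDataset(["/tmp/sample.mp4"])
for frame in dataset:
  print(frame.shape)  # (height, width, 3)
```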
Example #20
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Arrow Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import io

import tensorflow

from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
arrow_ops = _load_library('_arrow_ops.so')

if hasattr(tensorflow, "nest"):
  from tensorflow import nest # pylint: disable=ungrouped-imports
else:
  from tensorflow.python.data.util import nest # pylint: disable=ungrouped-imports


def arrow_to_tensor_type(pa_t):
  """Convert Arrow type to tuple of (Tensor dtype, shape dims).
  This function requires pyarrow to be installed.
  """
  import pyarrow as pa
  shape_dims = []  # initialize shape as scalar
  if pa.types.is_boolean(pa_t):
    tf_t = dtypes.bool
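A sketch of calling the helper, assuming it returns the `(dtype, shape dims)` tuple its docstring promises and that pyarrow is installed:

```python
import pyarrow as pa

# Map an Arrow boolean type to its TensorFlow counterpart.
tf_dtype, shape_dims = arrow_to_tensor_type(pa.bool_())
```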
Example #21
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Kinesis Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os

import tensorflow

from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
kinesis_ops = _load_library('_kinesis_ops.so')


class KinesisDataset(data.Dataset):
    """A Kinesis Dataset that consumes the message.

  Kinesis is a managed service provided by AWS for data streaming.
  This dataset reads messages from Kinesis with each message presented
  as a `tf.string`.

  For example, we can construct and use the KinesisDataset as follows:
  ```python
  dataset = KinesisDataset(
      "kinesis_stream_name", read_indefinitely=False)
  next = dataset.make_one_shot_iterator().get_next()
  with tf.Session() as sess:
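    # A plausible continuation: with read_indefinitely=False the iterator
    # ends once the stream is exhausted.
    try:
      while True:
        print(sess.run(next))
    except tf.errors.OutOfRangeError:
      pass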
Example #22
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_io import _load_library
core_ops = _load_library('libtensorflowio.so')
Example #23
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""KafkaOutputSequence."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_io import _load_library
kafka_ops = _load_library('_kafka_ops.so')


class KafkaOutputSequence(object):
  """KafkaOutputSequence"""

  def __init__(self, topic, servers="localhost"):
    """Create a `KafkaOutputSequence`.
    """
    self._topic = topic
    self._resource = kafka_ops.kafka_output_sequence(
        topic=topic, servers=servers)

  def setitem(self, index, item):
    """Set an indexed item in the output sequence."""
    kafka_ops.kafka_output_sequence_set_item(self._resource, index, item)
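A usage sketch: messages are written to the topic by index; the broker address is hypothetical:

```python
# Hypothetical usage: write two messages to a Kafka topic by index.
seq = KafkaOutputSequence("my-topic", servers="localhost:9092")
seq.setitem(0, "message 0")
seq.setitem(1, "message 1")
```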
Example #24
from __future__ import division
from __future__ import print_function

import abc
import socket
import ssl
import struct

import six

import tensorflow as tf

from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io import _load_library
ignite_ops = _load_library("_ignite_ops.so")


@six.add_metaclass(abc.ABCMeta)  # pylint: disable=useless-object-inheritance
class Readable(object):
    """Readable abstract class that exposes methods to do reading-related

     operations.
  """
    @abc.abstractmethod
    def __init__(self):
        pass

    def read_byte(self):
        """Reads and returnes byte."""
        return self._read("b", 1)
Example #25
import tensorflow as tf
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.training import training
from tensorflow_io import _load_library

# Some GCS operations may be pre-defined and available via tf.contrib in
# earlier TF versions. Because these ops are pre-registered, they will not be
# visible from the _gcs_config_ops library. In this case we use the tf.contrib
# version instead.
tf_v1 = tf.version.VERSION.startswith('1')

if not tf_v1:
    _gcs_config_so = _load_library("_gcs_config_ops.so")
    gcs_configure_credentials = _gcs_config_so.gcs_configure_credentials
    gcs_configure_block_cache = _gcs_config_so.gcs_configure_block_cache


class BlockCacheParams(object):  # pylint: disable=useless-object-inheritance
    """BlockCacheParams is a struct used for configuring the GCS Block Cache."""
    def __init__(self, block_size=None, max_bytes=None, max_staleness=None):
        self._block_size = block_size or 128 * 1024 * 1024
        self._max_bytes = max_bytes or 2 * self._block_size
        self._max_staleness = max_staleness or 0

    @property
    def block_size(self):
        return self._block_size
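A hedged sketch of wiring `BlockCacheParams` into the loaded op; the op's argument names here are assumptions, not a verified signature:

```python
# Hypothetical wiring; the op's argument names are assumptions.
params = BlockCacheParams(max_bytes=1024 * 1024 * 1024)
gcs_configure_block_cache(max_cache_size=params.max_bytes,
                          block_size=params.block_size,
                          max_staleness=params.max_staleness)
```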