import sys
import warnings


def _load_libraries(p):
  """_load_libraries"""
  for library in p:
    try:
      v = _load_library(library)
      # Only Linux utilizes the library for
      # EncodeAACFunctionFiniFFmpeg
      # EncodeAACFunctionInitFFmpeg
      # EncodeAACFunctionCallFFmpeg
      # DecodeAACFunctionFiniFFmpeg
      # DecodeAACFunctionInitFFmpeg
      # DecodeAACFunctionCallFFmpeg
      l = (_load_library(library, "dependency")
           if sys.platform == "linux" else None)
      if v is not None:
        return v, l
    except NotImplementedError as e:
      warnings.warn("could not load {}: {}".format(library, e))
  raise NotImplementedError(
      "could not find ffmpeg after search through {}".format(p))
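# A minimal usage sketch (not from the original source): searching an ordered
# list of candidate FFmpeg binding libraries until one loads. The library
# names below are hypothetical placeholders.
#
#   ffmpeg_ops, ffmpeg_dep = _load_libraries(
#       ["libtensorflow_io_ffmpeg_4.2.so", "libtensorflow_io_ffmpeg_3.4.so"])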
import ctypes
import _ctypes


def _load_dependency_and_library(p):
  """load_dependency_and_library"""
  for library in p:
    # First try to load all dependencies with RTLD_LOCAL
    entries = []
    for dependency in p[library]:
      try:
        entries.append(ctypes.CDLL(dependency))
      except OSError:
        pass
    if len(entries) == len(p[library]):
      # Dependencies have been satisfied, so load them again with
      # RTLD_GLOBAL; no error is expected this time
      for dependency in p[library]:
        ctypes.CDLL(dependency, mode=ctypes.RTLD_GLOBAL)
      # Load video_op
      v = _load_library(library)
      l = _load_library(library, "dependency")
      return v, l
    # Otherwise we dlclose and retry
    entries.reverse()
    for entry in entries:
      _ctypes.dlclose(entry._handle)  # pylint: disable=protected-access
  raise NotImplementedError(
      "could not find ffmpeg after search through {}".format(p))
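# A usage sketch for _load_dependency_and_library (an illustration, not from
# the original source): keys are candidate op libraries, values are the shared
# objects each one needs preloaded. All file names here are hypothetical.
#
#   candidates = {
#       "libtensorflow_io_ffmpeg_4.2.so": ["libavformat.so.58"],
#       "libtensorflow_io_ffmpeg_3.4.so": ["libavformat.so.57"],
#   }
#   ffmpeg_ops, ffmpeg_dep = _load_dependency_and_library(candidates)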
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from six import iteritems
from six import string_types

from tensorflow.python.data.experimental.ops import interleave_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.data.util import structure
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow_io.core.python.ops import _load_library

_bigtable_so = _load_library("_bigtable.so")


class BigtableClient(object):  # pylint: disable=useless-object-inheritance
  """BigtableClient is the entrypoint for interacting with Cloud Bigtable in TF.

  BigtableClient encapsulates a connection to Cloud Bigtable, and exposes the
  `table` method to open a Bigtable table.
  """

  def __init__(self,
               project_id,
               instance_id,
               connection_pool_size=None,
               max_receive_message_size=None):
    """Creates a BigtableClient that can be used to open connections to tables.
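# A usage sketch (an assumption based on the class docstring, not code from
# this excerpt): opening a table through the client. The project, instance,
# and table names are placeholders.
#
#   client = BigtableClient("my-project", "my-instance")
#   table = client.table("my-table")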
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Kafka Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io.core.python.ops import _load_library

kafka_ops = _load_library('_kafka_ops.so')


class KafkaDataset(data.Dataset):
  """A Kafka Dataset that consumes messages."""

  def __init__(self,
               topics,
               servers="localhost",
               group="",
               eof=False,
               timeout=1000,
               config_global=None,
               config_topic=None):
    """Create a KafkaReader.
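# A usage sketch (an assumption, not part of the original file): consuming a
# hypothetical topic from a local broker. The exact topic string format
# accepted by the kernel is not shown in this excerpt.
#
#   dataset = KafkaDataset(
#       topics=["test"], servers="localhost:9092", group="my-group", eof=True)
#   next_message = dataset.make_one_shot_iterator().get_next()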
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Kinesis Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io.core.python.ops import _load_library

kinesis_ops = _load_library('_kinesis_ops.so')


class KinesisDataset(data.Dataset):
  """A Kinesis Dataset that consumes messages.

  Kinesis is a managed service provided by AWS for data streaming.
  This dataset reads messages from Kinesis with each message presented
  as a `tf.string`.

  For example, we can construct and use the KinesisDataset as follows:
  ```python
  dataset = KinesisDataset(
      "kinesis_stream_name", read_indefinitely=False)
  next = dataset.make_one_shot_iterator().get_next()
  with tf.Session() as sess:
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""GRPCInput."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
from tensorflow.compat.v1 import data
from tensorflow_io.core.python.ops import _load_library

grpc_ops = _load_library('_grpc_ops.so')


class GRPCDataset(data.Dataset):
  """A GRPC Dataset."""

  def __init__(self, endpoint, shape, dtype, batch=None):
    """Create a GRPC Reader.

    Args:
      endpoint: A `tf.string` tensor containing one or more endpoints.
    """
    self._data_input = grpc_ops.grpc_input(endpoint)
    self._batch = 0 if batch is None else batch
    shape[0] = None
    self._output_shapes = tuple([
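# A usage sketch (hypothetical endpoint and shape; the dtype/shape handling is
# an assumption based on the constructor above, where shape[0] is replaced
# with None to allow a variable leading dimension):
#
#   dataset = GRPCDataset(
#       endpoint="localhost:50051", shape=[1, 28, 28], dtype=tf.uint8,
#       batch=32)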
""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections from operator import itemgetter from tensorflow.python.data.experimental.ops import interleave_ops from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import dtypes from tensorflow.python.framework import tensor_spec from tensorflow_io.core.python.ops import _load_library _bigquery_so = _load_library("_bigquery.so") class BigQueryClient(object): """BigQueryClient is the entrypoint for interacting with Cloud BigQuery in TF. BigQueryClient encapsulates a connection to Cloud BigQuery, and exposes the `readSession` method to initiate a BigQuery read session. """ def __init__(self): """Creates a BigQueryClient to start BigQuery read sessions. """ self._client_resource = _bigquery_so.big_query_client() def read_session(self,
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""PcapDataset"""
import tensorflow as tf
from tensorflow_io.core.python.ops import data_ops
from tensorflow_io.core.python.ops import _load_library

pcap_ops = _load_library('_pcap_ops.so')


class PcapDataset(data_ops.Dataset):
  """A pcap Dataset.

  Pcap is a popular file format for capturing network packets.
  """

  def __init__(self, filenames, batch=None):
    """Create a pcap Reader.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
    """
    batch = 0 if batch is None else batch
    dtypes = [tf.float64, tf.string]
    shapes = [
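# A usage sketch (file name is a placeholder): each element carries the packet
# timestamp as tf.float64 and the raw packet bytes as tf.string, matching the
# dtypes declared above.
#
#   dataset = PcapDataset(["capture.pcap"])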
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""PubSub Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io.core.python.ops import _load_library

pubsub_ops = _load_library('_pubsub_ops.so')


class PubSubDataset(data.Dataset):
  """A PubSub Dataset that consumes messages."""

  def __init__(self, subscriptions, server=None, eof=False, timeout=1000):
    """Create a PubSubDataset.

    Args:
      subscriptions: A `tf.string` tensor containing one or more
        subscriptions.
      server: The pubsub server.
      eof: If True, the pubsub reader will stop on EOF.
      timeout: The timeout value for the PubSub to wait (in milliseconds).
    """
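# A usage sketch (the subscription path is a placeholder; the format follows
# standard Cloud Pub/Sub naming, which is an assumption here):
#
#   dataset = PubSubDataset(
#       ["projects/my-project/subscriptions/my-subscription"],
#       server=None, eof=True, timeout=3000)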
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_io.core.python.ops import _load_library

_golang_ops = _load_library('libtensorflow_io_golang.so')

io_prometheus_readable_init = _golang_ops.io_prometheus_readable_init
io_prometheus_readable_spec = _golang_ops.io_prometheus_readable_spec
io_prometheus_readable_read = _golang_ops.io_prometheus_readable_read
io_prometheus_scrape = _golang_ops.io_prometheus_scrape
import tensorflow as tf
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.training import training
from tensorflow_io.core.python.ops import _load_library

# Some GCS operations may be pre-defined and available via tf.contrib in
# earlier TF versions. Because these ops are pre-registered, they will not be
# visible from the _gcs_config_ops library. In this case we use the tf.contrib
# version instead.
tf_v1 = tf.version.VERSION.startswith('1')

if not tf_v1:
  _gcs_config_so = _load_library("_gcs_config_ops.so")
  gcs_configure_credentials = _gcs_config_so.gcs_configure_credentials
  gcs_configure_block_cache = _gcs_config_so.gcs_configure_block_cache


class BlockCacheParams(object):  # pylint: disable=useless-object-inheritance
  """BlockCacheParams is a struct used for configuring the GCS Block Cache."""

  def __init__(self, block_size=None, max_bytes=None, max_staleness=None):
    self._block_size = block_size or 128 * 1024 * 1024
    self._max_bytes = max_bytes or 2 * self._block_size
    self._max_staleness = max_staleness or 0

  @property
  def block_size(self):
    return self._block_size
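# A usage sketch for BlockCacheParams (values are illustrative): with no
# arguments, the struct falls back to 128 MB blocks, a 256 MB byte budget
# (max_bytes defaults to twice block_size), and no staleness bound.
#
#   params = BlockCacheParams(block_size=16 * 1024 * 1024,
#                             max_bytes=64 * 1024 * 1024)
#   assert params.block_size == 16 * 1024 * 1024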
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""BigQuery reading support for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.framework import ops
from tensorflow.python.ops import io_ops
from tensorflow_io.core.python.ops import _load_library

_bigquery_reader_so = _load_library("_bigquery_reader_ops.so")


class BigQueryReader(io_ops.ReaderBase):
  """A Reader that outputs keys and tf.Example values from a BigQuery table.

  Example use:

  ```python
  # Assume a BigQuery table has the following schema,
  #     name STRING,
  #     age INT,
  #     state STRING

  # Create the parse_examples list of features.
  features = dict(
      name=tf.FixedLenFeature([1], tf.string),
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SequenceFile Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io.core.python.ops import _load_library

hadoop_ops = _load_library('_hadoop_ops.so')


class SequenceFileDataset(data.Dataset):
  """A Sequence File Dataset that reads the sequence file."""

  def __init__(self, filenames):
    """Create a `SequenceFileDataset`.

    `SequenceFileDataset` allows a user to read data from a hadoop sequence
    file. A sequence file consists of (key, value) pairs stored sequentially.
    At the moment, `org.apache.hadoop.io.Text` is the only supported
    serialization type, and there is no compression support.

    For example:

    ```python
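# A usage sketch (the file name is a placeholder): reading (key, value) Text
# pairs from a hadoop sequence file, per the docstring above.
#
#   dataset = SequenceFileDataset(["part-00000"])
#   key_and_value = dataset.make_one_shot_iterator().get_next()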
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Image Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
from tensorflow import dtypes
from tensorflow.compat.v1 import data
from tensorflow_io.core.python.ops import _load_library

image_ops = _load_library('_image_ops.so')


class WebPDataset(data.Dataset):
  """A WebP Image File Dataset that reads the WebP file."""

  def __init__(self, filenames):
    """Create a `WebPDataset`.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
    """
    self._filenames = tf.convert_to_tensor(
        filenames, dtype=dtypes.string, name="filenames")
    super(WebPDataset, self).__init__()

  def _inputs(self):
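# A usage sketch (the file name is a placeholder): decoding one or more WebP
# images into a dataset of image tensors.
#
#   dataset = WebPDataset(["sample.webp"])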
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""OSS File System Support.

OSS is an Object Storage Service provided by Alibaba Cloud. This module
implements a filesystem on top of it.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_io.core.python.ops import _load_library

_load_library("_oss_ops.so")
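# A usage sketch (an assumption, not from this file): once the plugin is
# loaded, OSS paths become visible to TensorFlow's file APIs. The exact URI
# format (bucket naming, credentials) depends on the plugin's registration.
#
#   import tensorflow as tf
#   tf.io.gfile.exists("oss://my-bucket/path")  # hypothetical URI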
"""LibSVM Dataset.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow import sparse from tensorflow.compat.v1 import data from tensorflow_io.core.python.ops import _load_library gen_libsvm_ops = _load_library('_libsvm_ops.so') def decode_libsvm(content, num_features, dtype=None, label_dtype=None): """Convert Libsvm records to a tensor of label and a tensor of feature. Args: content: A `Tensor` of type `string`. Each string is a record/row in the Libsvm format. num_features: The number of features. dtype: The type of the output feature tensor. Default to tf.float32. label_dtype: The type of the output label tensor. Default to tf.int64. Returns: features: A `SparseTensor` of the shape `[input_shape, num_features]`. labels: A `Tensor` of the same shape as content. """ labels, indices, values, shape = gen_libsvm_ops.decode_libsvm( content, num_features, dtype=dtype, label_dtype=label_dtype) return sparse.SparseTensor(indices, values, shape), labels def make_libsvm_dataset(file_names, num_features, dtype=None,