Example #1
def test_deprecated_illegal_args(self):
  instructions = "This is how you update..."
  with self.assertRaisesRegexp(ValueError, "date"):
    deprecation.deprecated(None, instructions)
  with self.assertRaisesRegexp(ValueError, "date"):
    deprecation.deprecated("", instructions)
  with self.assertRaisesRegexp(ValueError, "YYYY-MM-DD"):
    deprecation.deprecated("07-04-2016", instructions)
  date = "2016-07-04"
  with self.assertRaisesRegexp(ValueError, "instructions"):
    deprecation.deprecated(date, None)
  with self.assertRaisesRegexp(ValueError, "instructions"):
    deprecation.deprecated(date, "")
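The assertions above pin down the decorator's argument contract: the date must be an ISO `YYYY-MM-DD` string and the instructions must be a non-empty string. For reference, a minimal sketch of a well-formed call on a hypothetical function (`compute_legacy` and its suggested replacement are illustrative names only):

# Sketch, not part of the snippet above; `compute_legacy` is hypothetical.
from tensorflow.python.util import deprecation

@deprecation.deprecated("2016-07-04", "Use the newer API instead.")
def compute_legacy(x):
  return x + 1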
Example #2
def test_deprecated_namedtuple(self, mock_warning):
    date = "2016-07-04"
    instructions = "This is how you update..."

    mytuple = deprecation.deprecated(date, instructions, warn_once=True)(
        collections.namedtuple("my_tuple", ["field1", "field2"]))

    mytuple(1, 2)
    self.assertEqual(1, mock_warning.call_count)
    mytuple(3, 4)
    self.assertEqual(1, mock_warning.call_count)
    self.assertIn("IS DEPRECATED", mytuple.__doc__)
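Two things make the assertions above pass: `warn_once=True` suppresses the warning after the first instantiation, so `mock_warning.call_count` stays at 1 when `mytuple(3, 4)` is constructed, and the decorator also rewrites the wrapped class's docstring, which is why "IS DEPRECATED" shows up in `mytuple.__doc__`.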
Example #3
from tensorflow.python.training import monitored_session
from tensorflow.python.training import queue_runner
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training import session_manager as session_manager_lib
from tensorflow.python.training import summary_io
from tensorflow.python.training import supervisor as tf_supervisor
from tensorflow.python.util.deprecation import deprecated

# Singleton for SummaryWriter per logdir folder.
_SUMMARY_WRITERS = {}

# Lock protecting _SUMMARY_WRITERS
_summary_writer_lock = threading.Lock()

_graph_action_deprecation = deprecated(
    '2017-02-15',
    'graph_actions.py will be deleted. Use tf.train.* utilities instead. '
    'You can use learn/estimators/estimator.py as an example.')
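# `_graph_action_deprecation` holds a ready-made decorator, so every function
# it wraps below gets the same removal date and migration message.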


@_graph_action_deprecation
def clear_summary_writers():
    """Clear cached summary writers. Currently only used for unit tests."""
    return summary_io.SummaryWriterCache.clear()


def get_summary_writer(logdir):
    """Returns single SummaryWriter per logdir in current run.

  Args:
    logdir: str, folder to write summaries.
Example #4
                m = np.max(a.shape[-2:].as_list())
            else:
                m = tf.reduce_max(tf.shape(a)[-2:])
            eps = np.finfo(dtype_util.as_numpy_dtype(a.dtype)).eps
            tol = (eps * tf.cast(m, a.dtype) *
                   tf.reduce_max(s, axis=-1, keepdims=True))
        return tf.reduce_sum(tf.cast(s > tol, tf.int32), axis=-1)


# Prefer the native implementation when TF provides it; otherwise keep the
# local fallback defined above.
try:
    matrix_rank = tf.linalg.matrix_rank
except AttributeError:
    pass

matrix_rank = deprecation.deprecated(
    '2019-10-01',
    'tfp.math.matrix_rank is deprecated. Use tf.linalg.matrix_rank instead',
    warn_once=True)(matrix_rank)


def cholesky_concat(chol, cols, name=None):
    """Concatenates `chol @ chol.T` with additional rows and columns.

  This operation is conceptually identical to:
  ```python
  def cholesky_concat_slow(chol, cols):  # cols shaped (n + m) x m = z x m
    mat = tf.matmul(chol, chol, adjoint_b=True)  # batch of n x n
    # Concat columns.
    mat = tf.concat([mat, cols[..., :tf.shape(mat)[-2], :]], axis=-1)  # n x z
    # Concat rows.
    mat = tf.concat([mat, tf.linalg.matrix_transpose(cols)], axis=-2)  # z x z
    return tf.linalg.cholesky(mat)
Example #5
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions for computing statistics of samples."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_probability.python import stats
from tensorflow.python.util import deprecation

__all__ = [
    "auto_correlation",
    "percentile",
]

auto_correlation_deprecator = deprecation.deprecated(
    "2018-10-01",
    "auto_correlation is moved to the `stats` namespace.  Access it via: "
    "`tfp.stats.auto_correlation`.",
    warn_once=True)
auto_correlation = auto_correlation_deprecator(stats.auto_correlation)

percentile_deprecator = deprecation.deprecated(
    "2018-10-01",
    "percentile is moved to the `stats` namespace.  Access it via: "
    "`tfp.stats.percentile`.",
    warn_once=True)
percentile = percentile_deprecator(stats.percentile)
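Both relocated symbols follow the same pattern: a deprecator built once with `warn_once=True` is applied to the implementation that now lives under `tfp.stats`, so the old module-level names keep working but emit a single warning pointing at the new namespace.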
Example #6
            scale = tf.convert_to_tensor(value=scale,
                                         name="softplus_scale",
                                         dtype=dtype)
            super(_InverseGammaWithSoftplusConcentrationScale, self).__init__(
                concentration=tf.nn.softplus(concentration,
                                             name="softplus_concentration"),
                scale=tf.nn.softplus(scale, name="softplus_scale"),
                validate_args=validate_args,
                allow_nan_stats=allow_nan_stats,
                name=name)
        self._parameters = parameters


_rate_deprecator = deprecation.deprecated(
    "2019-06-05",
    "InverseGammaWithSoftplusConcentrationRate is deprecated, use "
    "InverseGamma(concentration=tf.nn.softplus(concentration), "
    "scale=tf.nn.softplus(scale)) instead.",
    warn_once=True)
# pylint: disable=invalid-name
InverseGammaWithSoftplusConcentrationRate = _rate_deprecator(
    _InverseGammaWithSoftplusConcentrationScale)

_scale_deprecator = deprecation.deprecated(
    "2019-06-05",
    "InverseGammaWithSoftplusConcentrationScale is deprecated, use "
    "InverseGamma(concentration=tf.nn.softplus(concentration), "
    "scale=tf.nn.softplus(scale)) instead.",
    warn_once=True)
InverseGammaWithSoftplusConcentrationScale = _scale_deprecator(
    _InverseGammaWithSoftplusConcentrationScale)
Example #7
from tensorflow_probability.python.math.root_search import secant_root
from tensorflow_probability.python.math.scan_associative import scan_associative
from tensorflow_probability.python.math.sparse import dense_to_sparse
from tensorflow_probability.python.math.special import erfcinv
from tensorflow_probability.python.math.special import lambertw
from tensorflow_probability.python.math.special import lambertw_winitzki_approx
from tensorflow_probability.python.math.special import lbeta
from tensorflow_probability.python.math.special import log_gamma_correction
from tensorflow_probability.python.math.special import log_gamma_difference
from tensorflow_probability.python.math.special import round_exponential_bump_function
from tensorflow_probability.python.random import rademacher as random_rademacher
from tensorflow_probability.python.random import rayleigh as random_rayleigh

from tensorflow.python.util import deprecation  # pylint: disable=g-direct-tensorflow-import

random_rademacher = deprecation.deprecated(
    '2020-09-20', 'Use tfp.random.rademacher')(random_rademacher)
random_rayleigh = deprecation.deprecated(
    '2020-09-20', 'Use tfp.random.rayleigh')(random_rayleigh)

_allowed_symbols = [
    'round_exponential_bump_function',
    'batch_interp_regular_1d_grid',
    'batch_interp_regular_nd_grid',
    'bessel_iv_ratio',
    'bessel_ive',
    'bessel_kve',
    'cholesky_concat',
    'cholesky_update',
    'clip_by_value_preserve_gradient',
    'custom_gradient',
    'dense_to_sparse',
Example #8
from tensorflow_probability.python.internal.reparameterization import FULLY_REPARAMETERIZED
from tensorflow_probability.python.internal.reparameterization import NOT_REPARAMETERIZED
from tensorflow_probability.python.internal.reparameterization import ReparameterizationType

# Deprecated:
from tensorflow_probability.python.experimental.substrates.numpy.math.generic import reduce_weighted_logsumexp as _reduce_weighted_logsumexp
from tensorflow_probability.python.experimental.substrates.numpy.math.generic import softplus_inverse as _softplus_inverse
from tensorflow_probability.python.experimental.substrates.numpy.math.linalg import fill_triangular as _fill_triangular
from tensorflow_probability.python.experimental.substrates.numpy.math.linalg import fill_triangular_inverse as _fill_triangular_inverse
from tensorflow_probability.python.experimental.substrates.numpy.util.seed_stream import SeedStream as _SeedStream

# Module management:
from tensorflow_probability.python.experimental.substrates.numpy.distributions.kullback_leibler import augment_kl_xent_docs
from tensorflow.python.util import deprecation  # pylint: disable=g-direct-tensorflow-import

_deprecated = deprecation.deprecated('2019-10-01',
                                     'This function has moved to `tfp.math`.')

fill_triangular = _deprecated(_fill_triangular)

fill_triangular_inverse = _deprecated(_fill_triangular_inverse)

softplus_inverse = _deprecated(_softplus_inverse)

reduce_weighted_logsumexp = _deprecated(_reduce_weighted_logsumexp)


class SeedStream(_SeedStream):

    __init__ = deprecation.deprecated(
        '2019-10-01',
        'SeedStream has moved to `tfp.util.SeedStream`.')(_SeedStream.__init__)
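Only `__init__` is wrapped here, so the deprecation warning fires when a `SeedStream` is constructed through this module, while the resulting objects still behave as (and are instances of) the relocated `_SeedStream` class.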
Example #9
class SeedStream(_SeedStream):

    __init__ = deprecation.deprecated(
        '2019-10-01',
        'SeedStream has moved to `tfp.util.SeedStream`.')(_SeedStream.__init__)
Example #10
Deprecated: please see the new location of this module at `tfx.types.artifact`
and `tfx.types.artifact_utils`.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from typing import Dict, List, Text

from tensorflow.python.util import deprecation  # pylint: disable=g-direct-tensorflow-import
from tfx.types import artifact_utils
from tfx.types.artifact import Artifact

TfxType = deprecation.deprecated(  # pylint: disable=invalid-name
    None,
    'tfx.utils.types.TfxType has been renamed to tfx.types.Artifact as of '
    'TFX 0.14.0.')(Artifact)

TfxArtifact = deprecation.deprecated(  # pylint: disable=invalid-name
    None,
    'tfx.utils.types.TfxArtifact has been renamed to tfx.types.Artifact as of '
    'TFX 0.14.0.')(Artifact)


@deprecation.deprecated(
    None, 'tfx.utils.types.parse_tfx_type_dict has been renamed to '
    'tfx.types.artifact_utils.parse_artifact_dict as of TFX 0.14.0.')
def parse_tfx_type_dict(json_str: Text) -> Dict[Text, List[Artifact]]:
    return artifact_utils.parse_artifact_dict(json_str)
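In all three deprecations above the date argument is `None`, which marks the old names as deprecated without committing to a removal date; the warning text simply points at the renamed `tfx.types` APIs.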

Example #11
                 name="InverseGammaWithSoftplusConcentrationScale"):
        if rate is not None:
            scale = rate
        parameters = dict(locals())
        with tf.name_scope(name, values=[concentration, scale]) as name:
            dtype = dtype_util.common_dtype([concentration, scale])
            concentration = tf.convert_to_tensor(value=concentration,
                                                 name="softplus_concentration",
                                                 dtype=dtype)
            scale = tf.convert_to_tensor(value=scale,
                                         name="softplus_scale",
                                         dtype=dtype)
            super(InverseGammaWithSoftplusConcentrationScale, self).__init__(
                concentration=tf.nn.softplus(concentration,
                                             name="softplus_concentration"),
                scale=tf.nn.softplus(scale, name="softplus_scale"),
                validate_args=validate_args,
                allow_nan_stats=allow_nan_stats,
                name=name)
        self._parameters = parameters


_rate_deprecator = deprecation.deprecated(
    "2019-05-08",
    "InverseGammaWithSoftplusConcentrationRate is deprecated, use "
    "InverseGammaWithSoftplusConcentrationScale instead.",
    warn_once=True)
# pylint: disable=invalid-name
InverseGammaWithSoftplusConcentrationRate = _rate_deprecator(
    InverseGammaWithSoftplusConcentrationScale)
Example #12
from tensorflow.python.training import coordinator
from tensorflow.python.training import queue_runner
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training import session_manager as session_manager_lib
from tensorflow.python.training import summary_io
from tensorflow.python.training import supervisor as tf_supervisor
from tensorflow.python.util.deprecation import deprecated

# Singleton for SummaryWriter per logdir folder.
_SUMMARY_WRITERS = {}

# Lock protecting _SUMMARY_WRITERS
_summary_writer_lock = threading.Lock()

_graph_action_deprecation = deprecated(
    '2017-02-15',
    'graph_actions.py will be deleted. Use tf.train.* utilities instead. '
    'You can use learn/estimators/estimator.py as an example.')


@_graph_action_deprecation
def clear_summary_writers():
  """Clear cached summary writers. Currently only used for unit tests."""
  return summary_io.SummaryWriterCache.clear()


@deprecated(None, 'Use `SummaryWriterCache.get` directly.')
def get_summary_writer(logdir):
  """Returns single SummaryWriter per logdir in current run.

  Args:
    logdir: str, folder to write summaries.
Example #13
    'pipeline_config_path', '',
    'Path to a pipeline_pb2.TrainEvalPipelineConfig config '
    'file. If provided, other configs are ignored')
flags.DEFINE_string('eval_config_path', '',
                    'Path to an eval_pb2.EvalConfig config file.')
flags.DEFINE_string('input_config_path', '',
                    'Path to an input_reader_pb2.InputReader config file.')
flags.DEFINE_string('model_config_path', '',
                    'Path to a model_pb2.DetectionModel config file.')
flags.DEFINE_boolean(
    'run_once', False, 'Option to only run a single pass of '
    'evaluation. Overrides the `max_evals` parameter in the '
    'provided config.')
FLAGS = flags.FLAGS


@deprecated(None, 'Use object_detection/model_main.py.')
def main(unused_argv):
    assert FLAGS.checkpoint_dir, '`checkpoint_dir` is missing.'
    assert FLAGS.eval_dir, '`eval_dir` is missing.'
    tf.gfile.MakeDirs(FLAGS.eval_dir)
    if FLAGS.pipeline_config_path:
        configs = config_util.get_configs_from_pipeline_file(
            FLAGS.pipeline_config_path)
        tf.gfile.Copy(FLAGS.pipeline_config_path,
                      os.path.join(FLAGS.eval_dir, 'pipeline.config'),
                      overwrite=True)
    else:
        configs = config_util.get_configs_from_multiple_files(
            model_config_path=FLAGS.model_config_path,
Example #14
from tensorflow.python.ops import gen_sparse_ops
from tensorflow.python.ops import gen_spectral_ops
from tensorflow.python.platform import tf_logging as logging
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_math_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.util import compat
from tensorflow.python.util import deprecation
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export

# Aliases for some automatically-generated names.
linspace = gen_math_ops.lin_space

arg_max = deprecation.deprecated(None, "Use `argmax` instead")(arg_max)  # pylint: disable=used-before-assignment
arg_min = deprecation.deprecated(None, "Use `argmin` instead")(arg_min)  # pylint: disable=used-before-assignment
tf_export("arg_max")(arg_max)
tf_export("arg_min")(arg_min)
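Re-exporting the wrapped ops under their original names keeps `tf.arg_max` and `tf.arg_min` available in the public API while steering callers to `argmax` and `argmin` via the deprecation message.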

# This is set by resource_variable_ops.py. It is included in this way since
# there is a circular dependency between math_ops and resource_variable_ops
_resource_variable_type = None


def _set_doc(doc):
    def _decorator(func):
        func.__doc__ = doc
        return func

    return _decorator
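`_set_doc` simply overwrites the wrapped function's docstring and hands the function back. A minimal usage sketch (the decorated function below is hypothetical, not part of math_ops):

@_set_doc("Adds one to `x`. Illustrative docstring only.")
def _hypothetical_add_one(x):
    return x + 1

# _hypothetical_add_one.__doc__ now equals the string passed to _set_doc.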