  def test_flags_singleton(self):
    flags.config().test_only_experiment_1.reset(False)
    self.assertFalse(flags.config().test_only_experiment_1.value())

    # Get a second reference to the underlying Flags singleton.
    flag = flags.flags_pybind.Flags()
    flag.test_only_experiment_1.reset(True)

    # Check that both references are correctly updated.
    self.assertTrue(flags.config().test_only_experiment_1.value())
    self.assertTrue(flag.test_only_experiment_1.value())
  def test_basic_module(self):
    flags.config().saved_model_fingerprinting.reset(True)
    save_dir = self._create_saved_model()
    files = file_io.list_directory_v2(save_dir)

    self.assertLen(files, 4)
    self.assertIn(constants.FINGERPRINT_FILENAME, files)

    fingerprint_def = self._read_fingerprint(
        file_io.join(save_dir, constants.FINGERPRINT_FILENAME))
    # We cannot check the value due to non-determinism in serialization.
    self.assertGreater(fingerprint_def.graph_def_hash, 0)
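
  # The test above depends on two helpers that are not shown. A minimal
  # sketch of what they could look like, assuming the public
  # `tf.saved_model.save` API and the `FingerprintDef` proto from
  # `tensorflow.core.protobuf.fingerprint_pb2`; the real test class likely
  # saves a richer module and uses TensorFlow-internal imports.
  def _create_saved_model(self):
    save_dir = self.create_tempdir().full_path
    tf.saved_model.save(tf.Module(), save_dir)
    return save_dir

  def _read_fingerprint(self, filename):
    serialized = file_io.read_file_to_string(filename)
    fingerprint_def = fingerprint_pb2.FingerprintDef()
    fingerprint_def.ParseFromString(serialized)
    return fingerprint_def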
  def test_experiment_flag(self):
    self.assertTrue(flags.config().test_only_experiment_1.value())
    self.assertFalse(flags.config().test_only_experiment_2.value())

    flags.config().test_only_experiment_1.reset(False)
    flags.config().test_only_experiment_2.reset(True)

    self.assertFalse(flags.config().test_only_experiment_1.value())
    self.assertTrue(flags.config().test_only_experiment_2.value())
  def benchmarkResourceVariableOp(self):
    num_ops = 100
    num_iters = 10
    duration = self._computeReadVariableOpDuration(num_ops, num_iters)
    name = "BenchmarkReadVariableOp"
    if flags.config().graph_building_optimization.value():
      name += "WithGraphBuildingOptimization"
    self.report_benchmark(name=name,
                          iters=num_iters,
                          wall_time=duration,
                          extras={"num_ops": num_ops})
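
  # `_computeReadVariableOpDuration` is not shown above. A rough sketch,
  # assuming eager execution, a plain wall-clock timer, and hypothetical
  # `import time` / `import tensorflow as tf` at module level; the real
  # benchmark may construct and time the reads differently.
  def _computeReadVariableOpDuration(self, num_ops, num_iters):
    v = tf.Variable(1.0)
    start = time.time()
    for _ in range(num_iters):
      for _ in range(num_ops):
        v.read_value()
    return (time.time() - start) / num_iters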
  def test_toggle_flag(self):
    self.assertFalse(flags.config().saved_model_fingerprinting.value())
    flags.config().saved_model_fingerprinting.reset(True)
    self.assertTrue(flags.config().saved_model_fingerprinting.value())
def _apply_op_helper(op_type_name, name=None, **keywords):  # pylint: disable=invalid-name
  """Implementation of apply_op that returns output_structure, op."""

  op_def, g, producer = _GetOpDef(op_type_name, keywords)
  name = name if name else op_type_name

  attrs, attr_protos = {}, {}
  default_type_attr_map, allowed_list_attr_map = {}, {}
  inputs, input_types, output_structure = [], [], []
  fallback = True

  if (_CanExtractAttrsFastPath(op_def, keywords) and
      flags.config().graph_building_optimization.value()):
    fallback = False
    attr_protos, inputs, input_types, output_structure = (
        op_def_library_pybind.process_inputs(op_type_name, producer, keywords))

  if fallback:
    _CheckOpDeprecation(op_type_name, op_def, producer)
    _ExtractDefaultTypesAndAllowedTypes(op_def, default_type_attr_map,
                                        allowed_list_attr_map)

  # Requires that op_def has passed validation (using the C++
  # ValidateOpDef() from ../framework/op_def_util.h).
  with g.as_default(), ops.name_scope(name) as scope:
    if fallback:
      _ExtractInputsAndAttrs(op_type_name, op_def, allowed_list_attr_map,
                             keywords, default_type_attr_map, attrs, inputs,
                             input_types)
      _ExtractRemainingAttrs(op_type_name, op_def, keywords,
                             default_type_attr_map, attrs)
      _ExtractAttrProto(op_type_name, op_def, attrs, attr_protos)
      del attrs  # attrs is no longer authoritative, use attr_protos instead
      _ExtractOutputStructure(op_type_name, op_def, attr_protos,
                              output_structure)
      _CheckAllInputsUsed(op_type_name, keywords)

    # NOTE(mrry): We add an explicit colocation constraint between
    # the newly created op and any of its reference-typed inputs.
    must_colocate_inputs = [val for arg, val in zip(op_def.input_arg, inputs)
                            if arg.is_ref]
    with _MaybeColocateWith(must_colocate_inputs):
      # Add Op to graph
      # pylint: disable=protected-access
      op = g._create_op_internal(op_type_name, inputs, dtypes=None,
                                 name=scope, input_types=input_types,
                                 attrs=attr_protos, op_def=op_def)

    # `outputs` is returned as a separate return value so that the output
    # tensors and the `op` per se can be decoupled, which lets the
    # `op_callbacks` function properly. See framework/op_callbacks.py
    # for more details.
    outputs = op.outputs
    # Conditionally invoke tfdbg v2's op callback(s).
    if op_callbacks.should_invoke_op_callbacks():
      callback_outputs = op_callbacks.invoke_op_callbacks(
          op.node_def.op, tuple(op.inputs), attr_protos, tuple(outputs),
          op_name=op.name, graph=g)
      if callback_outputs is not None:
        outputs = callback_outputs

    return output_structure, op_def.is_stateful, op, outputs
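
# `_apply_op_helper` is normally invoked from generated op wrappers rather
# than called directly. A hedged sketch of a typical call site (the op name
# and argument names are illustrative):
#
#   _, _, op, outputs = _apply_op_helper("AddV2", x=x, y=y, name=name)
#   result = outputs[0]
#
# The first return value, `output_structure`, describes how the flat
# `outputs` list is regrouped when an op has list-valued outputs.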
  def setUp(self):
    super().setUp()
    flags.config().saved_model_fingerprinting.reset(True)