Example #1
def init_uninitialized_vars(target_vars: List[tf.Variable] = None) -> None:
    """Initialize all tf.Variables that have not already been initialized.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
    tf.variables_initializer(tf.report_uninitialized_variables()).run()
    """
    assert_tf_initialized()
    if target_vars is None:
        target_vars = tf.global_variables()

    test_vars = []
    test_ops = []

    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in target_vars:
            assert is_tf_expression(var)

            try:
                tf.get_default_graph().get_tensor_by_name(
                    var.name.replace(":0", "/IsVariableInitialized:0"))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)

                with absolute_name_scope(var.name.split(":")[0]):
                    test_ops.append(tf.is_variable_initialized(var))

    init_vars = [
        var for var, inited in zip(test_vars, run(test_ops)) if not inited
    ]
    run([var.initializer for var in init_vars])
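A minimal usage sketch, assuming TensorFlow 1.x and that the helpers referenced above (assert_tf_initialized, is_tf_expression, absolute_name_scope, run) are importable from the same module as init_uninitialized_vars; the variable names are illustrative only:

import tensorflow as tf

a = tf.Variable(tf.zeros([3]), name="a")   # will be initialized by hand
b = tf.Variable(tf.ones([3]), name="b")    # deliberately left uninitialized

sess = tf.Session()
with sess.as_default():
    sess.run(a.initializer)       # only "a" is initialized at this point
    init_uninitialized_vars()     # runs b.initializer, leaves "a" untouched
    print(sess.run([a, b]))       # both variables are now usable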
Example #2
    def __init__(self, remote_vars, local_vars, *args, **kwargs):
        if local_vars:
            self.has_init = tf.is_variable_initialized(local_vars[0])
        else:
            self.has_init = tf.constant(True)

        ready_for_local_init_op = tf.report_uninitialized_variables(
            var_list=remote_vars)

        self.init_op = tf.group(tf.initialize_variables(local_vars),
                                *tf.get_collection(tf.GraphKeys.LOCAL_INIT_OP))

        if "scaffold" in kwargs:
            # TODO(lmetz) I think this could technically be supported?
            raise ValueError("Do not set scaffold on the session creator.")

        scaffold = tf.train.Scaffold(
            ready_for_local_init_op=ready_for_local_init_op,
            ready_op=ready_for_local_init_op,
            local_init_op=ready_for_local_init_op,
            init_op=ready_for_local_init_op,
            init_fn=self._maybe_initialize_local_vars_and_state_fn,
            summary_op=tf.summary.merge([tf.summary.scalar("dummy", 0)]))

        kwargs["scaffold"] = scaffold
        super(WorkerSessionCreator, self).__init__(*args, **kwargs)
Example #3
def _create_var(name: str, value_expr: TfExpression) -> TfExpression:
    """Internal helper for creating autosummary accumulators."""
    assert not _finalized
    name_id = name.replace("/", "_")
    v = tf.cast(value_expr, _dtype)

    if v.shape.is_fully_defined():
        size = np.prod(v.shape.as_list())
        size_expr = tf.constant(size, dtype=_dtype)
    else:
        size = None
        size_expr = tf.reduce_prod(tf.cast(tf.shape(v), _dtype))

    if size == 1:
        if v.shape.ndims != 0:
            v = tf.reshape(v, [])
        v = [size_expr, v, tf.square(v)]
    else:
        v = [size_expr, tf.reduce_sum(v), tf.reduce_sum(tf.square(v))]
    v = tf.cond(tf.is_finite(v[1]), lambda: tf.stack(v),
                lambda: tf.zeros(3, dtype=_dtype))

    with tfutil.absolute_name_scope("Autosummary/" +
                                    name_id), tf.control_dependencies(None):
        var = tf.Variable(tf.zeros(3, dtype=_dtype),
                          trainable=False)  # [sum(1), sum(x), sum(x**2)]
    update_op = tf.cond(tf.is_variable_initialized(var),
                        lambda: tf.assign_add(var, v),
                        lambda: tf.assign(var, v))

    if name in _vars:
        _vars[name].append(var)
    else:
        _vars[name] = [var]
    return update_op
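A hedged sketch of how the [sum(1), sum(x), sum(x**2)] layout used above can be reduced to a mean and standard deviation; it assumes TensorFlow 1.x, and the stand-alone accumulator below is only an illustration, not the module's own _vars machinery:

import tensorflow as tf

acc = tf.Variable(tf.zeros(3), trainable=False)   # [count, sum, sum of squares]
x = tf.placeholder(tf.float32, shape=[None])
update = tf.assign_add(acc, tf.stack([
    tf.cast(tf.size(x), tf.float32),
    tf.reduce_sum(x),
    tf.reduce_sum(tf.square(x)),
]))

mean = acc[1] / acc[0]
std = tf.sqrt(acc[2] / acc[0] - tf.square(mean))  # E[x^2] - E[x]^2

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(update, {x: [1.0, 2.0, 3.0]})
    sess.run(update, {x: [4.0]})
    print(sess.run([mean, std]))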
Example #4
def get_uninitialized_variables(sess):
  """Return (and print) the global variables that `sess` has not yet initialized."""
  global_vars = tf.global_variables()

  # print([str(i.name) for i in global_vars])

  is_not_initialized = sess.run([tf.is_variable_initialized(var) for var in global_vars])
  not_initialized_vars = [v for (v, f) in zip(global_vars, is_not_initialized) if not f]
  print([str(i.name) for i in not_initialized_vars])
  return not_initialized_vars
Example #5
    def _initialize_uninitialized(self, sess):
        global_vars = tf.global_variables()
        is_not_initialized = sess.run(
            [tf.is_variable_initialized(var) for var in global_vars])
        not_initialized_vars = [
            v for (v, f) in zip(global_vars, is_not_initialized) if not f
        ]

        if not_initialized_vars:
            sess.run(tf.variables_initializer(not_initialized_vars))
Example #6
def initialize_uninitialized(sess):
    """Run the initializers of any global variables that are not yet initialized in `sess`."""
    global_vars = tf.global_variables()
    is_not_initialized = sess.run([tf.is_variable_initialized(var) for var in global_vars])
    not_initialized_vars = [v for (v, f) in zip(global_vars, is_not_initialized) if not f]

    # for i in not_initialized_vars: # only for testing
    #    print(i.name)

    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))

    return
Example #7
def initialize_uninitialized_variables(sess):
    """
    Only initialize the weights that have not yet been initialized by other
    means, such as importing a metagraph and a checkpoint. It's useful when
    extending an existing model.
    """
    uninit_vars = []
    uninit_tensors = []
    for var in tf.global_variables():
        uninit_vars.append(var)
        uninit_tensors.append(tf.is_variable_initialized(var))
    uninit_bools = sess.run(uninit_tensors)
    uninit = zip(uninit_bools, uninit_vars)
    uninit = [var for init, var in uninit if not init]
    sess.run(tf.variables_initializer(uninit))
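A short sketch of the "extending an existing model" case the docstring above describes, assuming TensorFlow 1.x; the checkpoint path "pretrained.ckpt", the shape, and the variable name "new_head" are hypothetical:

import tensorflow as tf

with tf.Session() as sess:
    saver = tf.train.import_meta_graph("pretrained.ckpt.meta")   # hypothetical checkpoint
    saver.restore(sess, "pretrained.ckpt")                       # restored weights count as initialized
    new_head = tf.Variable(tf.zeros([128, 10]), name="new_head") # new layer added on top
    initialize_uninitialized_variables(sess)                     # initializes only new_head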
Example #8
def initialize_uninitialized_global_variables(sess):
    # from https://github.com/tensorflow/cleverhans/tree/master/cleverhans
    # List all global variables
    global_vars = tf.global_variables()
    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [
        var for (var, init) in zip(global_vars, is_initialized) if not init
    ]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #9
def init_uninited_vars(vars=None):
    """Initialize all tf.Variables in `vars` (default: all global variables) that have not been initialized yet."""
    if vars is None:
        vars = tf.global_variables()
    test_vars = []
    test_ops = []
    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(
                    var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [
        var for var, inited in zip(test_vars, run(test_ops)) if not inited
    ]
    run([var.initializer for var in init_vars])
Example #10
def initialize_uninitialized_global_variables(sess):
  """
  Only initializes the variables of a TensorFlow session that were not
  already initialized.
  :param sess: the TensorFlow session
  :return:
  """
  # List all global variables
  global_vars = tf.global_variables()

  # Find initialized status for all variables
  is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
  is_initialized = sess.run(is_var_init)

  # List all variables that were not initialized previously
  not_initialized_vars = [var for (var, init) in
                          zip(global_vars, is_initialized) if not init]

  # Initialize all uninitialized variables found, if any
  if len(not_initialized_vars):
    sess.run(tf.variables_initializer(not_initialized_vars))
Example #11
def _create_autosummary_var(name, value_expr):
    assert not _autosummary_finalized
    v = tf.cast(value_expr, tf.float32)
    if v.shape.ndims == 0:
        v = [v, np.float32(1.0)]
    elif v.shape.ndims == 1:
        v = [tf.reduce_sum(v), tf.cast(tf.shape(v)[0], tf.float32)]
    else:
        v = [
            tf.reduce_sum(v),
            tf.reduce_prod(tf.cast(tf.shape(v), tf.float32))
        ]
    v = tf.cond(tf.is_finite(v[0]), lambda: tf.stack(v), lambda: tf.zeros(2))
    with tf.control_dependencies(None):
        var = tf.Variable(tf.zeros(2))  # [numerator, denominator]
    update_op = tf.cond(tf.is_variable_initialized(var),
                        lambda: tf.assign_add(var, v),
                        lambda: tf.assign(var, v))
    if name in _autosummary_vars:
        _autosummary_vars[name].append(var)
    else:
        _autosummary_vars[name] = [var]
    return update_op
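A hedged sketch of how the [numerator, denominator] accumulators collected in _autosummary_vars could be reduced to a single scalar when a summary is finally written; summary_mean is an illustrative name, not part of the original module:

import tensorflow as tf

def summary_mean(name):
    # Element-wise sum of every [numerator, denominator] pair registered under `name`,
    # then numerator / denominator gives the running mean of the logged values.
    total = tf.add_n(list(_autosummary_vars[name]))
    return total[0] / total[1]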