def register_func(var_name):
    """Register proto conversion functions for an NPU RunConfig collection."""
    ops.register_proto_function(
        '{}_{}'.format(_NPU_RUNCONFIG, var_name),
        proto_type=variable_pb2.VariableDef,
        to_proto=resource_variable_ops._to_proto_fn,
        from_proto=resource_variable_ops._from_proto_fn)
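# A minimal usage sketch, not part of the original file: the collection key
# 'iterations_per_loop' is a hypothetical RunConfig field name. register_func
# would be called once per such field so that variables stored under the
# derived '<_NPU_RUNCONFIG>_<name>' collection serialize to and from
# VariableDef protos when the graph is exported.
register_func('iterations_per_loop')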
    # Following check is due to backward compatibility. (b/62061352)
    if sess.__class__.__name__ in [
        "MonitoredSession", "SingularMonitoredSession"]:
      return []
    raise TypeError("sess must be a `tf.Session` object. "
                    "Given class: {}".format(sess.__class__))

  queue_runners = ops.get_collection(collection)
  if not queue_runners:
    logging.warning(
        "`tf.train.start_queue_runners()` was called when no queue runners "
        "were defined. You can safely remove the call to this deprecated "
        "function.")

  with sess.graph.as_default():
    threads = []
    for qr in ops.get_collection(collection):
      threads.extend(
          qr.create_threads(sess, coord=coord, daemon=daemon, start=start))
  return threads


ops.register_proto_function(ops.GraphKeys.QUEUE_RUNNERS,
                            proto_type=queue_runner_pb2.QueueRunnerDef,
                            to_proto=QueueRunner.to_proto,
                            from_proto=QueueRunner.from_proto)
    sess: `Session` used to run the queue ops.  Defaults to the
      default session.
    coord: Optional `Coordinator` for coordinating the started threads.
    daemon: Whether the threads should be marked as `daemons`, meaning
      they don't block program exit.
    start: Set to `False` to only create the threads, not start them.
    collection: A `GraphKey` specifying the graph collection to
      get the queue runners from.  Defaults to `GraphKeys.QUEUE_RUNNERS`.

  Returns:
    A list of threads.
  """
  if sess is None:
    sess = ops.get_default_session()
    if not sess:
      raise ValueError("Cannot start queue runners: No default session is "
                       "registered. Use `with sess.as_default()` or pass an "
                       "explicit session to tf.start_queue_runners(sess=sess)")
  with sess.graph.as_default():
    threads = []
    for qr in ops.get_collection(collection):
      threads.extend(qr.create_threads(sess, coord=coord, daemon=daemon,
                                       start=start))
  return threads


ops.register_proto_function(ops.GraphKeys.QUEUE_RUNNERS,
                            proto_type=queue_runner_pb2.QueueRunnerDef,
                            to_proto=QueueRunner.to_proto,
                            from_proto=QueueRunner.from_proto)
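# A minimal usage sketch, not from the snippet above: `train_op` and the input
# pipeline that registers queue runners (e.g. a string_input_producer) are
# hypothetical. It shows the classic TF1 pattern the docstring describes,
# pairing start_queue_runners with a Coordinator.
import tensorflow as tf

with tf.compat.v1.Session() as sess:
  sess.run(tf.compat.v1.global_variables_initializer())
  coord = tf.compat.v1.train.Coordinator()
  threads = tf.compat.v1.train.start_queue_runners(sess=sess, coord=coord)
  try:
    while not coord.should_stop():
      sess.run(train_op)  # hypothetical training step fed by the input queues
  except tf.errors.OutOfRangeError:
    pass  # input queues are exhausted
  finally:
    coord.request_stop()
    coord.join(threads)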
    return None
  else:
    ranks = []
    for var in var_list:
      with ops.device(var.device):
        ranks.append(array_ops.rank(var))
    if len(ranks) == 1:
      return ranks[0]
    else:
      return array_ops.pack(ranks)


# pylint: disable=protected-access
ops.register_tensor_conversion_function(Variable,
                                        Variable._TensorConversionFunction)
Variable._OverloadAllOperators()
# pylint: enable=protected-access

ops.register_proto_function(ops.GraphKeys.VARIABLES,
                            proto_type=variable_pb2.VariableDef,
                            to_proto=Variable.to_proto,
                            from_proto=Variable.from_proto)
ops.register_proto_function(ops.GraphKeys.TRAINABLE_VARIABLES,
                            proto_type=variable_pb2.VariableDef,
                            to_proto=Variable.to_proto,
                            from_proto=Variable.from_proto)
ops.register_proto_function(ops.GraphKeys.MOVING_AVERAGE_VARIABLES,
                            proto_type=variable_pb2.VariableDef,
                            to_proto=Variable.to_proto,
                            from_proto=Variable.from_proto)
def _to_proto_fn(v, export_scope=None):
  """Converts Variable and ResourceVariable to VariableDef for collections."""
  return v.to_proto(export_scope=export_scope)


def _from_proto_fn(v, import_scope=None):
  """Creates Variable or ResourceVariable from VariableDef as needed."""
  if v.is_resource:
    return ResourceVariable.from_proto(v, import_scope=import_scope)
  return variables.Variable.from_proto(v, import_scope=import_scope)


ops.register_proto_function(ops.GraphKeys.GLOBAL_VARIABLES,
                            proto_type=variable_pb2.VariableDef,
                            to_proto=_to_proto_fn,
                            from_proto=_from_proto_fn)
ops.register_proto_function(ops.GraphKeys.TRAINABLE_VARIABLES,
                            proto_type=variable_pb2.VariableDef,
                            to_proto=_to_proto_fn,
                            from_proto=_from_proto_fn)
ops.register_proto_function(ops.GraphKeys.MOVING_AVERAGE_VARIABLES,
                            proto_type=variable_pb2.VariableDef,
                            to_proto=_to_proto_fn,
                            from_proto=_from_proto_fn)
ops.register_proto_function(ops.GraphKeys.LOCAL_VARIABLES,
                            proto_type=variable_pb2.VariableDef,
                            to_proto=_to_proto_fn,
                            from_proto=_from_proto_fn)
ops.register_proto_function(ops.GraphKeys.MODEL_VARIABLES,
                            proto_type=variable_pb2.VariableDef,
                            to_proto=_to_proto_fn,
                            from_proto=_from_proto_fn)
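# A minimal sketch, not part of the file above (the variable name is
# arbitrary), of the dispatch these helpers implement: in graph mode a
# resource variable round-trips through VariableDef, and _from_proto_fn
# re-creates a ResourceVariable because the proto's is_resource flag is set.
import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # VariableDef round-trips need graph mode
v = tf.compat.v1.get_variable('demo_var', shape=[2], use_resource=True)
variable_def = _to_proto_fn(v)
assert variable_def.is_resource
restored = _from_proto_fn(variable_def)  # a ResourceVariable again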
    hparam_proto = hparam_pb2.HParamDef()
    for name in self._hparam_types:
      # Parse the values.
      param_type, is_list = self._hparam_types.get(name, (None, None))
      kind = HParams._get_kind_name(param_type, is_list)

      if is_list:
        if kind.startswith('bytes'):
          v_list = [compat.as_bytes(v) for v in getattr(self, name)]
        else:
          v_list = [v for v in getattr(self, name)]
        getattr(hparam_proto.hparam[name], kind).value.extend(v_list)
      else:
        v = getattr(self, name)
        if kind.startswith('bytes'):
          v = compat.as_bytes(getattr(self, name))
        setattr(hparam_proto.hparam[name], kind, v)

    return hparam_proto

  @staticmethod
  def from_proto(hparam_def, import_scope=None):  # pylint: disable=unused-argument
    return HParams(hparam_def=hparam_def)


ops.register_proto_function(
    'hparams',
    proto_type=hparam_pb2.HParamDef,
    to_proto=HParams.to_proto,
    from_proto=HParams.from_proto)
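# A minimal round-trip sketch (the hyperparameter names and values are
# hypothetical): the 'hparams' registration above is what lets an exported
# MetaGraphDef carry an HParamDef, and these two calls are exactly the
# conversions it would invoke for the HParams class defined in this module.
hparams = HParams(learning_rate=0.5, hidden_units=[64, 64], activation='relu')
hparam_def = hparams.to_proto()
restored = HParams.from_proto(hparam_def)
assert restored.learning_rate == 0.5
assert restored.hidden_units == [64, 64]
assert restored.activation == 'relu'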
def _to_proto_fn(v, export_scope=None):
  """Converts Variable and ResourceVariable to VariableDef for collections."""
  return v.to_proto(export_scope=export_scope)


def _from_proto_fn(v, import_scope=None):
  """Creates Variable or ResourceVariable from VariableDef as needed."""
  if v.is_resource:
    return ResourceVariable.from_proto(v, import_scope=import_scope)
  return variables.Variable.from_proto(v, import_scope=import_scope)


ops.register_proto_function(
    ops.GraphKeys.GLOBAL_VARIABLES,
    proto_type=variable_pb2.VariableDef,
    to_proto=_to_proto_fn,
    from_proto=_from_proto_fn)
ops.register_proto_function(
    ops.GraphKeys.TRAINABLE_VARIABLES,
    proto_type=variable_pb2.VariableDef,
    to_proto=_to_proto_fn,
    from_proto=_from_proto_fn)
ops.register_proto_function(
    ops.GraphKeys.MOVING_AVERAGE_VARIABLES,
    proto_type=variable_pb2.VariableDef,
    to_proto=_to_proto_fn,
    from_proto=_from_proto_fn)
ops.register_proto_function(
    ops.GraphKeys.LOCAL_VARIABLES,
    proto_type=variable_pb2.VariableDef,
    to_proto=_to_proto_fn,
    from_proto=_from_proto_fn)
  variable_names_tensor = array_ops.constant([s.op.name for s in var_list])
  # Return a 1-D tensor containing all the names of uninitialized variables.
  return array_ops.boolean_mask(variable_names_tensor, variables_mask)


# pylint: disable=protected-access
ops.register_tensor_conversion_function(Variable,
                                        Variable._TensorConversionFunction)
Variable._OverloadAllOperators()
ops.register_tensor_conversion_function(
    PartitionedVariable, PartitionedVariable._TensorConversionFunction)
# pylint: enable=protected-access

ops.register_dense_tensor_like_type(Variable)

ops.register_proto_function(
    ops.GraphKeys.GLOBAL_VARIABLES,
    proto_type=variable_pb2.VariableDef,
    to_proto=Variable.to_proto,
    from_proto=Variable.from_proto,
)
ops.register_proto_function(
    ops.GraphKeys.TRAINABLE_VARIABLES,
    proto_type=variable_pb2.VariableDef,
    to_proto=Variable.to_proto,
    from_proto=Variable.from_proto,
)
ops.register_proto_function(
    ops.GraphKeys.MOVING_AVERAGE_VARIABLES,
    proto_type=variable_pb2.VariableDef,
    to_proto=Variable.to_proto,
    from_proto=Variable.from_proto,
)
  def to_proto(self, export_scope=None):
    del export_scope  # Unused
    result = attr_value_pb2.AttrValue()
    result.s = dill.dumps(self)
    return result


# Register the pyfuncs collection to use `AttrValue` proto type.
# The proto object stored in the graph collection will contain the pickled
# value of a `_PyFuncDef` object as a string in its `s` field.
# Note that `AttrValue` is used here only as a convenient placeholder for a
# string, and does not represent the actual attributes of an `op` as in the
# usual case.
ops.register_proto_function(_PYFUNC_COLLECTION_KEY,
                            proto_type=attr_value_pb2.AttrValue,
                            to_proto=_PyFuncDef.to_proto,
                            from_proto=_PyFuncDef.from_proto)


def insert_pyfunc(func, Tout, stateful, name, *args):  # pylint: disable=invalid-name
  """Calls tf.py_func and inserts the `func` in the internal registry."""
  result = tf.compat.v1.py_func(
      func, inp=list(args), Tout=Tout, stateful=stateful, name=name)
  token = result.op.node_def.attr['token'].s
  tf.compat.v1.add_to_collection(_PYFUNC_COLLECTION_KEY,
                                 _PyFuncDef(token, func))
  return result
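# A minimal usage sketch, not part of the module above (the `_square` function
# and tensor shape are hypothetical): insert_pyfunc wraps tf.py_func and
# additionally records the Python callable in the collection registered above,
# so the pickled _PyFuncDef travels with an exported MetaGraphDef.
import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # py_func token lookup needs graph mode

def _square(x):
  return x * x

x = tf.compat.v1.placeholder(tf.float32, shape=[None])
y = insert_pyfunc(_square, tf.float32, False, 'square', x)

with tf.compat.v1.Session() as sess:
  print(sess.run(y, feed_dict={x: [1.0, 2.0, 3.0]}))  # [1. 4. 9.]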
    connected_subgraphs.append(connected_subgraph)
  return module_info


def _module_info_from_proto_safe(module_info_def, import_scope=None):
  """Deserializes the `module_info_def` proto without raising exceptions.

  Args:
    module_info_def: An instance of `module_pb2.SonnetModule`.
    import_scope: Optional `string`. Name scope to use.

  Returns:
    An instance of `ModuleInfo`.
  """
  try:
    return _module_info_from_proto(module_info_def, import_scope)
  except Exception as e:  # pylint: disable=broad-except
    logging.warning(
        "Error encountered when deserializing sonnet ModuleInfo:\n%s", str(e))
    return None


# `to_proto` is already wrapped into a try...except externally but
# `from_proto` isn't. In order to minimize disruption, catch all the
# exceptions happening during `from_proto` and just log them.
ops.register_proto_function(SONNET_COLLECTION_NAME,
                            module_pb2.SonnetModule,
                            to_proto=_module_info_to_proto,
                            from_proto=_module_info_from_proto_safe)
  This function exports the graph, saver, and collection objects into a
  `MetaGraphDef` protocol buffer with the intention of it being imported
  at a later time or location to restart training, run inference, or be
  a subgraph.

  Args:
    filename: Optional filename including the path for writing the generated
      `MetaGraphDef` protocol buffer.
    meta_info_def: `MetaInfoDef` protocol buffer.
    graph_def: `GraphDef` protocol buffer.
    saver_def: `SaverDef` protocol buffer.
    collection_list: List of string keys to collect.

  Returns:
    A `MetaGraphDef` proto.
  """
  meta_graph_def = _as_meta_graph_def(meta_info_def=meta_info_def,
                                      graph_def=graph_def,
                                      saver_def=saver_def,
                                      collection_list=collection_list)
  if filename:
    training_util.write_graph(meta_graph_def,
                              os.path.dirname(filename),
                              os.path.basename(filename))
  return meta_graph_def


ops.register_proto_function(ops.GraphKeys.SAVERS,
                            proto_type=saver_pb2.SaverDef,
                            to_proto=Saver.to_proto,
                            from_proto=Saver.from_proto)
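# A minimal sketch of the round trip the docstring above describes (the /tmp
# path and variable name are hypothetical): export_meta_graph serializes every
# collection whose proto functions are registered, including SAVERS registered
# just above, and import_meta_graph rebuilds them, returning the restored Saver.
import tensorflow as tf

tf.compat.v1.disable_eager_execution()
w = tf.compat.v1.get_variable(
    'w', shape=[2], initializer=tf.compat.v1.zeros_initializer())
saver = tf.compat.v1.train.Saver()
tf.compat.v1.train.export_meta_graph(filename='/tmp/example.meta')

with tf.Graph().as_default():
  restored_saver = tf.compat.v1.train.import_meta_graph('/tmp/example.meta')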
E-mail: [email protected]
"""

import numpy as np
import tensorflow as tf

from config import cfg
from utils import reduce_sum
from utils import softmax
from tensorflow.core.framework import variable_pb2
from tensorflow.python.framework import ops
from tensorflow.python.ops import variables
from tensorflow.python.framework.ops import register_proto_function

register_proto_function(ops.GraphKeys.LOCAL_VARIABLES,
                        proto_type=variable_pb2.VariableDef,
                        to_proto=variables.Variable.to_proto,
                        from_proto=variables.Variable.from_proto)

epsilon = 1e-9


class CapsLayer(object):
    ''' Capsule layer.

    Args:
        input: A 4-D tensor.
        num_outputs: the number of capsules in this layer.
        vec_len: integer, the length of the output vector of a capsule.
        layer_type: string, one of 'FC' or 'CONV', the type of this layer:
            fully connected or convolutional (kept for future expansion).
        with_routing: boolean, whether this capsule routes with the
            lower-level capsule layer.