def testBasic(self):
  """Loads the half_plus_two bundle and verifies assets and signatures.

  Checks that the session loads, that the asset filename tensors resolve to
  the files under the bundle's assets directory, and that exactly one
  Signatures proto is stored in the SIGNATURES_KEY collection.
  """
  base_path = tf.test.test_src_dir_path(
      "contrib/session_bundle/example/half_plus_two/00000123")
  tf.reset_default_graph()
  sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
      base_path, target="", config=tf.ConfigProto(device_count={"CPU": 2}))
  self.assertTrue(sess)
  asset_path = os.path.join(base_path, constants.ASSETS_DIRECTORY)
  with sess.as_default():
    path1, path2 = sess.run(["filename1:0", "filename2:0"])
    self.assertEqual(
        compat.as_bytes(os.path.join(asset_path, "hello1.txt")), path1)
    self.assertEqual(
        compat.as_bytes(os.path.join(asset_path, "hello2.txt")), path2)
    collection_def = meta_graph_def.collection_def
    signatures_any = collection_def[constants.SIGNATURES_KEY].any_list.value
    # assertEqual rather than the deprecated assertEquals alias.
    self.assertEqual(len(signatures_any), 1)
    signatures = manifest_pb2.Signatures()
    signatures_any[0].Unpack(signatures)
    self._checkRegressionSignature(signatures, sess)
    # NOTE(review): "Sigantures" looks misspelled; confirm against the
    # helper's actual definition before renaming.
    self._checkNamedSigantures(signatures, sess)
def testVarCheckpointV2(self):
  """Loads a bundle saved with V2 checkpoints and verifies its contents.

  Mirrors the basic test but against the half_plus_two_ckpt_v2 testdata:
  asset filename tensors must resolve to files under the assets directory
  and the SIGNATURES_KEY collection must hold exactly one Signatures proto.
  """
  base_path = test.test_src_dir_path(
      "contrib/session_bundle/testdata/half_plus_two_ckpt_v2/00000123")
  ops.reset_default_graph()
  sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
      base_path,
      target="",
      config=config_pb2.ConfigProto(device_count={"CPU": 2}))
  self.assertTrue(sess)
  asset_path = os.path.join(base_path, constants.ASSETS_DIRECTORY)
  with sess.as_default():
    path1, path2 = sess.run(["filename1:0", "filename2:0"])
    self.assertEqual(
        compat.as_bytes(os.path.join(asset_path, "hello1.txt")), path1)
    self.assertEqual(
        compat.as_bytes(os.path.join(asset_path, "hello2.txt")), path2)
    collection_def = meta_graph_def.collection_def
    signatures_any = collection_def[constants.SIGNATURES_KEY].any_list.value
    # assertEqual rather than the deprecated assertEquals alias.
    self.assertEqual(len(signatures_any), 1)
    signatures = manifest_pb2.Signatures()
    signatures_any[0].Unpack(signatures)
    self._checkRegressionSignature(signatures, sess)
    self._checkNamedSignatures(signatures, sess)
def testBasic(self):
  """Loads the half_plus_two bundle and runs its regression signature.

  Verifies asset paths, checks that exactly one Signatures proto is in the
  collection, then evaluates the default regression signature on a small
  batch. The exported model computes y = 0.5 * x + 2; all expected values
  are exactly representable in binary floating point, so assertEqual is
  safe here.
  """
  base_path = tf.test.test_src_dir_path(
      "contrib/session_bundle/example/half_plus_two/00000123")
  tf.reset_default_graph()
  sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
      base_path, target="", config=tf.ConfigProto(device_count={"CPU": 2}))
  self.assertTrue(sess)
  asset_path = os.path.join(base_path, constants.ASSETS_DIRECTORY)
  with sess.as_default():
    path1, path2 = sess.run(["filename1:0", "filename2:0"])
    self.assertEqual(
        compat.as_bytes(os.path.join(asset_path, "hello1.txt")), path1)
    self.assertEqual(
        compat.as_bytes(os.path.join(asset_path, "hello2.txt")), path2)
    collection_def = meta_graph_def.collection_def
    signatures_any = collection_def[constants.SIGNATURES_KEY].any_list.value
    # assertEqual rather than the deprecated assertEquals alias.
    self.assertEqual(len(signatures_any), 1)
    signatures = manifest_pb2.Signatures()
    signatures_any[0].Unpack(signatures)
    default_signature = signatures.default_signature
    input_name = default_signature.regression_signature.input.tensor_name
    output_name = default_signature.regression_signature.output.tensor_name
    y = sess.run([output_name], {input_name: np.array([[0], [1], [2], [3]])})
    # The operation is y = 0.5 * x + 2
    self.assertEqual(y[0][0], 2)
    self.assertEqual(y[0][1], 2.5)
    self.assertEqual(y[0][2], 3)
    self.assertEqual(y[0][3], 3.5)
def testBadPath(self):
  """Loading from a nonexistent path raises RuntimeError about the graph.

  assertIn is used instead of assertTrue("..." in ...) so a failure shows
  both the expected substring and the actual exception message.
  """
  base_path = tf.test.test_src_dir_path("/no/such/a/dir")
  tf.reset_default_graph()
  with self.assertRaises(RuntimeError) as cm:
    _, _ = session_bundle.load_session_bundle_from_path(
        base_path,
        target="local",
        config=tf.ConfigProto(device_count={"CPU": 2}))
  self.assertIn("Expected meta graph file missing", str(cm.exception))
def testBadPath(self):
  """Loading from a nonexistent path raises RuntimeError about the graph.

  assertIn is used instead of assertTrue("..." in ...) so a failure shows
  both the expected substring and the actual exception message.
  """
  base_path = tf.test.test_src_dir_path("/no/such/a/dir")
  tf.reset_default_graph()
  with self.assertRaises(RuntimeError) as cm:
    _, _ = session_bundle.load_session_bundle_from_path(
        base_path,
        target="local",
        config=tf.ConfigProto(device_count={"CPU": 2}))
  self.assertIn("Expected meta graph file missing", str(cm.exception))
def extract_embeddings():
  """Extract embedding vectors from the specified model and paths.

  Loads the session bundle at FLAGS.model_path, then for every glob
  pattern in the comma-separated FLAGS.input_path issues a request per
  matched file, writing results to FLAGS.output_path. Dataset ids start
  at FLAGS.offset and advance by one per pattern.
  """
  session, _ = session_bundle.load_session_bundle_from_path(FLAGS.model_path)
  patterns = FLAGS.input_path.split(',')
  with tf.gfile.Open(FLAGS.output_path, 'w') as out:
    dataset = FLAGS.offset
    for pattern in patterns:
      for path in tf.gfile.Glob(pattern):
        make_request(dataset, path, out, session)
      dataset += 1
def _load_saved_model_from_session_bundle_path(export_dir, target, config):
  """Load legacy TF Exporter/SessionBundle checkpoint.

  Args:
    export_dir: the directory that contains files exported by exporter.
    target: The execution engine to connect to. See target in
      tf.compat.v1.Session()
    config: A ConfigProto proto with configuration options. See config in
      tf.compat.v1.Session()

  Returns:
    session: a tensorflow session created from the variable files.
    metagraph_def: The `MetaGraphDef` protocol buffer loaded in the provided
    session. This can be used to further extract signature-defs,
    collection-defs, etc. This model is up-converted to SavedModel format.
    Specifically, metagraph_def SignatureDef field is populated with
    Signatures converted from legacy signatures contained within
    CollectionDef

  Raises:
    RuntimeError: If metagraph already contains signature_def and cannot be
    up-converted.
  """
  mg_path = os.path.join(export_dir, legacy_constants.META_GRAPH_DEF_FILENAME)
  mg_def = meta_graph.read_meta_graph_file(mg_path)

  # An already-populated signature_def means the graph is not a legacy
  # export and cannot be up-converted.
  if mg_def.signature_def:
    raise RuntimeError("Legacy graph contains signature def, unable to "
                       "up-convert.")

  # Populate SignatureDef entries from the legacy Signatures collection.
  default_sig, named_sig = _convert_signatures_to_signature_defs(mg_def)
  if default_sig:
    mg_def.signature_def[
        signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY].CopyFrom(
            default_sig)
  if named_sig:
    key = signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
    if default_sig:
      # The default key is taken; suffix it for the named signatures.
      key = key + "_from_named"
    mg_def.signature_def[key].CopyFrom(named_sig)

  # Reload rather than reusing the first session: a Session's internal Graph
  # is built from its meta_graph_def, so loading with the up-converted proto
  # keeps Graph and MetaGraphDef in sync.
  return session_bundle.load_session_bundle_from_path(
      export_dir, target, config, meta_graph_def=mg_def)
def _load_saved_model_from_session_bundle_path(export_dir, target, config):
  """Load legacy TF Exporter/SessionBundle checkpoint.

  Args:
    export_dir: the directory that contains files exported by exporter.
    target: The execution engine to connect to. See target in
      tf.compat.v1.Session()
    config: A ConfigProto proto with configuration options. See config in
      tf.compat.v1.Session()

  Returns:
    session: a tensorflow session created from the variable files.
    metagraph_def: The `MetaGraphDef` protocol buffer loaded in the provided
    session. This can be used to further extract signature-defs,
    collection-defs, etc. This model is up-converted to SavedModel format.
    Specifically, metagraph_def SignatureDef field is populated with
    Signatures converted from legacy signatures contained within
    CollectionDef

  Raises:
    RuntimeError: If metagraph already contains signature_def and cannot be
    up-converted.
  """
  graph_file = os.path.join(export_dir,
                            legacy_constants.META_GRAPH_DEF_FILENAME)
  loaded_def = meta_graph.read_meta_graph_file(graph_file)

  # A legacy export must not already carry SignatureDefs.
  if loaded_def.signature_def:
    raise RuntimeError("Legacy graph contains signature def, unable to "
                       "up-convert.")

  # Convert legacy Signatures into SignatureDef entries on the metagraph.
  default_def, named_def = _convert_signatures_to_signature_defs(loaded_def)
  base_key = signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
  if default_def:
    loaded_def.signature_def[base_key].CopyFrom(default_def)
  if named_def:
    # Avoid clobbering the default entry when both kinds are present.
    named_key = base_key + "_from_named" if default_def else base_key
    loaded_def.signature_def[named_key].CopyFrom(named_def)

  # A Session's internal Graph is populated from its meta_graph_def, so we
  # must create the session from the up-converted proto to keep them in
  # sync rather than reusing a session built from the old one.
  sess, loaded_def = session_bundle.load_session_bundle_from_path(
      export_dir, target, config, meta_graph_def=loaded_def)
  return sess, loaded_def
def load_graph(self, output_graph_path):
  """Loads an exported bundle and returns its classification endpoints.

  Returns a tuple (sess, input_name, output_name, classes) where the names
  are tensor names from the default classification signature and classes is
  the evaluated classes tensor.
  """
  sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
      output_graph_path)
  with sess.as_default():
    sig_list = meta_graph_def.collection_def[
        constants.SIGNATURES_KEY].any_list.value
    signatures = manifest_pb2.Signatures()
    sig_list[0].Unpack(signatures)
    cls_sig = signatures.default_signature.classification_signature
    input_name = cls_sig.input.tensor_name
    output_name = cls_sig.scores.tensor_name
    classes = sess.run(sess.graph.get_tensor_by_name(
        cls_sig.classes.tensor_name))
    return (sess, input_name, output_name, classes)
def load_graph(self, output_graph_path):
  """Loads an exported bundle and returns its classification endpoints.

  Returns a tuple (sess, input_name, output_name, classes) where the names
  are tensor names from the default classification signature and classes is
  the evaluated classes tensor.
  """
  sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
      output_graph_path)
  with sess.as_default():
    packed = meta_graph_def.collection_def[
        constants.SIGNATURES_KEY].any_list.value[0]
    signatures = manifest_pb2.Signatures()
    packed.Unpack(signatures)
    signature = signatures.default_signature.classification_signature
    input_name = signature.input.tensor_name
    output_name = signature.scores.tensor_name
    classes_name = signature.classes.tensor_name
    classes = sess.run(sess.graph.get_tensor_by_name(classes_name))
    return (sess, input_name, output_name, classes)
def testBasic(self):
  """Loads the half_plus_two bundle and runs its regression signature.

  Verifies asset paths, checks that exactly one Signatures proto is in the
  collection, then evaluates the default regression signature on a small
  batch. The exported model computes y = 0.5 * x + 2; all expected values
  are exactly representable in binary floating point, so assertEqual is
  safe here.
  """
  base_path = tf.test.test_src_dir_path(
      "contrib/session_bundle/example/half_plus_two/00000123")
  tf.reset_default_graph()
  sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
      base_path, target="", config=tf.ConfigProto(device_count={"CPU": 2}))
  self.assertTrue(sess)
  asset_path = os.path.join(base_path, constants.ASSETS_DIRECTORY)
  with sess.as_default():
    path1, path2 = sess.run(["filename1:0", "filename2:0"])
    self.assertEqual(
        compat.as_bytes(os.path.join(asset_path, "hello1.txt")), path1)
    self.assertEqual(
        compat.as_bytes(os.path.join(asset_path, "hello2.txt")), path2)
    collection_def = meta_graph_def.collection_def
    signatures_any = collection_def[constants.SIGNATURES_KEY].any_list.value
    # assertEqual rather than the deprecated assertEquals alias.
    self.assertEqual(len(signatures_any), 1)
    signatures = manifest_pb2.Signatures()
    signatures_any[0].Unpack(signatures)
    default_signature = signatures.default_signature
    input_name = default_signature.regression_signature.input.tensor_name
    output_name = default_signature.regression_signature.output.tensor_name
    y = sess.run([output_name], {input_name: np.array([[0], [1], [2], [3]])})
    # The operation is y = 0.5 * x + 2
    self.assertEqual(y[0][0], 2)
    self.assertEqual(y[0][1], 2.5)
    self.assertEqual(y[0][2], 3)
    self.assertEqual(y[0][3], 3.5)
def testGraphWithoutVarsLoadsCorrectly(self):
  """A bundle with no variables still loads and evaluates correctly.

  The exported graph computes y = x * 3 - 7; also checks that the "meta"
  collection round-trips through the bundle.
  """
  session, _ = session_bundle.load_session_bundle_from_path(self.base_path)
  got = session.run(["y:0"], {"x:0": 5.0})[0]
  # assertEqual rather than the deprecated assertEquals alias.
  self.assertEqual(got, 5.0 * 3.0 - 7.0)
  self.assertEqual(ops.get_collection("meta"), [b"this is meta"])
# weights_regularizer=slim.l2_regularizer(0.0005)):
# net = tf.reshape(inputs,[-1,FLAGS.im_size ,FLAGS.im_size,3])
# net = slim.conv2d(net, 32, [3,3], scope='conv1')
# net = slim.max_pool2d(net, [4,4], scope = 'conv1')
# net = slim.conv2d(net,128,[3,3], scope = 'conv2')
# net = slim.max_pool2d(net,[4,4], scope = 'pool2')
# net = slim.flatten(net)
# net = slim.fully_connected(net,64, scope = 'fc')
# net = slim.fully_connected(net, len(labelmap), activation_fn = None, scope = 'output')
# return net

if __name__ == '__main__':
  # args = parse_args()
  # Create a session for running Ops on the Graph; '19000/' selects a
  # specific checkpoint directory under FLAGS.model_dir.
  session, _ = session_bundle.load_session_bundle_from_path(
      FLAGS.model_dir + '/' + '19000/')
  # print() call form: same output for a single argument under Python 2
  # and also valid under Python 3.
  print(session)

  # Get the mappings between aliases and tensor names for both inputs
  # and outputs.
  input_alias_map = json.loads(session.graph.get_collection('inputs')[0])
  output_alias_map = json.loads(session.graph.get_collection('outputs')[0])
  aliases, tensor_names = zip(*output_alias_map.items())

  np.random.seed(45)  # shuffle the same way each time for consistency
  # examples = read_test_list(args.test_dir)
  examples = read_im_list('tf_test.csv')
  np.random.shuffle(examples)

  start_time = time.time()
  y_true = []
  y_pred = []
  count = 0
def testGraphWithoutVarsLoadsCorrectly(self):
  """A bundle with no variables still loads and evaluates correctly.

  The exported graph computes y = x * 3 - 7; also checks that the "meta"
  collection round-trips through the bundle.
  """
  session, _ = session_bundle.load_session_bundle_from_path(self.base_path)
  got = session.run(["y:0"], {"x:0": 5.0})[0]
  # assertEqual rather than the deprecated assertEquals alias.
  self.assertEqual(got, 5.0 * 3.0 - 7.0)
  self.assertEqual(tf.get_collection("meta"), [b"this is meta"])
# weights_initializer=tf.truncated_normal_initializer(0.0, 0.01),
# weights_regularizer=slim.l2_regularizer(0.0005)):
# net = tf.reshape(inputs,[-1,FLAGS.im_size ,FLAGS.im_size,3])
# net = slim.conv2d(net, 32, [3,3], scope='conv1')
# net = slim.max_pool2d(net, [4,4], scope = 'conv1')
# net = slim.conv2d(net,128,[3,3], scope = 'conv2')
# net = slim.max_pool2d(net,[4,4], scope = 'pool2')
# net = slim.flatten(net)
# net = slim.fully_connected(net,64, scope = 'fc')
# net = slim.fully_connected(net, len(labelmap), activation_fn = None, scope = 'output')
# return net

if __name__ == '__main__':
  # args = parse_args()
  # Create a session for running Ops on the Graph; '12000/' selects a
  # specific checkpoint directory under FLAGS.model_dir.
  session, _ = session_bundle.load_session_bundle_from_path(
      FLAGS.model_dir + '/' + '12000/')
  # print() call form: same output for a single argument under Python 2
  # and also valid under Python 3.
  print(session)

  # Get the mappings between aliases and tensor names for both inputs
  # and outputs.
  input_alias_map = json.loads(session.graph.get_collection('inputs')[0])
  output_alias_map = json.loads(session.graph.get_collection('outputs')[0])
  aliases, tensor_names = zip(*output_alias_map.items())

  np.random.seed(45)  # shuffle the same way each time for consistency
  # examples = read_test_list(args.test_dir)
  examples = read_im_list('tf_test.csv')
  np.random.shuffle(examples)

  start_time = time.time()
  y_true = []
  y_pred = []
  count = 0