Example no. 1
0
    def testMemoizedInferrerLoading(self):
        """Requesting the same memoized inferrer twice yields one shared object."""
        first = inference.memoized_inferrer(
            test_util.savedmodel_path(), memoize_inference_results=True)
        second = inference.memoized_inferrer(
            test_util.savedmodel_path(), memoize_inference_results=True)

        # Identity (not mere equality) proves the memoization cache was hit.
        self.assertIs(first, second)
Example no. 2
0
    def testMemoizedInferenceResults(self):
        """With memoization enabled, repeated queries return the cached object."""
        inferrer = inference.Inferrer(
            test_util.savedmodel_path(), memoize_inference_results=True)

        batch = ('ADE', )
        first_result = inferrer._get_activations_for_batch(batch)
        second_result = inferrer._get_activations_for_batch(batch)

        # Object identity shows the second call was served from the cache.
        self.assertIs(first_result, second_result)
Example no. 3
0
 def testStringInput(self):
     """Passing a bare string instead of a list must raise ValueError."""
     inferrer = inference.Inferrer(test_util.savedmodel_path())
     expected_message = ('`list_of_seqs` should be convertible to a '
                         'numpy vector of strings. Got *')
     # A plain string is rejected; callers must wrap sequences in a list.
     with self.assertRaisesRegex(ValueError, expected_message):
         inferrer.get_activations('QP')
Example no. 4
0
    def testBatchedInference(self):
        """Batching (batch_size=5) preserves the number of outputs.

        Sweeps input sizes 0..14 so the test covers the empty-input case,
        a partial batch, exactly full batches, and multi-batch inputs.
        """
        inferrer = inference.Inferrer(test_util.savedmodel_path(),
                                      batch_size=5)

        input_seq = 'AP'
        for total_size in range(15):
            full_list = [input_seq] * total_size
            activations = inferrer.get_activations(full_list)
            # Fix: the original asserted the *input* list's length against the
            # output shape (assertLen(full_list, activations.shape[0])) — the
            # roles were reversed. Assert the output batch dimension directly:
            # one activation row per input sequence, regardless of batching.
            self.assertEqual(activations.shape[0], total_size)
Example no. 5
0
    def testSortUnsortInference(self):
        """Activations come back in the caller's order despite internal sorting."""
        inferrer = inference.Inferrer(test_util.savedmodel_path(),
                                      batch_size=1)

        # The inferrer sorts inputs by length internally, which would move the
        # longer 'APP' to the end; results must be restored to input order.
        input_seqs = ['AP', 'APP', 'AP']
        activations = inferrer.get_activations(input_seqs)

        # Identical sequences at positions 0 and 2 must yield identical
        # activations, while the longer sequence at position 1 must differ —
        # proving the un-sort put 'APP' back in the middle.
        self.assertAllClose(activations[0], activations[2])
        self.assertNotAllClose(activations[0], activations[1])
Example no. 6
0
    def testCanInfer(self):
        """in_graph_inferrer wires the savedmodel into a TF1 graph and runs it."""
        graph = tf.Graph()
        with graph.as_default():
            seq_placeholder = tf.placeholder(shape=[None], dtype=tf.string)
            output_tensor = inference.in_graph_inferrer(
                seq_placeholder, test_util.savedmodel_path(),
                tf.saved_model.signature_constants
                .DEFAULT_SERVING_SIGNATURE_DEF_KEY)

        # One sequence spanning the full residue vocabulary, plus a short one.
        input_seqs = [''.join(utils.FULL_RESIDUE_VOCAB), 'ACD']
        with self.session(graph=graph) as sess:
            # TF1 requires explicit initialization of variables and lookup tables.
            sess.run(tf.global_variables_initializer())
            sess.run(tf.tables_initializer())
            result = sess.run(output_tensor,
                              feed_dict={seq_placeholder: input_seqs})

        # One output per input sequence.
        self.assertLen(result, 2)
Example no. 7
0
 def testGetVariable(self):
     """get_variable retrieves a named tensor's value from the saved model."""
     inferrer = inference.Inferrer(test_util.savedmodel_path())
     bias_values = inferrer.get_variable('conv1d/bias:0')
     # The conv layer's bias vector must exist and be non-empty.
     self.assertNotEmpty(bias_values)