    def test_numeric_column_multi_dim(self):
        """Tests sequence_input_layer for multi-dimensional numeric_column."""
        sparse_input = sparse_tensor.SparseTensorValue(
            # example 0, values [[[0., 1.],  [2., 3.]], [[4., 5.],  [6., 7.]]]
            # example 1, [[[10., 11.],  [12., 13.]]]
            indices=((0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (0, 6),
                     (0, 7), (1, 0), (1, 1), (1, 2), (1, 3)),
            values=(0., 1., 2., 3., 4., 5., 6., 7., 10., 11., 12., 13.),
            dense_shape=(2, 8))
        # The output of numeric_column._get_dense_tensor should be flattened.
        expected_input_layer = [
            [[0., 1., 2., 3.], [4., 5., 6., 7.]],
            [[10., 11., 12., 13.], [0., 0., 0., 0.]],
        ]
        expected_sequence_length = [2, 1]
        numeric_column = sfc.sequence_numeric_column('aaa', shape=(2, 2))

        input_layer, sequence_length = sfc.sequence_input_layer(
            features={'aaa': sparse_input}, feature_columns=[numeric_column])

        with monitored_session.MonitoredSession() as sess:
            self.assertAllEqual(expected_input_layer,
                                input_layer.eval(session=sess))
            self.assertAllEqual(expected_sequence_length,
                                sequence_length.eval(session=sess))
Example #2
  def __init__(self,
               estimator,
               serving_input_receiver_fn,
               output_key=None,
               graph=None,
               config=None):
    """Initialize a `CoreEstimatorPredictor`.

    Args:
      estimator: an instance of `learn.python.estimator.Estimator`.
      serving_input_receiver_fn: a function that takes no arguments and returns
        an instance of `ServingInputReceiver` compatible with `estimator`.
      output_key: Optional string specifying the export output to use. If
        `None`, then `DEFAULT_SERVING_SIGNATURE_DEF_KEY` is used.
      graph: Optional. The TensorFlow `Graph` in which prediction should be
        done.
      config: `ConfigProto` proto used to configure the session.
    """
    self._graph = graph or ops.Graph()
    with self._graph.as_default():
      serving_input_receiver = serving_input_receiver_fn()
      signature_def = _get_signature_def(
          serving_input_receiver, estimator, output_key)
      checkpoint_dir = estimator.model_dir
      self._session = monitored_session.MonitoredSession(
          session_creator=monitored_session.ChiefSessionCreator(
              config=config,
              checkpoint_dir=checkpoint_dir))

    feed_tensor_info = signature_def.inputs
    self._feed_tensors = {k: self._graph.get_tensor_by_name(v.name)
                          for k, v in feed_tensor_info.items()}
    fetch_tensor_info = signature_def.outputs
    self._fetch_tensors = {k: self._graph.get_tensor_by_name(v.name)
                           for k, v in fetch_tensor_info.items()}
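The `_feed_tensors` / `_fetch_tensors` maps built above are what a prediction call consumes. A minimal sketch of such a call, assuming `predictor` is an initialized `CoreEstimatorPredictor` and `input_dict` maps input names to numpy arrays (the helper name `predict` is an assumption, not part of the snippet):

def predict(predictor, input_dict):
    # Resolve each named input to its feed tensor, then evaluate every named
    # output tensor in the predictor's MonitoredSession.
    feed_dict = {predictor._feed_tensors[key]: value
                 for key, value in input_dict.items()}
    return predictor._session.run(predictor._fetch_tensors, feed_dict=feed_dict)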
  def _between_graph_with_monitored_session(self, strategy):
    context = distribute_coordinator_context.get_current_worker_context()
    self.assertTrue(context is not None)
    with ops.device("/job:ps/task:0"):
      # TODO(yuefengz): investigate why not using resource variable will make
      # the test flaky.
      x = variable_scope.get_variable("xx", initializer=10.0, use_resource=True)
    with ops.device("/job:ps/task:1"):
      y = variable_scope.get_variable("yy", initializer=20.0, use_resource=True)

    x_add = x.assign_add(2.0)
    y_sub = y.assign_sub(2.0)
    train_op = control_flow_ops.group([x_add, y_sub])

    # The monitored session will run init or ready ops.
    with monitored_session.MonitoredSession() as sess:
      sess.run(train_op)

      # Synchronize workers after one step to make sure they all have finished
      # training.
      if context.has_barrier:
        context.wait_for_other_workers()
      else:
        self._barrier.wait()

      x_val, y_val = sess.run([x, y])

    self.assertEqual(x_val, 16.0)
    self.assertEqual(y_val, 14.0)
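    # The counter below complements the assertions: exceptions raised inside
    # worker threads can be swallowed, so the main test thread also checks
    # _result_correct to confirm every worker saw the final values.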
    if x_val == 16.0 and y_val == 14.0:
      with self._lock:
        self._result_correct += 1
    def test_get_sequence_dense_tensor(self):
        vocabulary_size = 3
        sparse_input = sparse_tensor.SparseTensorValue(
            # example 0, ids [2]
            # example 1, ids [0, 1]
            # example 2, ids []
            # example 3, ids [1]
            indices=((0, 0), (1, 0), (1, 1), (3, 0)),
            values=(2, 0, 1, 1),
            dense_shape=(4, 2))

        expected_lookups = [
            # example 0, ids [2]
            [[0., 0., 1.], [0., 0., 0.]],
            # example 1, ids [0, 1]
            [[1., 0., 0.], [0., 1., 0.]],
            # example 2, ids []
            [[0., 0., 0.], [0., 0., 0.]],
            # example 3, ids [1]
            [[0., 1., 0.], [0., 0., 0.]],
        ]

        categorical_column = sfc.sequence_categorical_column_with_identity(
            key='aaa', num_buckets=vocabulary_size)
        indicator_column = sfc._sequence_indicator_column(categorical_column)

        indicator_tensor, _ = indicator_column._get_sequence_dense_tensor(
            _LazyBuilder({'aaa': sparse_input}))

        with monitored_session.MonitoredSession() as sess:
            self.assertAllEqual(expected_lookups,
                                indicator_tensor.eval(session=sess))
def _decode_infer_model(input_fn,
                        feed_fn=None,
                        model_fn=None,
                        runCfg=None,
                        model_dir=None,
                        outputs=None,
                        as_iterable=True,
                        iterate_batches=False):
    # Check that model has been trained.
    checkpoint_path = saver.latest_checkpoint(model_dir)
    if not checkpoint_path:
        raise NotFittedError("Couldn't find trained model at %s." % model_dir)
    with tf.Graph().as_default() as g:
        random_seed.set_random_seed(runCfg.tf_random_seed)
        contrib_framework.create_global_step(g)
        features = input_fn()
        model_result, _, _ = model_fn(features,
                                      None,
                                      mode=tf.contrib.learn.ModeKeys.INFER)
        mon_sess = monitored_session.MonitoredSession(
            session_creator=monitored_session.ChiefSessionCreator(
                checkpoint_filename_with_path=checkpoint_path,
                scaffold=None,
                config=runCfg._session_config))

        return _decode_predict_generator(mon_sess, model_result, feed_fn,
                                         iterate_batches)
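The helper `_decode_predict_generator` is not shown in this snippet; a plausible sketch of it, assuming the monitored session owns the input pipeline and `feed_fn` optionally supplies a per-step `feed_dict`:

def _decode_predict_generator(mon_sess, model_result, feed_fn,
                              iterate_batches):
    # Plausible sketch only: drain the input pipeline, yielding either whole
    # batches or individual examples until the session signals end-of-input.
    with mon_sess:
        while not mon_sess.should_stop():
            preds = mon_sess.run(model_result,
                                 feed_dict=feed_fn() if feed_fn else None)
            if iterate_batches:
                yield preds
            else:
                for pred in preds:
                    yield pred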
Example #6
    def testArrayAndDictGiveSameOutput(self):
        a = np.arange(4) * 1.0
        b = np.arange(32, 36)
        x_arr = np.vstack((a, b))
        x_dict = {'feature1': x_arr}
        y = np.arange(-48, -40).reshape(2, 4)

        input_fn_arr = numpy_io.numpy_input_fn(x_arr,
                                               y,
                                               batch_size=2,
                                               shuffle=False,
                                               num_epochs=1)
        features_arr, targets_arr = input_fn_arr()

        input_fn_dict = numpy_io.numpy_input_fn(x_dict,
                                                y,
                                                batch_size=2,
                                                shuffle=False,
                                                num_epochs=1)
        features_dict, targets_dict = input_fn_dict()

        with monitored_session.MonitoredSession() as session:
            res_arr, res_dict = session.run([(features_arr, targets_arr),
                                             (features_dict, targets_dict)])

            self.assertAllEqual(res_arr[0], res_dict[0]['feature1'])
            self.assertAllEqual(res_arr[1], res_dict[1])
    def test_sequence_length_with_empty_rows(self):
        """Tests _sequence_length when some examples do not have ids."""
        vocabulary_size = 3
        sparse_input = sparse_tensor.SparseTensorValue(
            # example 0, ids []
            # example 1, ids [2]
            # example 2, ids [0, 1]
            # example 3, ids []
            # example 4, ids [1]
            # example 5, ids []
            indices=((1, 0), (2, 0), (2, 1), (4, 0)),
            values=(2, 0, 1, 1),
            dense_shape=(6, 2))
        expected_sequence_length = [0, 1, 2, 0, 1, 0]

        categorical_column = sfc.sequence_categorical_column_with_identity(
            key='aaa', num_buckets=vocabulary_size)
        indicator_column = sfc._sequence_indicator_column(categorical_column)

        _, sequence_length = indicator_column._get_sequence_dense_tensor(
            _LazyBuilder({'aaa': sparse_input}))

        with monitored_session.MonitoredSession() as sess:
            self.assertAllEqual(expected_sequence_length,
                                sequence_length.eval(session=sess))
  def testSessionRunHook(self):
    a = array_ops.placeholder(dtypes.float32, [10])
    b = a + 1
    c = b * 2

    class Hook(session_run_hook.SessionRunHook):

      def before_run(self, _):
        return session_run_hook.SessionRunArgs(fetches=c)

    class Hook2(session_run_hook.SessionRunHook):

      def before_run(self, _):
        return session_run_hook.SessionRunArgs(fetches=b)

    sess = session.Session()
    sess = LocalCLIDebuggerWrapperSessionForTest([["run"], ["run"]], sess)

    class SessionCreator(object):

      def create_session(self):
        return sess

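    # MonitoredSession receives the debug-wrapped session through the custom
    # creator above; each hook's before_run fetches (c and b) are merged into
    # the single run call below, which is why they appear in the debug dumps.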
    final_sess = monitored_session.MonitoredSession(
        session_creator=SessionCreator(), hooks=[Hook(), Hook2()])

    final_sess.run(b, feed_dict={a: np.arange(10)})
    debug_dumps = sess.observers["debug_dumps"]
    self.assertEqual(1, len(debug_dumps))
    debug_dump = debug_dumps[0]
    node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
    self.assertIn(b.op.name, node_names)
Example #9
def run(checkpoint, batch_size, dataset_name, images_path):
    # Create model
    images_placeholder, endpoints = create_model(batch_size, dataset_name)

    # Load pre-trained model
    session_creator = monitored_session.ChiefSessionCreator(
        checkpoint_filename_with_path=checkpoint)

    # Find images
    img_names = os.listdir(images_path)
    img_names.sort()
    print("\nNumber of images to process : ", len(img_names))
    img_paths = [os.path.join(images_path, img_name) for img_name in img_names]
    print("Number of images paths : ", len(img_paths))
    global_results = []
    with monitored_session.MonitoredSession(
            session_creator=session_creator) as sess:
        # Loop per batch of size 1
        for i, img_path in enumerate(img_paths):
            print("\nNew Image :", img_path)

            images_data = load_images([img_path], batch_size, dataset_name)

            predictions = sess.run(endpoints.predicted_text,
                                   feed_dict={images_placeholder: images_data})
            result = [
                pr_bytes.decode('utf-8') for pr_bytes in predictions.tolist()
            ]
            for line in result:
                print(line)
                global_results.append(line)
            print("Image :", i)
    return global_results
  def test_get_sequence_dense_tensor_with_normalizer_fn(self):

    def _increment_two(input_sparse_tensor):
      return sparse_ops.sparse_add(
          input_sparse_tensor,
          sparse_tensor.SparseTensor(((0, 0), (1, 1)), (2.0, 2.0), (2, 2))
      )

    sparse_input = sparse_tensor.SparseTensorValue(
        # example 0, values [[0.], [1.]]
        # example 1, values [[10.]]
        indices=((0, 0), (0, 1), (1, 0)),
        values=(0., 1., 10.),
        dense_shape=(2, 2))

    # Before _increment_two:
    #   [[0.], [1.]],
    #   [[10.], [0.]],
    # After _increment_two:
    #   [[2.], [1.]],
    #   [[10.], [2.]],
    expected_dense_tensor = [
        [[2.], [1.]],
        [[10.], [2.]],
    ]
    numeric_column = sfc.sequence_numeric_column(
        'aaa', normalizer_fn=_increment_two)

    dense_tensor, _ = numeric_column._get_sequence_dense_tensor(
        _LazyBuilder({'aaa': sparse_input}))

    with monitored_session.MonitoredSession() as sess:
      self.assertAllEqual(
          expected_dense_tensor, dense_tensor.eval(session=sess))
    def test_sequence_length_not_equal(self):
        """Tests that an error is raised when sequence lengths are not equal."""
        # Input a with sequence_length = [2, 1]
        sparse_input_a = sparse_tensor.SparseTensorValue(
            indices=((0, 0), (0, 1), (1, 0)),
            values=(0., 1., 10.),
            dense_shape=(2, 2))
        # Input b with sequence_length = [1, 1]
        sparse_input_b = sparse_tensor.SparseTensorValue(
            indices=((0, 0), (1, 0)),
            values=(1., 10.),
            dense_shape=(2, 2))
        numeric_column_a = sfc.sequence_numeric_column('aaa')
        numeric_column_b = sfc.sequence_numeric_column('bbb')

        _, sequence_length = sfc.sequence_input_layer(
            features={
                'aaa': sparse_input_a,
                'bbb': sparse_input_b,
            },
            feature_columns=[numeric_column_a, numeric_column_b])

        with monitored_session.MonitoredSession() as sess:
            with self.assertRaisesRegexp(
                    errors.InvalidArgumentError,
                    r'\[Condition x == y did not hold element-wise:\] '
                    r'\[x \(sequence_input_layer/aaa/sequence_length:0\) = \] \[2 1\] '
                    r'\[y \(sequence_input_layer/bbb/sequence_length:0\) = \] \[1 1\]'
            ):
                sess.run(sequence_length)
Example #12
def get_monitored_session(checkpoint_path):
    session_creator = monitored_session.ChiefSessionCreator(
        checkpoint_filename_with_path=checkpoint_path,
        # scaffold=scaffold,
        # master=master,
        # config=config
    )
    return monitored_session.MonitoredSession(session_creator=session_creator)
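A usage sketch (every name below is a placeholder; the graph that produced the checkpoint must be built before the session is created):

#   images_placeholder, endpoints = create_model(batch_size, dataset_name)
#   sess = get_monitored_session('/path/to/model.ckpt-12345')
#   predictions = sess.run(endpoints.predicted_text,
#                          feed_dict={images_placeholder: images_data})
#   sess.close()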
Example #13
def run(checkpoint, batch_size, dataset_name, image_path_pattern):
    images_placeholder, endpoints = create_model(batch_size, dataset_name)
    images_data = load_images(image_path_pattern, batch_size, dataset_name)
    session_creator = monitored_session.ChiefSessionCreator(
        checkpoint_filename_with_path=checkpoint)
    with monitored_session.MonitoredSession(
            session_creator=session_creator) as sess:
        predictions = sess.run(endpoints.predicted_text,
                               feed_dict={images_placeholder: images_data})
    return [pr_bytes.decode('utf-8') for pr_bytes in predictions.tolist()]
Example #14
def outliers_detection(checkpoint_dir):
    """Find outliers using Euclidean distance in the last dense layer.
    
    Parameters:
        checkpoint_dir: Checkpoint of the saved model during training.
    """
    with tf.Graph().as_default():
        config = _CONFIG.copy()
        config['mode'] = 'validation'
        model = DeepSentiment(config)

        # Load model
        checkpoint_path = tf_saver.latest_checkpoint(checkpoint_dir)
        scaffold = monitored_session.Scaffold(init_op=None,
                                              init_feed_dict=None,
                                              init_fn=None,
                                              saver=None)
        session_creator = monitored_session.ChiefSessionCreator(
            scaffold=scaffold,
            checkpoint_filename_with_path=checkpoint_path,
            master='',
            config=None)

        im_features_size = config['im_features_size']
        rnn_size = config['rnn_size']
        dense_mean = np.zeros((im_features_size + rnn_size))
        with monitored_session.MonitoredSession(  # Generate queue
                session_creator=session_creator, hooks=None) as session:
            batch_size = config['batch_size']
            # Integer division: range() below requires an int batch count.
            nb_batches = model.dataset.num_samples // batch_size
            for i in range(nb_batches):
                current_dense = session.run(model.concat_features)
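                # `weight` simplifies to i / (i + 1), so dense_mean is a
                # streaming mean of the per-batch feature means.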
                weight = float(i) * batch_size / ((i + 1) * batch_size)
                dense_mean = weight * dense_mean + (
                    1 - weight) * current_dense.mean(axis=0)

            # Now look at outliers
            max_norms = np.zeros((batch_size))
            max_post_ids = np.zeros((batch_size))
            max_logits = np.zeros((batch_size, model.dataset.num_classes))
            for i in range(nb_batches):
                current_dense, np_post_ids, current_logits = session.run(
                    [model.concat_features, model.post_ids, model.logits])
                current_diff = np.linalg.norm(current_dense - dense_mean,
                                              axis=1)
                for k in range(batch_size):
                    if current_diff[k] > max_norms[k]:
                        max_norms[k] = current_diff[k]
                        max_post_ids[k] = np_post_ids[k]
                        max_logits[k] = current_logits[k]

    np.save('data/max_norms.npy', max_norms)
    np.save('data/max_post_ids.npy', max_post_ids)
    np.save('data/max_logits.npy', max_logits)
    return max_norms, max_post_ids, max_logits
    def test_get_sequence_dense_tensor(self):
        vocabulary_size = 3
        sparse_input = sparse_tensor.SparseTensorValue(
            # example 0, ids [2]
            # example 1, ids [0, 1]
            # example 2, ids []
            # example 3, ids [1]
            indices=((0, 0), (1, 0), (1, 1), (3, 0)),
            values=(2, 0, 1, 1),
            dense_shape=(4, 2))

        embedding_dimension = 2
        embedding_values = (
            (1., 2.),  # id 0
            (3., 5.),  # id 1
            (7., 11.)  # id 2
        )

        def _initializer(shape, dtype, partition_info):
            self.assertAllEqual((vocabulary_size, embedding_dimension), shape)
            self.assertEqual(dtypes.float32, dtype)
            self.assertIsNone(partition_info)
            return embedding_values

        expected_lookups = [
            # example 0, ids [2]
            [[7., 11.], [0., 0.]],
            # example 1, ids [0, 1]
            [[1., 2.], [3., 5.]],
            # example 2, ids []
            [[0., 0.], [0., 0.]],
            # example 3, ids [1]
            [[3., 5.], [0., 0.]],
        ]

        categorical_column = sfc.sequence_categorical_column_with_identity(
            key='aaa', num_buckets=vocabulary_size)
        embedding_column = sfc._sequence_embedding_column(
            categorical_column,
            dimension=embedding_dimension,
            initializer=_initializer)

        embedding_lookup, _ = embedding_column._get_sequence_dense_tensor(
            _LazyBuilder({'aaa': sparse_input}))

        global_vars = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
        self.assertItemsEqual(('embedding_weights:0', ),
                              tuple([v.name for v in global_vars]))
        with monitored_session.MonitoredSession() as sess:
            self.assertAllEqual(embedding_values,
                                global_vars[0].eval(session=sess))
            self.assertAllEqual(expected_lookups,
                                embedding_lookup.eval(session=sess))
Example #16
def run(checkpoint, batch_size, dataset_name, images_data, labels):
    images_placeholder, labels_placeholder, endpoints = create_model(
        batch_size, dataset_name)
    session_creator = monitored_session.ChiefSessionCreator(
        checkpoint_filename_with_path=checkpoint)
    with monitored_session.MonitoredSession(
            session_creator=session_creator) as sess:
        prob = sess.run(endpoints,
                        feed_dict={
                            images_placeholder: images_data,
                            labels_placeholder: labels
                        })
    return prob
Example #17
  def testStartQueueRunnersIgnoresMonitoredSession(self):
    zero64 = constant_op.constant(0, dtype=dtypes.int64)
    var = variables.VariableV1(zero64)
    count_up_to = var.count_up_to(3)
    queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
    init_op = variables.global_variables_initializer()
    qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
    queue_runner_impl.add_queue_runner(qr)
    with self.cached_session():
      init_op.run()
      # start_queue_runners does not start threads for a MonitoredSession
      # (which coordinates queue runners itself), so no threads are returned.
      threads = queue_runner_impl.start_queue_runners(
          monitored_session.MonitoredSession())
      self.assertFalse(threads)
  def test_sequence_length_with_zeros(self):
    column = sfc.sequence_categorical_column_with_identity(
        'aaa', num_buckets=3)
    inputs = sparse_tensor.SparseTensorValue(
        indices=((1, 0), (3, 0), (3, 1)),
        values=(1, 2, 0),
        dense_shape=(5, 2))
    expected_sequence_length = [0, 1, 0, 2, 0]

    sequence_length = column._sequence_length(_LazyBuilder({'aaa': inputs}))

    with monitored_session.MonitoredSession() as sess:
      self.assertAllEqual(
          expected_sequence_length, sequence_length.eval(session=sess))
Example #19
def run(checkpoint, batch_size, dataset_name, image_path_pattern):
    images_placeholder, endpoints = create_model(batch_size, dataset_name)
    images_data, paths = load_images(image_path_pattern, batch_size,
                                     dataset_name)
    session_creator = monitored_session.ChiefSessionCreator(
        checkpoint_filename_with_path=checkpoint)

    import time
    stime = time.time()
    with monitored_session.MonitoredSession(
            session_creator=session_creator) as sess:
        predictions = sess.run(endpoints.predicted_text,
                               feed_dict={images_placeholder: images_data})
    print('Running time: ', time.time() - stime)
    return predictions.tolist(), paths
  def test_sequence_length_with_empty_rows(self):
    """Tests _sequence_length when some examples do not have ids."""
    vocabulary_size = 3
    sparse_input_a = sparse_tensor.SparseTensorValue(
        # example 0, ids []
        # example 1, ids [2]
        # example 2, ids [0, 1]
        # example 3, ids []
        # example 4, ids [1]
        # example 5, ids []
        indices=((1, 0), (2, 0), (2, 1), (4, 0)),
        values=(2, 0, 1, 1),
        dense_shape=(6, 2))
    expected_sequence_length_a = [0, 1, 2, 0, 1, 0]
    categorical_column_a = sfc.sequence_categorical_column_with_identity(
        key='aaa', num_buckets=vocabulary_size)

    sparse_input_b = sparse_tensor.SparseTensorValue(
        # example 0, ids [2]
        # example 1, ids []
        # example 2, ids []
        # example 3, ids []
        # example 4, ids [1]
        # example 5, ids [0, 1]
        indices=((0, 0), (4, 0), (5, 0), (5, 1)),
        values=(2, 1, 0, 1),
        dense_shape=(6, 2))
    expected_sequence_length_b = [1, 0, 0, 0, 1, 2]
    categorical_column_b = sfc.sequence_categorical_column_with_identity(
        key='bbb', num_buckets=vocabulary_size)

    shared_embedding_columns = fc.shared_embedding_columns(
        [categorical_column_a, categorical_column_b], dimension=2)

    sequence_length_a = shared_embedding_columns[0]._get_sequence_dense_tensor(
        _LazyBuilder({
            'aaa': sparse_input_a
        }))[1]
    sequence_length_b = shared_embedding_columns[1]._get_sequence_dense_tensor(
        _LazyBuilder({
            'bbb': sparse_input_b
        }))[1]

    with monitored_session.MonitoredSession() as sess:
      self.assertAllEqual(
          expected_sequence_length_a, sequence_length_a.eval(session=sess))
      self.assertAllEqual(
          expected_sequence_length_b, sequence_length_b.eval(session=sess))
    def test_sequence_length(self):
        column = sfc.sequence_categorical_column_with_vocabulary_list(
            key='aaa', vocabulary_list=('omar', 'stringer', 'marlo'))
        inputs = sparse_tensor.SparseTensorValue(
            indices=((0, 0), (1, 0), (1, 1)),
            values=('marlo', 'skywalker', 'omar'),
            dense_shape=(2, 2))
        expected_sequence_length = [1, 2]

        sequence_length = column._sequence_length(_LazyBuilder({'aaa': inputs}))

        with monitored_session.MonitoredSession() as sess:
            self.assertAllEqual(expected_sequence_length,
                                sequence_length.eval(session=sess))
    def test_sequence_length(self):
        column = sfc.sequence_categorical_column_with_identity(
            'aaa', num_buckets=3)
        inputs = sparse_tensor.SparseTensorValue(
            indices=((0, 0), (1, 0), (1, 1)),
            values=(1, 2, 0),
            dense_shape=(2, 2))
        expected_sequence_length = [1, 2]

        sequence_length = column._sequence_length(_LazyBuilder({'aaa': inputs}))

        with monitored_session.MonitoredSession() as sess:
            sequence_length = sess.run(sequence_length)
            self.assertAllEqual(expected_sequence_length, sequence_length)
            self.assertEqual(np.int64, sequence_length.dtype)
Example #23
def day_of_week_trend(checkpoint_dir):
    """Compute day of week trend.
    
    Parameters:
        checkpoint_dir: Checkpoint of the saved model during training.
    """
    with tf.Graph().as_default():
        config = _CONFIG.copy()
        config['mode'] = 'validation'
        model = DeepSentiment(config)

        # Load model
        checkpoint_path = tf_saver.latest_checkpoint(checkpoint_dir)
        scaffold = monitored_session.Scaffold(init_op=None,
                                              init_feed_dict=None,
                                              init_fn=None,
                                              saver=None)
        session_creator = monitored_session.ChiefSessionCreator(
            scaffold=scaffold,
            checkpoint_filename_with_path=checkpoint_path,
            master='',
            config=None)

        posts_logits = []
        posts_labels = []
        posts_days = []
        posts_ids = []
        with monitored_session.MonitoredSession(  # Generate queue
                session_creator=session_creator, hooks=None) as session:
            batch_size = config['batch_size']
            # Integer division: range() below requires an int batch count.
            nb_batches = model.dataset.num_samples // batch_size
            for i in range(nb_batches):
                np_logits, np_labels, np_days, np_post_ids = session.run(
                    [model.logits, model.labels, model.days, model.post_ids])
                posts_logits.append(np_logits)
                posts_labels.append(np_labels)
                posts_days.append(np_days)
                posts_ids.append(np_post_ids)

    posts_logits, posts_labels = np.vstack(posts_logits), np.hstack(
        posts_labels)
    posts_days, posts_ids = np.hstack(posts_days), np.hstack(posts_ids)
    np.save('data/posts_logits_week.npy', posts_logits)
    np.save('data/posts_labels_week.npy', posts_labels)
    np.save('data/posts_days_week.npy', posts_days)
    np.save('data/posts_ids_week.npy', posts_ids)
    return posts_logits, posts_labels, posts_days, posts_ids
    def _predict(self, run_ctx, step):
        var_name_to_value = run_ctx.session.run(self._var_name_to_train_var)
        logging.info('Building placeholders.')
        placeholder_to_value = {
            self._var_name_to_placeholder[v_name]: var_name_to_value[v_name]
            for v_name in var_name_to_value
        }
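        # placeholder_to_value pairs each placeholder in the prediction graph
        # with the live value of the matching training variable, letting the
        # session built below start from current training weights instead of a
        # checkpoint (checkpoint_filename_with_path=None).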

        def feed_variables(scaffold, session):
            del scaffold
            session.run(self._var_feed_op, feed_dict=placeholder_to_value)

        logging.info('Building scaffold.')
        scaffold = training.Scaffold(init_fn=feed_variables)

        with self._graph.as_default():
            session_creator = monitored_session.ChiefSessionCreator(
                scaffold=scaffold,
                checkpoint_filename_with_path=None,
                master=run_ctx.session.sess_str)

            self._handler.setup(step)
            logging.info('Setup done.')
            with monitored_session.MonitoredSession(
                    session_creator=session_creator,
                    hooks=self._all_hooks) as predict_session:
                while not predict_session.should_stop():
                    logging.info('Predicting.... %s', self._predictions)
                    preds_evaluated = predict_session.run(self._predictions)
                    if not isinstance(self._predictions, dict):
                        for pred in preds_evaluated:
                            self._handler.handle_prediction(pred)
                    else:
                        for i in range(
                                self._estimator._extract_batch_length(
                                    preds_evaluated)):
                            self._handler.handle_prediction({
                                key: value[i]
                                for key, value in six.iteritems(
                                    preds_evaluated)
                            })

            logging.info('Finalizing.')
            self._handler.finalize(step)

        logging.info('Done with prediction.')
        self._timer.update_last_triggered_step(step)
    def __init__(self,
                 estimator,
                 prediction_input_fn,
                 input_alternative_key=None,
                 output_alternative_key=None,
                 graph=None,
                 config=None):
        """Initialize a `ContribEstimatorPredictor`.

    Args:
      estimator: an instance of `tf.contrib.learn.Estimator`.
      prediction_input_fn: a function that takes no arguments and returns an
        instance of `InputFnOps`.
      input_alternative_key: Optional. Specify the input alternative used for
        prediction.
      output_alternative_key: Specify the output alternative used for
        prediction. Not needed for single-headed models but required for
        multi-headed models.
      graph: Optional. The Tensorflow `graph` in which prediction should be
        done.
      config: `ConfigProto` proto used to configure the session.
    """
        self._graph = graph or ops.Graph()
        with self._graph.as_default():
            input_fn_ops = prediction_input_fn()
            # pylint: disable=protected-access
            model_fn_ops = estimator._get_predict_ops(input_fn_ops.features)
            # pylint: enable=protected-access
            checkpoint_path = checkpoint_management.latest_checkpoint(
                estimator.model_dir)
            self._session = monitored_session.MonitoredSession(
                session_creator=monitored_session.ChiefSessionCreator(
                    config=config,
                    checkpoint_filename_with_path=checkpoint_path))

        input_alternative_key = (
            input_alternative_key
            or saved_model_export_utils.DEFAULT_INPUT_ALTERNATIVE_KEY)
        input_alternatives, _ = saved_model_export_utils.get_input_alternatives(
            input_fn_ops)
        self._feed_tensors = input_alternatives[input_alternative_key]

        (output_alternatives, output_alternative_key
         ) = saved_model_export_utils.get_output_alternatives(
             model_fn_ops, output_alternative_key)
        _, fetch_tensors = output_alternatives[output_alternative_key]
        self._fetch_tensors = fetch_tensors
    def test_sequence_length_with_shape(self):
        """Tests _sequence_length with shape !=(1,)."""
        sparse_input = sparse_tensor.SparseTensorValue(
            # example 0, values [[0.], [1]]
            # example 1, [[10.]]
            indices=((0, 0), (0, 1), (1, 0)),
            values=(0., 1., 10.),
            dense_shape=(2, 2))
        expected_sequence_length = [2, 1]
        numeric_column = sfc.sequence_numeric_column('aaa')

        _, sequence_length = numeric_column._get_sequence_dense_tensor(
            _LazyBuilder({'aaa': sparse_input}))

        with monitored_session.MonitoredSession() as sess:
            self.assertAllEqual(expected_sequence_length,
                                sequence_length.eval(session=sess))
Example #27
def main():

    height, width, channel = get_dataset_image_shape(FLAGS.dataset_name)
    images_placeholder, endpoints = create_model(FLAGS.dataset_name)
    session_creator = monitored_session.ChiefSessionCreator(
        checkpoint_filename_with_path=
        '/media/dont/data/dont/datasets/crnn/tfname/logs/model.ckpt-110665')
    image = PIL.Image.open(sys.argv[1]).convert('RGB')
    #image = image.resize((width, height), PIL.Image.ANTIALIAS)
    images_data = np.expand_dims(np.asarray(image), axis=0)
    sess = monitored_session.MonitoredSession(session_creator=session_creator)
    start = timeit.default_timer()
    predictions = sess.run(endpoints.predicted_text,
                           feed_dict={images_placeholder: images_data})
    # Stop the timer before decoding/printing so only the run is measured.
    stop = timeit.default_timer()
    print(predictions[0].decode('utf8'))
    print('Time : ', stop - start)
Example #28
    def test_sequence_length(self):
        sparse_input = sparse_tensor.SparseTensorValue(
            # example 0, values [[0., 1., 2.], [3., 4., 5.]]
            # example 1, [[10., 11., 12.]]
            indices=((0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (1, 0),
                     (1, 1), (1, 2)),
            values=(0., 1., 2., 3., 4., 5., 10., 11., 12.),
            dense_shape=(2, 6))
        expected_sequence_length = [2, 1]
        numeric_column = sfc.sequence_numeric_column('aaa', shape=(3, ))

        _, sequence_length = numeric_column._get_sequence_dense_tensor(
            _LazyBuilder({'aaa': sparse_input}))

        with monitored_session.MonitoredSession() as sess:
            self.assertAllEqual(expected_sequence_length,
                                sequence_length.eval(session=sess))
Example #29
  def test_table_in_graph(self, cycles):
    root = self._make_model_with_tables()

    if cycles > 1:
      root = self.cycle(root, cycles - 1)
    path = tempfile.mkdtemp(prefix=self.get_temp_dir())
    save.save(root, path)
    imported = self.cycle(root, 1)

    with ops.Graph().as_default():
      imported = load.load(path)
      keys = constant_op.constant(["brain", "test", "foo", "surgery"])
      output1 = imported.lookup1(keys)
      output2 = imported.lookup2(keys)
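      # The restored tables need session-backed initialization in graph mode;
      # MonitoredSession runs this as part of its standard initialization.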
      with monitored_session.MonitoredSession() as sess:
        self.assertAllEqual([0, -1, -1, 2], sess.run(output1))
        self.assertAllEqual([2, 0, 1, -1], sess.run(output2))
    def test_get_sparse_tensors_inputs3d(self):
        """Tests _get_sparse_tensors when the input is already 3D Tensor."""
        column = sfc.sequence_categorical_column_with_identity(
            'aaa', num_buckets=3)
        inputs = sparse_tensor.SparseTensorValue(
            indices=((0, 0, 0), (1, 0, 0), (1, 1, 0)),
            values=(1, 2, 0),
            dense_shape=(2, 2, 1))

        with self.assertRaisesRegexp(
                errors.InvalidArgumentError,
                r'Column aaa expected ID tensor of rank 2\.\s*'
                r'id_tensor shape:\s*\[2 2 1\]'):
            id_weight_pair = column._get_sparse_tensors(
                _LazyBuilder({'aaa': inputs}))
            with monitored_session.MonitoredSession() as sess:
                id_weight_pair.id_tensor.eval(session=sess)