Example #1
0
def local_predict(args):
    """Runs prediction locally against a session-bundle model.

    Args:
      args: parsed CLI args; uses args.model_dir (path to the exported
        model), args.input (list of JSON-lines input files) and
        args.dry_run (if true, only print what would be fed and fetched).
    """

    session, _ = session_bundle.load_session_bundle_from_path(args.model_dir)
    # The graph stores JSON maps of alias -> tensor name in the 'inputs'
    # and 'outputs' collections; use them to translate record keys into
    # feedable tensor names and to label the fetched outputs.
    input_alias_map = json.loads(session.graph.get_collection('inputs')[0])
    output_alias_map = json.loads(session.graph.get_collection('outputs')[0])
    aliases, tensor_names = zip(*output_alias_map.items())

    for input_file in args.input:
        with open(input_file) as f:
            feed_dict = collections.defaultdict(list)
            for line in f:
                # .items() instead of the Python-2-only .iteritems();
                # valid (and correct) on both Python 2 and 3.
                for k, v in json.loads(line).items():
                    feed_dict[input_alias_map[k]].append(v)
            if args.dry_run:
                print('Feed data dict %s to graph and fetch %s' % (
                    feed_dict, tensor_names))
            else:
                result = session.run(fetches=tensor_names, feed_dict=feed_dict)
                for row in zip(*result):
                    # numpy values need .tolist() to be JSON-serializable;
                    # plain Python values pass through unchanged.
                    print(json.dumps({
                        name: (value.tolist() if getattr(
                            value, 'tolist', None) else value)
                        for name, value in zip(aliases, row)
                    }))
Example #2
0
def local_predict(input_data, model_dir):
    """Runs prediction locally.

    Args:
        input_data: list of input files to run prediction on; each line of
            each file is preprocessed into a serialized example proto.
        model_dir: path to Tensorflow model folder, which is also expected
            to contain metadata.yaml describing the feature transform.
    """

    session, _ = session_bundle.load_session_bundle_from_path(model_dir)
    # The graph stores JSON maps of alias -> tensor name in the 'inputs'
    # and 'outputs' collections.
    input_alias_map = json.loads(session.graph.get_collection('inputs')[0])
    output_alias_map = json.loads(session.graph.get_collection('outputs')[0])
    aliases, tensor_names = zip(*output_alias_map.items())

    metadata_path = os.path.join(model_dir, 'metadata.yaml')
    transformer = features.FeatureProducer(metadata_path)
    # There is a single input tensor; next(iter(...)) replaces the
    # Python-2-only values()[0] (dict views are not indexable in Python 3)
    # and is hoisted out of the per-line loop.
    input_tensor = next(iter(input_alias_map.values()))
    for input_file in input_data:
        with open(input_file) as f:
            feed_dict = collections.defaultdict(list)
            for line in f:
                preprocessed = transformer.preprocess(line)
                feed_dict[input_tensor].append(
                    preprocessed.SerializeToString())
            result = session.run(fetches=tensor_names, feed_dict=feed_dict)
            for row in zip(*result):
                # numpy values need .tolist() to be JSON-serializable.
                print(json.dumps({
                    name: (value.tolist()
                           if getattr(value, 'tolist', None) else value)
                    for name, value in zip(aliases, row)
                }))
Example #3
0
def local_predict(args):
  """Runs prediction locally against a session-bundle model.

  Args:
    args: parsed CLI args; uses args.model_dir (path to the exported model,
      also expected to contain metadata.yaml), args.input (list of raw input
      files to preprocess) and args.dry_run (if true, only print what would
      be fed and fetched).
  """

  session, _ = session_bundle.load_session_bundle_from_path(args.model_dir)
  # The graph stores JSON maps of alias -> tensor name in the 'inputs'
  # and 'outputs' collections.
  input_alias_map = json.loads(session.graph.get_collection('inputs')[0])
  output_alias_map = json.loads(session.graph.get_collection('outputs')[0])
  aliases, tensor_names = zip(*output_alias_map.items())

  metadata_path = os.path.join(args.model_dir, 'metadata.yaml')
  transformer = features.FeatureProducer(metadata_path)
  # There is a single input tensor; next(iter(...)) replaces the
  # Python-2-only values()[0] (dict views are not indexable in Python 3)
  # and is hoisted out of the per-line loop.
  input_tensor = next(iter(input_alias_map.values()))
  for input_file in args.input:
    with open(input_file) as f:
      feed_dict = collections.defaultdict(list)
      for line in f:
        preprocessed = transformer.preprocess(line)
        feed_dict[input_tensor].append(preprocessed.SerializeToString())
      if args.dry_run:
        print('Feed data dict %s to graph and fetch %s' % (
            feed_dict, tensor_names))
      else:
        result = session.run(fetches=tensor_names, feed_dict=feed_dict)
        for row in zip(*result):
          # Outputs here are assumed to be numpy values -- .tolist()
          # makes them JSON-serializable.
          print(json.dumps({name: value.tolist()
                            for name, value in zip(aliases, row)}))
Example #4
0
def load_model(model_path, config=None, **unused_kwargs):
  """Loads the model at the specified path.

  Args:
    model_path: the path to either session_bundle or SavedModel
    config: tf.ConfigProto containing session configuration options.
    unused_kwargs: kwargs for compatiblity purpose.

  Returns:
    A pair of (Session, SignatureDef) objects.

  Raises:
    PredictionError: if the model could not be loaded.
  """
  # bundle_shim (available on recent TF, ~0.12) can load both legacy
  # session bundles and regular SavedModels, but there is no reliable way
  # to detect up front whether it is importable or whether it will handle
  # a given legacy graph. So, in true Python fashion, it's better to ask
  # forgiveness than permission: try the shim first and fall back to the
  # older custom session_bundle implementation on any failure.
  try:
    from tensorflow.contrib.session_bundle import bundle_shim  # pylint: disable=g-import-not-at-top
    from tensorflow.python.saved_model import tag_constants  # pylint: disable=g-import-not-at-top
    # We expect that the customer will export saved model and use
    # tag_constants.SERVING for serving graph. This assumption also extends to
    # model server.
    load_fn = bundle_shim.load_session_bundle_or_saved_model_bundle_from_path
    session, meta_graph = load_fn(
        model_path, tags=[tag_constants.SERVING], config=config)
  except Exception:  # pylint: disable=broad-except
    session, meta_graph = session_bundle.load_session_bundle_from_path(
        model_path, config=config)

  if session is None:
    raise PredictionError(PredictionError.FAILED_TO_LOAD_MODEL,
                          "Could not load model from %s" % model_path)

  graph = session.graph
  collection_keys = graph.get_all_collection_keys()
  # Models exported before the SavedModel spec existed declared their
  # inputs and outputs via TensorFlow collections; detect that legacy
  # layout and build the signature accordingly.
  if INPUTS_KEY not in collection_keys or OUTPUTS_KEY not in collection_keys:
    # Regular (possibly upgraded from session_bundle) SavedModel.
    signature = _get_signature_from_meta_graph(graph, meta_graph)
  else:
    signature = _get_legacy_signature(graph)

  return session, signature
Example #5
0
def local_predict(args):
  """Runs prediction locally against a session-bundle model.

  Args:
    args: parsed CLI args; uses args.model_dir (path to the exported
      model), args.input (list of TFRecord files of serialized examples)
      and args.dry_run (if true, only print what would be fed and fetched).
  """

  session, _ = session_bundle.load_session_bundle_from_path(args.model_dir)
  # The graph stores JSON maps of alias -> tensor name in the 'inputs'
  # and 'outputs' collections.
  input_alias_map = json.loads(session.graph.get_collection('inputs')[0])
  output_alias_map = json.loads(session.graph.get_collection('outputs')[0])
  aliases, tensor_names = zip(*output_alias_map.items())

  for input_file in args.input:
    feed_dict = collections.defaultdict(list)
    # Each TFRecord entry is fed as-is to the 'examples_bytes' input.
    for line in tf_record.tf_record_iterator(input_file):
      feed_dict[input_alias_map['examples_bytes']].append(line)

    if args.dry_run:
      print('Feed data dict %s to graph and fetch %s' % (
          feed_dict, tensor_names))
    else:
      result = session.run(fetches=tensor_names, feed_dict=feed_dict)
      for row in zip(*result):
        # numpy values need .tolist() to be JSON-serializable; plain
        # Python values pass through unchanged.
        print(json.dumps(
            {name: (value.tolist() if getattr(value, 'tolist', None) else value)
             for name, value in zip(aliases, row)}))
Example #6
0
def local_predict(args):
  """Runs prediction locally against a session-bundle model.

  Args:
    args: parsed CLI args; uses args.model_dir (path to the exported
      model), args.input (list of JSON-lines input files) and
      args.dry_run (if true, only print what would be fed and fetched).
  """

  session, _ = session_bundle.load_session_bundle_from_path(args.model_dir)
  # The graph stores JSON maps of alias -> tensor name in the 'inputs'
  # and 'outputs' collections; use them to translate record keys into
  # feedable tensor names and to label the fetched outputs.
  input_alias_map = json.loads(session.graph.get_collection('inputs')[0])
  output_alias_map = json.loads(session.graph.get_collection('outputs')[0])
  aliases, tensor_names = zip(*output_alias_map.items())

  for input_file in args.input:
    with open(input_file) as f:
      feed_dict = collections.defaultdict(list)
      for line in f:
        # .items() instead of the Python-2-only .iteritems(); valid
        # (and correct) on both Python 2 and 3.
        for k, v in json.loads(line).items():
          feed_dict[input_alias_map[k]].append(v)
      if args.dry_run:
        print('Feed data dict %s to graph and fetch %s' % (
            feed_dict, tensor_names))
      else:
        result = session.run(fetches=tensor_names, feed_dict=feed_dict)
        for row in zip(*result):
          # Outputs here are assumed to be numpy values -- .tolist()
          # makes them JSON-serializable.
          print(json.dumps({name: value.tolist()
                            for name, value in zip(aliases, row)}))
Example #7
0
    for line in f:
        label, c1, c2, c3 = line.rstrip().split(' ')
        #convert labels into onehot encoding
        onehot = np.zeros(n_classes)
        onehot[labelmap[label]] = 1.0
        labels.append(onehot)
        #create absolute paths for image files
        filenames.append([testdir + '/' + c for c in (c1, c2, c3)])

    return zip(labels, filenames)


if __name__ == '__main__':
    args = parse_args()

    session, _ = session_bundle.load_session_bundle_from_path(args.model_dir)
    # get the mappings between aliases and tensor names
    # for both inputs and outputs
    input_alias_map = json.loads(session.graph.get_collection('inputs')[0])
    output_alias_map = json.loads(session.graph.get_collection('outputs')[0])
    aliases, tensor_names = zip(*output_alias_map.items())
    examples = read_test_list(args.test_dir)
    start_time = time.time()
    y_true = []
    y_pred = []
    for (label, files) in examples:
        channels = [misc.imread(file_io.FileIO(f, 'r')) for f in files]
        image = np.dstack(channels)

        feed_dict = {input_alias_map['image']: [image]}
        predict, scores = session.run(fetches=[