Ejemplo n.º 1
0
def infer():
    """ Given a model handle and input values, this def runs the model inference graph and returns the predictions.
  Args: Model handle ('handle' form field), input values ('values' form field, JSON-encoded).
  Returns: A JSON containing all the model predictions, or
      {'status': 'ERROR', 'why': ...} if the model is missing or no values were sent.
  """
    # .items() (not the Py2-only .iteritems()) works on both Python 2 and 3.
    args = {k: v for k, v in request.forms.items()}
    print(args)
    clear_session()  # Clears TF graphs.
    # Cached keras models reference the deleted graph, so they must go too.
    clear_thread_cache()
    try:
        # KeyError on a missing 'handle' field is also caught here.
        model = get_model(args['handle'])
    except Exception as e:
        return json.dumps({
            'status': 'ERROR',
            'why': 'Infer: Model probably not found ' + str(e)
        })

    if 'values' not in args:
        return json.dumps({'status': 'ERROR', 'why': 'No values specified'})
    print(args['handle'])
    print(args['values'])
    outputs = model.infer(json.loads(args['values']))
    return json.dumps({'status': 'OK', 'result': outputs})
Ejemplo n.º 2
0
def train_status():
    """ Grabs the metrics from disk and returns them for the given model handle.
  Args: Model handle ('handle' form field).
  Returns: A JSON with a dictionary of keras_model_name -> metric_name -> list(metric values),
      {'status': 'DONE'} once training has finished, or
      {'status': 'ERROR', 'why': ...} on failure.
  """
    # .items() (not the Py2-only .iteritems()) works on both Python 2 and 3.
    args = {k: v for k, v in request.forms.items()}
    try:
        model = get_model(args['handle'])
    except Exception:  # Narrowed from bare except so SystemExit/KeyboardInterrupt propagate.
        return json.dumps({
            'status': 'ERROR',
            'why': 'Model probably not found'
        })
    if not model:
        return json.dumps({'status': 'ERROR', 'why': 'Concurrency error'})
    if model.status == ModelStatus.TRAINED:
        return json.dumps({'status': 'DONE'})
    losses = {}
    for model_name, value_dict in model.val_losses.items():
        for metric_name, vals in value_dict.items():
            # Keep only the trailing LOSS_LENGTH values; handleNaN sanitizes
            # values that strict JSON cannot encode.
            losses.setdefault(model_name, {})[metric_name] = [
                handleNaN(x) for x in vals[-LOSS_LENGTH:]
            ]
    return json.dumps({'status': 'OK', 'val_losses': losses})
Ejemplo n.º 3
0
def infer_types(args, files):
  """ Look up the model named by args['handle'] and report its column types.
  Args: Model handle
  Returns: a JSON holding the column-name to type map.
  """
  try:
    model = get_model(args['handle'])
  except Exception as err:
    reason = 'Model probably not found ' + str(err)
    return json.dumps({'status': 'ERROR', 'why': reason})
  return json.dumps({'status': 'OK', 'types': model.types})
Ejemplo n.º 4
0
    def get(self, model_id):
        """Return the cached Model for model_id, loading and caching it on a miss.

        Returns None when the backing store has no record for model_id.
        """
        if self.cache.get(model_id) is None:
            # Cache miss: fetch the raw record from the store.
            result = get_model(model_id)
            if result is None:
                return None
            if len(self.cache) >= model_cache_size:
                # Evict the first-inserted model to make room.
                # next(iter(...)) works on Py3, where keys() is not indexable.
                self.cache.pop(next(iter(self.cache)))
            # Store under the actual id (the original used the literal
            # string "model_id") and do it on every miss, not only when
            # the cache happened to be full.
            self.cache[model_id] = Model(model_id, result["i"],
                                         result["j"], result["k"])

        return self.cache.get(model_id, None)
Ejemplo n.º 5
0
def train(args, files):
  """ Runs the training for the given model.
  Args: Model handle (args['handle']).
  Returns: A JSON confirming that training has been kicked-off, or
      {'status': 'ERROR', 'why': ...} if the model cannot be loaded.
  """
  clear_session()  # Clears TF graphs.
  clear_thread_cache()  # We need to clear keras models since graph is deleted.
  try:
    model = get_model(args['handle'])
  except Exception:  # Narrowed from bare except so SystemExit/KeyboardInterrupt propagate.
    return json.dumps({'status': 'ERROR', 'why': 'Model probably not found'})
  model.start_training()
  return json.dumps({'status': 'OK', 'handle': model.get_handle()})