Code Example #1
  def save(self, path, dependency_path, aws_credentials = {}):
    """ Save the predictive object to the given path

    Parameters
    ----------
    path : str
      The location to save the predictive object to
    dependency_path : str
      The location to save the dependent GraphLab objects to
    aws_credentials : dict, optional
      AWS credentials, used only when saving to an S3 path
    """
    # only support saving to local or S3 for now
    if (not fu.is_s3_path(path)) and (not fu.is_local_path(path)):
      raise RuntimeError("Only save to local and S3 path is supported, cannot \
        save predictive object to path %s. " % path)

    if (fu.is_s3_path(path)):
      self._save_s3(path, dependency_path, aws_credentials)
    else:
      if os.path.exists(path):
        if os.path.isfile(path):
          __logger__.warning("Overwriting existing file '%s' when saving predictive object" % path)
        else:
          raise RuntimeError("Path %s already exists, please remove that and save again" % path)

      self._save_local(path, dependency_path, aws_credentials)

      tracker = _mt._get_metric_tracker()
      tracker.track('deploy.predictive_service.predictive_object',
          value=1,
          properties={
          'type': self.__class__.__name__,
          'dependencies': len(self.dependencies) if self.dependencies else 0}
      )
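
A minimal usage sketch of the save flow above. The subclass name MyPredictiveObject, the paths, and the credential dictionary keys are illustrative assumptions, not part of the original code.

# Hypothetical usage sketch -- class name, paths, and credential keys are assumptions.
po = MyPredictiveObject(description='example deployment')

# Local save: the target path must not already exist as a directory,
# otherwise save() raises a RuntimeError.
po.save('/tmp/my_po', dependency_path='/tmp/my_po_deps')

# S3 save: the credentials dictionary is passed through to the S3 helpers.
po.save('s3://my-bucket/my_po',
        dependency_path='s3://my-bucket/my_po_deps',
        aws_credentials={'aws_access_key_id': '...',
                         'aws_secret_access_key': '...'})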
Code Example #2
  @classmethod
  def load(cls, path):
    """ Load a predictive object from the given path

    Parameters
    ----------
    path : str
      The local or S3 location to load the predictive object from
    """
    new_po = None
    if (fu.is_s3_path(path)):
      new_po = cls._load_s3(path)
    else:
      new_po = cls._load_local(path)

    # call derived class post_load
    new_po.post_load()
    return new_po
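
Continuing the sketch above, a hedged round trip through load(); the class name and paths are still assumptions.

# Hypothetical round trip -- the class name and paths are assumptions.
po = MyPredictiveObject(description='example deployment')
po.save('/tmp/my_po', dependency_path='/tmp/my_po_deps')

# load() dispatches on the path type (S3 vs. local) and then calls the
# derived class's post_load() hook before returning the new object.
restored = MyPredictiveObject.load('/tmp/my_po')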
Code Example #3
  def _save_imp(self, po_path, dependency_path, aws_credentials):
    '''Save the predictive object to a directory

    The files for a predictive object are laid out in the following way:

      po_path/definition/meta -- serialized JSON file about the predictive
        object, including: description, dependencies, etc.
      po_path/definition/definition -- cloudpickle-serialized PredictiveObject
      dependency_path -- all dependent GraphLab objects, each in its
        own directory:
        dependency_path/uri1/ -- serialized GraphLab object with uri1
        dependency_path/uri2/ -- serialized GraphLab object with uri2
    '''
    fu.create_directory(po_path)

    describe = {
      'description': self.description,
      'dependencies': {},
      'schema_version' : self.schema_version
    }

    for (uri, gl_obj) in self.dependencies.iteritems():

      # If it isn't already saved, save it.
      temp_path = None
      try:
        if not fu.is_path(gl_obj):
          obj_type = self._get_graphlab_object_type(gl_obj)
          temp_path = tempfile.mkdtemp()

          __logger__.info("Saving dependent GraphLab %s (%s) locally to '%s' " % (obj_type, uri, temp_path))
          gl_obj.save(temp_path)
          gl_obj = temp_path
        else:
          # gl_obj is already a saved path; determine the object type from that path
          obj_type = get_graphlab_object_type(gl_obj)

        # Copy the saved object without loading it.
        save_path = os.path.join(dependency_path, uri)

        __logger__.info("Copying dependent GraphLab %s(%s) from '%s' to '%s' " % (obj_type, uri, gl_obj, save_path))

        if fu.is_s3_path(gl_obj) and fu.is_s3_path(save_path):
          fu.intra_s3_copy_model(gl_obj, save_path, aws_credentials)
        elif fu.is_local_path(gl_obj) and fu.is_s3_path(save_path):
          fu.s3_copy_model(gl_obj, save_path, aws_credentials)
        elif fu.is_local_path(gl_obj) and fu.is_local_path(save_path):
          # Useful for unit tests
          shutil.copytree(gl_obj, save_path)
        else:
          raise RuntimeError("Copy GraphLab object from S3 to local path is not supported. GraphLab object path: %s, save path: %s" % (gl_obj, save_path))
      finally:
        if temp_path:
          shutil.rmtree(temp_path)

      # add to the global describe dictionary
      describe['dependencies'][uri] = {
        'path': save_path,
        'type': obj_type
      }

    # persist the global description
    describe_path = self._get_describe_path(po_path)
    self._save_object(describe_path, describe)

    # persist the definition of myself
    definition_path = self._get_definition_path(po_path)
    try:
      with open(definition_path, 'wb') as f:
        _cloudpickle.dump(self, f)
    except Exception as e:
      __logger__.error('Unable to save object: %s' % (e,))
      raise
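
To make the directory layout in the docstring concrete, here is a sketch that reads back a local save. The file names definition/meta and definition/definition and the JSON format of the meta file follow the _save_imp docstring above; the path is hypothetical, and unpickling the definition assumes the defining class is importable in the current environment.

import json
import os
import pickle  # cloudpickle output can be read back with the standard pickle module

po_path = '/tmp/my_po'  # hypothetical path from the earlier save() sketch

# The meta file is the JSON 'describe' dictionary written via _save_object().
with open(os.path.join(po_path, 'definition', 'meta')) as f:
  meta = json.load(f)
print(meta['description'])
print(meta['schema_version'])
for uri, dep in meta['dependencies'].items():
  print(uri + ': ' + dep['type'] + ' at ' + dep['path'])

# The definition file is the cloudpickle-serialized PredictiveObject itself;
# loading it requires the class that defines it to be importable here.
with open(os.path.join(po_path, 'definition', 'definition'), 'rb') as f:
  restored = pickle.load(f)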