Example #1
0
    def load(cls, bundle, **kwargs):
        """Load a model from a bundle.

        The bundle may be a local directory or an archive (unzipped
        first); a remote, exported model is also supported via the
        ``remote`` keyword.

        :returns a Service implementation
        """
        # A directory is used as-is; anything else is assumed to be an archive
        directory = bundle if os.path.isdir(bundle) else unzip_files(bundle)

        model_basename = find_model_basename(directory)
        vocabs = load_vocabs(directory)
        vectorizers = load_vectorizers(directory)

        backend = normalize_backend(kwargs.get('backend', 'tf'))
        remote = kwargs.get("remote", None)

        if remote:
            # Delegate to an exported model served remotely
            model = Service._create_remote_model(
                directory,
                backend,
                remote,
                kwargs.get("name", None),
                cls.signature_name(),
                kwargs.get('beam', 10),
                preproc=kwargs.get('preproc', False),
            )
            return cls(vocabs, vectorizers, model)

        # Currently nothing to do here
        # labels = read_json(os.path.join(directory, model_basename) + '.labels')

        import_user_module('baseline.{}.embeddings'.format(backend))
        import_user_module('baseline.{}.{}'.format(backend, cls.task_name()))
        model = load_model_for(cls.task_name(), model_basename, **kwargs)
        return cls(vocabs, vectorizers, model)
Example #2
0
 def load_model(self, model_dir):
     """Restore a trained model from *model_dir* for CPU evaluation.

     :param model_dir: Directory containing the saved model artifacts
     :returns: tuple of (model in eval mode on CPU, vectorizers,
         model basename without its directory prefix)
     """
     basename = find_model_basename(model_dir)
     vectorizers = load_vectorizers(model_dir)
     # Load on CPU and make sure every parameter really lives there
     model = load_model_for(self.task.task_name(), basename, device='cpu').cpu()
     model.eval()
     return model, vectorizers, os.path.basename(basename)
Example #3
0
 def load_model(self, model_dir):
     """Load a saved model for inference on the CPU.

     :param model_dir: Directory holding the model checkpoint files
     :returns: (eval-mode CPU model, vectorizers, bare model name)
     """
     model_name = find_model_basename(model_dir)
     vectorizers = load_vectorizers(model_dir)
     loaded = load_model_for(self.task.task_name(), model_name, device='cpu')
     # Force parameters onto the CPU and switch off training behavior
     loaded = loaded.cpu()
     loaded.eval()
     return loaded, vectorizers, os.path.basename(model_name)
Example #4
0
    def load(cls, bundle, **kwargs):
        """Load a model from a bundle.

        This can be either a local model or a remote, exported model.

        :param bundle: A directory, a zip archive file, or a path prefix
            (its dirname is the model directory and its basename selects
            a specific model inside it)
        :returns a Service implementation
        """
        # can delegate
        basehead = None

        if os.path.isdir(bundle):
            directory = bundle
        elif os.path.isfile(bundle):
            directory = unzip_files(bundle)
        else:
            # Neither a dir nor a file: treat as directory + model-name prefix
            directory = os.path.dirname(bundle)
            basehead = os.path.basename(bundle)
        model_basename = find_model_basename(directory, basehead)
        # Vocab files share the model's trailing id, e.g. "<id>.json"
        suffix = model_basename.split('-')[-1] + ".json"
        vocabs = load_vocabs(directory, suffix)

        be = normalize_backend(kwargs.get('backend', 'tf'))

        remote = kwargs.get("remote", None)
        name = kwargs.get("name", None)
        if remote:
            logging.debug("loading remote model")
            beam = int(kwargs.get('beam', 30))
            model, preproc = Service._create_remote_model(
                directory,
                be,
                remote,
                name,
                cls.task_name(),
                cls.signature_name(),
                beam,
                preproc=kwargs.get('preproc', 'client'),
                version=kwargs.get('version'),
                remote_type=kwargs.get('remote_type'),
            )
            vectorizers = load_vectorizers(directory)
            return cls(vocabs, vectorizers, model, preproc)

        # Currently nothing to do here
        # labels = read_json(os.path.join(directory, model_basename) + '.labels')

        import_user_module('baseline.{}.embeddings'.format(be))
        try:
            # Best-effort: the per-task module may not exist for this backend
            import_user_module('baseline.{}.{}'.format(be, cls.task_name()))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate; the failure is still tolerated, but now logged
            logging.debug("optional task module import failed", exc_info=True)
        model = load_model_for(cls.task_name(), model_basename, **kwargs)
        vectorizers = load_vectorizers(directory)
        return cls(vocabs, vectorizers, model, 'client')
Example #5
0
    def load(cls, bundle, **kwargs):
        """Load a model from a bundle.

        This can be either a local model or a remote, exported model.

        :param bundle: A directory containing the model, or an archive
            that is unzipped first
        :returns a Service implementation
        """
        # can delegate
        if os.path.isdir(bundle):
            directory = bundle
        else:
            directory = unzip_files(bundle)

        model_basename = find_model_basename(directory)
        vocabs = load_vocabs(directory)
        vectorizers = load_vectorizers(directory)

        be = normalize_backend(kwargs.get('backend', 'tf'))

        remote = kwargs.get("remote", None)
        name = kwargs.get("name", None)
        if remote:
            logging.debug("loading remote model")
            beam = kwargs.get('beam', 30)
            model, preproc = Service._create_remote_model(
                directory, be, remote, name, cls.signature_name(), beam,
                preproc=kwargs.get('preproc', 'client'),
                version=kwargs.get('version')
            )
            return cls(vocabs, vectorizers, model, preproc)

        # Currently nothing to do here
        # labels = read_json(os.path.join(directory, model_basename) + '.labels')

        import_user_module('baseline.{}.embeddings'.format(be))
        try:
            # Best-effort: the per-task module may not exist for this backend
            import_user_module('baseline.{}.{}'.format(be, cls.task_name()))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate; the failure is still tolerated, but now logged
            logging.debug("optional task module import failed", exc_info=True)
        model = load_model_for(cls.task_name(), model_basename, **kwargs)
        return cls(vocabs, vectorizers, model, 'client')
def load_servable_embeddings_model(filename, **kwargs):
    """Load a servable embeddings model from *filename*.

    Thin wrapper over ``load_model_for`` with the task name fixed to
    ``'servable_embeddings'``; any extra keyword arguments pass through.
    """
    task_name = 'servable_embeddings'
    return load_model_for(task_name, filename, **kwargs)