def _requirements(self) -> Requirements:
    """Union of pipeline, server and loader requirements.

    :return: :class:`Requirements` combining every pipeline model's
        requirements with those detected for the server and the loader
    """
    combined = Requirements()
    for model in self.pipeline.models.values():
        combined = combined + model.requirements
    combined = combined + get_object_requirements(self.server)
    combined = combined + get_object_requirements(self.loader)
    return combined
def create(cls, model_object, input_data,
           model_name: str = None,
           additional_artifacts: ArtifactCollection = None,
           additional_requirements: AnyRequirements = None,
           custom_wrapper: ModelWrapper = None,
           custom_artifact: ArtifactCollection = None,
           custom_input_meta: DatasetType = None,
           custom_output_meta: DatasetType = None,
           custom_prediction=None,
           custom_requirements: AnyRequirements = None) -> 'Model':
    """
    Creates Model instance from arbitrary model objects and sample of input data

    :param model_object: The model object to analyze.
    :param input_data: The image to run.
    :param model_name: The model name.
    :param additional_artifacts: Additional artifact.
    :param additional_requirements: Additional requirements.
    :param custom_wrapper: Custom model wrapper.
    :param custom_artifact: Custom artifact collection to replace all other.
    :param custom_input_meta: Custom input DatasetType.
    :param custom_output_meta: Custom output DatasetType.
    :param custom_prediction: Custom prediction output.
    :param custom_requirements: Custom requirements to replace all other.
    :returns: :py:class:`Model`
    """
    # Every component below falls back to auto-detection when no custom
    # override is supplied (truthiness check, mirroring the public contract).
    wrapper: ModelWrapper = custom_wrapper or ModelAnalyzer.analyze(
        model_object)
    name = model_name or _generate_model_name(wrapper)

    artifact = custom_artifact or WrapperArtifactCollection(wrapper)
    if additional_artifacts is not None:
        artifact += additional_artifacts

    # A sample prediction is needed both as output example and for
    # output metadata analysis.
    input_meta = custom_input_meta or DatasetAnalyzer.analyze(input_data)
    prediction = custom_prediction or wrapper.predict(input_data)
    output_meta = custom_output_meta or DatasetAnalyzer.analyze(prediction)

    if custom_requirements is not None:
        # Custom requirements replace auto-detection entirely.
        requirements = resolve_requirements(custom_requirements)
    else:
        # Auto-detect from the model object plus the sample input/output.
        requirements = (get_object_requirements(model_object)
                        + get_object_requirements(input_data)
                        + get_object_requirements(prediction))
    if additional_requirements is not None:
        requirements += additional_requirements

    model = Model(name, wrapper, None, input_meta, output_meta, requirements)
    # Artifacts are kept unpersisted until the model is saved.
    model._unpersisted_artifacts = artifact
    return model
def from_callable(cls, callable):
    """Wrap *callable* into a :class:`CallableMetricWrapper`.

    Detects the callable's runtime requirements, pickles it via
    :class:`PickleModelIO` and stores the compressed artifact bytes
    alongside those requirements.
    """
    requirements = get_object_requirements(callable)
    payload = {}
    with PickleModelIO().dump(callable) as artifacts:
        for path, raw_bytes in artifacts.bytes_dict().items():
            payload[path] = cls.compress(raw_bytes)
    return CallableMetricWrapper(payload, requirements).bind(callable)
def _model_requirements(self) -> Requirements:
    """Runtime requirements of the bound model.

    The default implementation auto-detects requirements via Python
    interpreter internals, which is not 100% robust — subclasses are
    encouraged to re-implement this method.

    :return: :class:`.Requirements` object representing runtime
        requirements of bound module object
    """
    return get_object_requirements(self)
def test_requirements_analyzer__custom_modules():
    """Analyzer must report both custom modules and installable packages."""
    import tensorflow  # noqa
    from custom_module import MODEL

    reqs = get_object_requirements(MODEL)

    # "test_cases" appears here as this code is imported by pytest;
    # __main__ modules won't appear here
    detected_custom = {req.name for req in reqs.custom}
    assert detected_custom == {'model_trainer', 'custom_module', 'test_cases'}

    detected_installable = {req.package for req in reqs.installable}
    assert detected_installable == {'scikit-learn', 'pandas', 'six'}
def test_requirements_analyzer__custom_modules():
    """Analyzer must trace requirements through proxy/package imports."""
    import catboost  # noqa
    import unused_code  # noqa
    from proxy_model import model

    reqs = get_object_requirements(model)

    # "test_cases" appears here as this code is imported by pytest;
    # __main__ modules won't appear here
    detected_custom = {req.name for req in reqs.custom}
    assert detected_custom == {'model_trainer', 'proxy_pkg_import',
                               'pkg_import', 'pkg'}

    detected_installable = {req.package for req in reqs.installable}
    assert detected_installable == {'scikit-learn', 'six', 'isort'}
def test_requirements_analyzer__model_works(tmpdir):
    """End-to-end check: the detected custom-module sources plus a pickled
    model are enough for a fresh interpreter to load and use the model.

    :param tmpdir: pytest-provided temporary directory fixture
    """
    import sys

    from proxy_model import model

    reqs = get_object_requirements(model)

    # Materialize every custom-module source file the model depends on.
    for req in reqs.custom:
        for rel_path, src in req.to_sources_dict().items():
            dst = os.path.join(tmpdir, rel_path)
            os.makedirs(os.path.dirname(dst), exist_ok=True)
            with open(dst, 'w') as f:
                f.write(src)

    with open(os.path.join(tmpdir, 'model.pkl'), 'wb') as f:
        dill.dump(model, f)

    shutil.copy(fs.current_module_path('use_model.py'), tmpdir)

    # Fix: use the current interpreter explicitly with an argv list
    # (shell=False) — the previous shell string 'python use_model.py' could
    # resolve to a different interpreter/env than the one running the tests.
    cp = subprocess.run([sys.executable, 'use_model.py'], cwd=str(tmpdir))
    assert cp.returncode == 0
def _requirements(self) -> Requirements:
    """Union of model, server and loader requirements."""
    server_reqs = get_object_requirements(self.server)
    loader_reqs = get_object_requirements(self.loader)
    return self.model.requirements + server_reqs + loader_reqs
def get_requirements(self, obj) -> Requirements:
    """Detect and return the runtime requirements of *obj*.

    :param obj: arbitrary object to analyze
    :return: :class:`Requirements` detected for the object
    """
    return get_object_requirements(obj)
def _requirements(self) -> Requirements:
    """Union of server, loader and all models requirements."""
    total = (get_object_requirements(self.server)
             + get_object_requirements(self.loader))
    for model in self.models:
        total = total + model.requirements
    return total
def test_libgomp(wrapper):
    """Wrapper requirements must resolve to the libgomp1 unix package."""
    detected = get_object_requirements(wrapper)
    unix_reqs = RequirementAnalyzer.analyze(detected).of_type(
        UnixPackageRequirement)
    assert unix_reqs == [UnixPackageRequirement('libgomp1')]
def _requirements(self) -> Requirements:
    """Union of server, loader and all models requirements."""
    # Flatten per-model requirement lists before combining.
    model_reqs = [req
                  for model in self.models
                  for req in model.params.requirements]
    return (get_object_requirements(self.server)
            + get_object_requirements(self.loader)
            + model_reqs)