Code example #1
    def new(self,
            repo=None,
            models: list = None,
            home_dir: str = None,
            **kwargs):

        if models:
            finder = Model().find_one
            models = [finder(name=model_name) for model_name in models]
        else:
            models = []
            LOG.warn(
                "No models specified for the new project. "
                "When publishing a model version this project must specify its model name."
            )

        if home_dir is None:
            LOG.warn("No home directory was specified for the new project.")
            if self._decide(
                    "Would you like to use the current working directory?",
                    default=False):
                home_dir = os.getcwd()
                LOG.info("Using as home directory: {}".format(home_dir))

        return super().new(home_dir=None if home_dir is None else
                           LocalRepository(home_dir).address,
                           models=models,
                           **kwargs)
Code example #2
File: shipyard.py  Project: noronha-dataops/noronha
    def build(self, nocache: bool = False):

        work_path = None

        try:
            LOG.info("Building {} from {}".format(self.img_spec.target,
                                                  self.repo.address))
            work_path = self.make_work_path()
            build_path = self.deploy_source(work_path)

            logs = self.docker.build(path=build_path,
                                     tag=self.img_spec.target,
                                     nocache=nocache,
                                     rm=True)

            self.print_logs(logs)
            self.image = self.docker.images(self.img_spec.target)[0]
        except Exception as e:
            raise NhaDockerError("Failed to build from {}".format(
                self.repo)) from e
        else:
            self.tag_image()
            self.push_image()
            return self.image_id
        finally:
            if work_path is not None:
                work_path.dispose()
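The build() method above keeps the happy path (tag, push, return) in the else clause and the temporary workspace cleanup in finally. A minimal, self-contained sketch of the same try/except/else/finally shape, independent of noronha (build_step and publish_step are hypothetical callables):

import shutil
import tempfile


def build_with_cleanup(build_step, publish_step):
    """Same control flow as build() above: build, publish only on success, always clean up."""
    work_path = None
    try:
        work_path = tempfile.mkdtemp()    # acquire a temporary workspace
        artifact = build_step(work_path)  # may raise
    except Exception as e:
        raise RuntimeError("Build failed in {}".format(work_path)) from e
    else:
        return publish_step(artifact)     # runs only if the build succeeded
    finally:
        if work_path is not None:
            shutil.rmtree(work_path, ignore_errors=True)  # cleanup runs in every case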
Code example #3
File: movers.py  Project: Telres/noronha
    def new(self,
            name: str = None,
            model: str = None,
            train: str = None,
            ds: str = None,
            path: str = None,
            pretrained: str = None,
            skip_upload=False,
            lightweight=False,
            **kwargs):

        if path is None:
            raise NhaAPIError(
                "Cannot publish model version if path to model files is not provided"
            )

        model = Model.find_one(name=model)

        if lightweight:
            model.assert_movers_can_be_lightweight()

        if ds is not None:
            kwargs['ds'] = Dataset.find_one(name=ds, model=model).to_embedded()

        if train is not None:
            if self.proj is None:
                raise NhaAPIError(
                    "Cannot determine parent training if no working project is set"
                )
            else:
                kwargs['train'] = Training.find_one(
                    name=train, proj=self.proj.name).to_embedded()

        if pretrained is not None:
            kwargs['pretrained'] = ModelVersion.find_by_pk(
                pretrained).to_embedded()
            LOG.info(
                "Model version used pre-trained model '{}'".format(pretrained))

        mv: ModelVersion = super().new(name=name,
                                       model=model,
                                       lightweight=lightweight,
                                       **kwargs,
                                       _duplicate_filter=dict(name=name,
                                                              model=model))

        barrel = None

        try:
            if not skip_upload:
                barrel = self._store(mv, path)
        except Exception as e:
            LOG.warn("Reverting creation of model version '{}'".format(
                mv.name))
            mv.delete()
            if barrel is not None:
                barrel.purge(ignore=True)
            raise e

        return mv
Code example #4
File: utils.py  Project: noronha-dataops/noronha
    def __call__(self,
                 ref_to_proj: str = None,
                 resolvers: list = (),
                 ignore: bool = False):

        proj = None

        for res in resolvers or self.ALL:
            assert res in self.ALL
            method = getattr(self, 'resolve_{}'.format(res))
            proj = method(ref_to_proj)

            if proj is not None:
                LOG.info("Working project is '{}'".format(proj.name))
                LOG.debug("Project resolution method was '{}'".format(res))
                break
        else:
            message = """Could not determine working project from reference '{}'""".format(
                ref_to_proj)
            details = """Resolvers used: {}""".format(resolvers)

            if ignore:
                LOG.info(message)
                LOG.debug(details)
            else:
                raise ResolutionError(message, details)

        return proj
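The example above relies on Python's for/else idiom: the else branch runs only when the loop finishes without hitting break. A standalone sketch of the same resolver-chain pattern (the lambda resolvers are placeholders, not noronha APIs):

def resolve(reference, resolvers):
    """Try each resolver in order; the else clause runs only if none matched."""
    for resolver in resolvers:
        result = resolver(reference)
        if result is not None:
            break  # a resolver succeeded, so the else clause is skipped
    else:
        raise LookupError("Could not resolve reference {!r}".format(reference))
    return result


# Usage: the second resolver wins because the first returns None.
print(resolve('my-proj', [lambda ref: None, lambda ref: {'name': ref}]))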
Code example #5
File: shipyard.py  Project: noronha-dataops/noronha
    def build(self, nocache: bool = False):

        source = '{}:{}'.format(self.repo.address, self.img_spec.tag)
        LOG.info("Moving pre-built image from {} to {}".format(
            source, self.img_spec.target))
        self.docker.pull(self.repo.address, tag=self.img_spec.tag)
        self.image = self.docker.images(source)[0]
        self.tag_image()
        self.push_image()
        return self.image_id
Code example #6
    def show_exception(cls, exception, callback=None):

        if isinstance(exception, PrettyError):
            exc = exception.pretty()
        else:
            exc = PrettyError.parse_exc(exception)

        if callable(callback):
            detail = callback(exception)
            LOG.info(detail)

        LOG.error(exc)
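The optional callback lets the caller attach extra, human-readable context that is logged before the error itself. A hedged usage sketch; the run_command wrapper and its hint message are illustrative only, with show_exception standing in for the classmethod above:

def run_command(command, show_exception):
    """Hypothetical wrapper: report failures of 'command' with an extra hint."""
    try:
        command()
    except Exception as e:
        # the callback receives the raw exception and returns a detail string,
        # which is logged with LOG.info before the error is shown with LOG.error
        show_exception(e, callback=lambda exc: "While running '{}': {}".format(
            getattr(command, '__name__', command), exc))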
Code example #7
File: shipyard.py  Project: noronha-dataops/noronha
    def push_image(self):

        if self.img_spec.pushable:
            LOG.info("Pushing {}".format(self.img_spec.target))
            log = self.docker.push(self.img_spec.repo, tag=self.img_spec.tag)
            outcome = json.loads(Regex.LINE_BREAK.split(log.strip())[-1])

            if 'error' in outcome:
                raise NhaDockerError(
                    "Failed to push image '{}'. Error: {}".format(
                        self.img_spec.target, outcome.get('errorDetail')))
        else:
            LOG.warn("Docker registry is not configured. Skipping image push.")
Code example #8
    def _save_output(self, note_path, output_path):

        try:
            LOG.info("Saving output notebook: {}".format(output_path))
            # TODO: convert to pdf (find a light-weight lib for that)
            NotebookBarrel(proj=self.proj,
                           notebook=note_path,
                           file_name=output_path).store_from_path(os.getcwd())
        except Exception as e:
            err = NhaStorageError(
                "Failed to save output notebook '{}'".format(output_path))
            err.__cause__ = e
            LOG.error(err)
Code example #9
def version():
    """Framework's version"""

    LOG.echo("Noronha Dataops v%s" % FrameworkConst.FW_VERSION)
    pkg = pkg_resources.require(FrameworkConst.FW_NAME)[0]

    try:
        meta = pkg.get_metadata_lines('METADATA')
    except FileNotFoundError:
        meta = pkg.get_metadata_lines('PKG-INFO')

    for line in meta:
        if not line.startswith('Requires'):
            LOG.info(line)
Code example #10
    def _run(self, **kwargs):

        try:
            LOG.debug("Notebook parameters:")
            LOG.debug(kwargs.get('parameters', {}))
            self.proc_mon.set_state(Task.State.RUNNING)
            pm.execute_notebook(**kwargs)
        except Exception as e:
            self._handle_exc(e)
            return False
        else:
            LOG.info("Notebook execution succeeded!")
            self.proc_mon.set_state(Task.State.FINISHED)
            return True
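The kwargs forwarded to pm.execute_notebook above normally carry papermill's standard arguments: an input notebook, an output notebook and a parameters dict. A minimal sketch of such a call, with illustrative paths and parameter names:

import papermill as pm

# Execute a parameterized notebook; 'parameters' are injected into the
# notebook cell tagged "parameters" before the run starts.
pm.execute_notebook(
    'train.ipynb',            # input notebook (illustrative path)
    'train.output.ipynb',     # executed copy written here
    parameters={'epochs': 10, 'learning_rate': 0.01},
)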
Code example #11
File: publish.py  Project: noronha-dataops/noronha
    def __call__(self,
                 src_path: str = Paths.TMP,
                 details: dict = None,
                 version_name: str = None,
                 model_name: str = None,
                 uses_dataset: bool = True,
                 dataset_name: str = None,
                 uses_pretrained: bool = False,
                 pretrained_with: str = None,
                 lightweight: bool = False):

        version_name = version_name or self.train.name
        model_name = self._infer_parent_model(model_name)
        ds = self._infer_dataset(model_name, uses_dataset, dataset_name)
        mv = None
        err = None

        try:
            mv = self.mv_api.new(name=version_name,
                                 model=model_name,
                                 ds=ds.name if ds else None,
                                 train=self.train.name,
                                 path=src_path,
                                 details=details or {},
                                 pretrained=self._infer_pretrained(
                                     uses_pretrained, pretrained_with),
                                 lightweight=lightweight,
                                 _replace=True)
        except Exception as e:
            LOG.warn("Model version {}:{} publish failed".format(
                model_name, version_name))
            err = e

        if self.train.name:
            self.train.update(mover=mv, ds=ds)

        if err:
            raise err

        if get_purpose() == DockerConst.Section.IDE:
            LOG.info(
                "For testing purposes, model files will be moved to the deployed model path"
            )
            MetaCargo(docs=[mv], section=DockerConst.Section.IDE).deploy()
            MoversCargo(mv, local=True,
                        section=DockerConst.Section.IDE).move(src_path)

        return mv
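The example above records the publish failure, still updates the training document, and only then re-raises. A standalone sketch of that deferred-raise pattern (do_publish and finalize are hypothetical placeholders):

def publish_and_record(do_publish, finalize):
    """Run the publish step, always run the bookkeeping step, then surface any error."""
    result = None
    err = None
    try:
        result = do_publish()
    except Exception as e:
        err = e          # remember the failure instead of raising right away
    finalize(result)     # bookkeeping runs whether or not publishing worked
    if err:
        raise err        # surface the original failure afterwards
    return result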
Code example #12
File: note.py  Project: noronha-dataops/noronha
    def __call__(self,
                 tag: str = DockerConst.LATEST,
                 port: int = NoteConst.HOST_PORT,
                 movers: list = None,
                 datasets: list = None,
                 **kwargs):

        LOG.info("Notebook IDE will be mapped to port {}".format(port))
        return NotebookExp(
            port=port,
            proj=self.proj,
            tag=tag,
            movers=[
                ModelVersion.find_by_pk(mv).to_embedded()
                for mv in movers or []
            ],
            datasets=[Dataset.find_by_pk(ds) for ds in datasets or []],
            resource_profile=kwargs.pop('resource_profile',
                                        None)).launch(**kwargs)
Code example #13
    def launch(self, tasks=1, skip_build=False, just_build=False, **_):

        if not just_build:
            assert self.scallable or tasks == 1, MisusageError(
                "Plugin '{}' is not scallable".format(self.alias)
            )

            assert self.isle_compass.native, MisusageError(
                "There is no point in setting up the plugin '{}' because it's configured in 'foreign mode'"
                .format(self.alias)
            )

        if not skip_build:
            self.builder.build()

        if not just_build:
            super().launch(tasks=tasks)

            if self.isle_compass.port is not None:
                LOG.info("Mapping service '{}' to port {}".format(
                    self.make_name(), self.isle_compass.port))
Code example #14
    def rm(self, name, model):

        ds = self.doc().find_one(name=name, model=model)
        # TODO: check if dataset is not being used in a training right now
        ds.delete()

        if ds.stored:
            LOG.info("Purging dataset '{}' from the file manager".format(
                ds.show()))
            file_status = 'purged' if DatasetBarrel(ds).purge(
                ignore=True) else 'not_found'
        else:
            LOG.info("Dataset '{}' is not stored. Skipping purge".format(
                ds.show()))
            file_status = 'not_stored'

        return dict(name=name,
                    model=model,
                    record='removed',
                    files=file_status)