Example #1
    @classmethod
    def _save_df(cls, df: pd.DataFrame) -> DPTmpFile:
        fn = DPTmpFile(ArrowFormat.ext)
        df = to_df(df)
        process_df(df)
        ArrowFormat.save_file(fn.name, df)
        log.debug(f"Saved df to {fn} ({os.path.getsize(fn.file)} bytes)")
        return fn
Example #2
def run(
    name: str,
    parameter: Tuple[str],
    cache: bool,
    wait: bool,
    owner: str,
    show_output: bool,
):
    """Run a report"""
    params = process_cmd_param_vals(parameter)
    log.info(f"Running script with parameters {params}")
    script = api.Script.get(name, owner=owner)
    with api_error_handler("Error running script"):
        r = script.run(parameters=params, cache=cache)
    if wait:
        with click_spinner.spinner():
            while not r.is_complete():
                time.sleep(2)
                r.refresh()
            log.debug(f"Run completed with status {r.status}")
            if show_output:
                click.echo(r.output)
            if r.status == "SUCCESS":
                if r.result:
                    success_msg(f"Script result - '{r.result}'")
                if r.report:
                    report = api.Report.by_id(r.report)
                    success_msg(f"Report generated at {report.web_url}")
            else:
                failure_msg(
                    f"Script run failed/cancelled\n{r.error_msg}: {r.error_detail}"
                )

    else:
        success_msg(f"Script run started, view at {script.web_url}")
Example #3
def setup_script(s: api.Script, env_dir: Path):
    """Setup the script - unpack & install deps"""
    # TODO - add local cache check here
    if env_dir.exists():
        log.debug("Package already exists, not redownloading")
        return None

    # download and unpack bundle locally into env_dir
    sdist = s.download_pkg()
    assert tarfile.is_tarfile(sdist), "Invalid sdist file"
    shutil.unpack_archive(sdist, extract_dir=env_dir, format="gztar")
    sdist.unlink()
    comp_r = compileall.compile_dir(env_dir, force=True, workers=1, quiet=1)
    if not comp_r:
        log.warning("Compiling script bundle failed - errors may occur")

    # install deps
    if s.requirements:
        pip_args = [sys.executable, "-m", "pip", "install"]
        if os.getuid() != 0 and not in_venv():
            # we're a normal/non-root user outside a venv
            pip_args.append("--user")
        pip_args.extend(s.requirements)
        log.debug(f"Calling pip as '{pip_args}'")
        subprocess.run(args=pip_args, check=True)
        importlib.invalidate_caches()  # ensure new packages are detected

    log.info(f"Successfully installed bundle for script {s.id}")
Example #4
    def _gen_report(self, embedded: bool, title: str,
                    headline: str) -> t.Tuple[str, t.List[Path]]:
        """Build XML report document"""
        # convert Blocks to XML
        s = BuilderState(embedded)
        _s = self.top_block.to_xml(s)
        assert len(_s.elements) == 1

        # add main structure and Meta
        report_doc: Element = E.Report(
            E.Meta(
                E.Author("Anonymous"),  # TODO - get username from config?
                E.CreatedOn(timestamp()),
                E.Title(title),
                E.Headline(headline),
            ),
            E.Main(*_s.elements),
            version="1",
        )
        report_doc.set("{http://www.w3.org/XML/1998/namespace}id",
                       f"_{uuid.uuid4().hex}")

        # post_process and validate
        processed_report_doc = local_post_transform(
            report_doc, embedded="true()" if embedded else "false()")
        validate_report_doc(xml_doc=processed_report_doc)

        # convert to string
        report_str = etree.tounicode(processed_report_doc, pretty_print=True)
        log.debug("Built Report")
        log.info(report_str)
        return (report_str, _s.attachments)
Example #5
def run_api(run_config: RunnerConfig) -> RunResult:
    """Bootstrap the recursive calls into run"""
    script = api.Script.by_id(run_config.script_id)
    # is the script compatible with the client runner/api
    if not is_version_compatible(
            __version__, script.api_version, raise_exception=False):
        log.warning(
            f"Script developed for an older version of Datapane ({script.api_version}) - "
            + "this run may fail, please update.")

    # TODO - we should pull param defaults from script and add in the call
    script.call(run_config.env, **run_config.format())

    # create the RunResult
    script_result = str(api.Result.get()) if api.Result.exists() else None
    report_id = None
    try:
        report = api._report.pop()
        log.debug(f"Returning report id {report.id}")
        report_id = report.id
    except IndexError:
        log.debug(
            "User script didn't generate report - perhaps result / action only"
        )

    return RunResult(report_id=report_id, script_result=script_result)
Example #6
def is_version_compatible(
    provider_v_in: Union[str, v.Version],
    consumer_v_in: Union[str, v.Version],
    raise_exception: bool = True,
) -> bool:
    """
    Check provider supports consumer and throws exception if not

    Set the spec so that the consumer has to be within a micro/patch release of the provider
    NOTE - this isn't semver - breaks when have > v1 release as then treats minor as breaking,
    e.g. 2.2.5 is not compat with 2.1.5
    """
    consumer_v = v.Version(consumer_v_in) if isinstance(consumer_v_in, str) else consumer_v_in
    provider_v = v.Version(provider_v_in) if isinstance(provider_v_in, str) else provider_v_in

    provider_spec = SpecifierSet(f"~={provider_v.major}.{provider_v.minor}.0")

    log.debug(f"Provider spec {provider_spec}, Consumer version {consumer_v}")
    if consumer_v not in provider_spec:
        if raise_exception:
            raise VersionMismatch(
                f"Consumer ({consumer_v}) and Provider ({provider_spec}) API versions not compatible"
            )
        return False
    return True
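The `~=major.minor.0` specifier pins compatibility to the provider's minor release, which is exactly the behaviour the docstring's NOTE warns about. A quick standalone check with the packaging library (version values are illustrative):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

provider = Version("2.1.5")
spec = SpecifierSet(f"~={provider.major}.{provider.minor}.0")  # "~=2.1.0"

print(Version("2.1.9") in spec)  # True - same minor, newer patch is fine
print(Version("2.2.5") in spec)  # False - a minor bump is treated as breaking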
Example #7
@contextmanager
def script_env(env_dir: Path, env: SSDict) -> t.ContextManager[None]:
    """
    Change the local dir and add to site-path so relative files and imports work
    TODO
        - this is NOT thread-safe - unlikely we can run multiple concurrent scripts atm
        - this doesn't save envs as a call-stack directly - however it's handled implicitly via the Python stack anyway
    """
    cwd = os.getcwd()
    log.debug(f"[cd] {cwd} -> {env_dir}")
    if not env_dir.exists():
        env_dir.mkdir(parents=True)

    full_env_dir = str(env_dir.resolve())
    sys.path.insert(0, full_env_dir)
    os.chdir(full_env_dir)
    os.environ.update(env)
    try:
        yield
    finally:
        for env_key in env.keys():
            with suppress(KeyError):
                os.environ.pop(env_key)
        try:
            sys.path.remove(full_env_dir)
        except ValueError as e:
            raise CodeError(
                "sys.path not as expected - was it modified?") from e
        os.chdir(cwd)

        log.debug(f"[cd] {cwd} <- {env_dir}")
Example #8
    @classmethod
    def _save_obj(cls, data: t.Any, is_json: bool) -> DPTmpFile:
        # imported here as files is a very slow module to load due to nested imports
        from ..files import save

        fn = save(data, default_to_json=is_json)
        log.debug(f"Saved object to {fn} ({os.path.getsize(fn.file)} bytes)")
        return fn
Example #9
    def load_defaults(self, config_fn: NPath = DATAPANE_YAML) -> None:
        if not by_datapane:
            log.debug(f"loading parameter defaults from {config_fn}")
            # TODO - move dp-server parameter handling into common to use runnerconfig.format
            # NOTE - this is a bit hacky as we don't do any type formatting
            cfg = DatapaneCfg.create_initial(config_file=Path(config_fn))
            defaults = {p["name"]: p["default"] for p in cfg.parameters if "default" in p}
            self.update(defaults)
        else:
            log.debug("Ignoring call to load_defaults as by datapane")
Example #10
def setup_api(dp_token: str,
              dp_host: str,
              debug: bool = False,
              logs: TextIO = None):
    """Init the Datapane API for automated use"""
    # login, ping, and create the default env file for CMD usage
    api.login(token=dp_token, server=dp_host, cli_login=False)
    # setup input and config, logging, login, etc.
    verbosity = 2 if debug else 0
    _setup_dp_logging(verbosity=verbosity, logs_stream=logs)
    log.debug("Running DP on DP")
Example #11
def setup_api(dp_host: str,
              dp_token: str,
              debug: bool = False,
              logs: TextIO = None):
    """Init the Datapane API for automated use"""
    # setup input and config, logging, login, etc.
    config = c.Config(server=dp_host, token=dp_token, analytics=False)
    verbosity = 2 if debug else 0
    _setup_dp_logging(verbosity=verbosity, logs_stream=logs)
    c.init(config=config)
    # check can login/ping
    api.ping(config=config)
    log.debug("Running DP on DP")
Example #12
    def _gen_report(self,
                    embedded: bool,
                    title: str,
                    description: str = "Description",
                    author: str = "Anonymous") -> t.Tuple[str, t.List[Path]]:
        """Build XML report document"""
        # convert Pages to XML
        s = BuilderState(embedded)
        _s = reduce(lambda _s, p: p._to_xml(_s), self.pages, s)

        # add main structure and Meta
        report_doc: Element = E.Report(
            E.Meta(
                E.Author(author),  # TODO - get username from config?
                E.CreatedOn(timestamp()),
                E.Title(title),
                E.Description(description),
            ),
            E.Main(*_s.elements, type=self.report_type.value),
            version="1",
        )
        report_doc.set("{http://www.w3.org/XML/1998/namespace}id",
                       f"_{uuid.uuid4().hex}")

        # post_process and validate
        processed_report_doc = local_post_transform(
            report_doc, embedded="true()" if embedded else "false()")
        validate_report_doc(xml_doc=processed_report_doc)

        # check for any unsupported local features, e.g. DataTable
        # NOTE - we could eventually have different validators for local and published reports
        if embedded:
            uses_datatable: bool = processed_report_doc.xpath(
                "boolean(/Report/Main//DataTable)")
            if uses_datatable:
                raise UnsupportedFeature(
                    "DataTable component not supported when saving locally, please publish to a Datapane Server or use dp.Table"
                )

        # convert to string
        report_str = etree.tounicode(processed_report_doc, pretty_print=True)
        log.debug("Built Report")
        log.debug(report_str)
        return (report_str, _s.attachments)
Example #13
    def __init__(self,
                 *arg_blocks: BlockOrPrimitive,
                 blocks: t.Union[BlockDict, BlockList] = None,
                 **kw_blocks: BlockOrPrimitive):
        """
        Blocks can be created with the `name` parameter; if it's not set, one can be provided here using keyword args.
        Use the blocks dict parameter to add a dynamically generated set of named blocks, useful when working in Jupyter

        Args:
            *arg_blocks: List of blocks to add to document - if a name is not present it will be auto-generated
            blocks: Allows providing the document blocks as a single dictionary of named blocks
            **kw_blocks: Keyword argument set of blocks, whose block name will be that given in the keyword

        Returns:
            A `TextReport` object containing assets that can be uploaded for use with your online TextReport

        .. tip:: Blocks can be passed using either arg parameters or the `blocks` kwarg as a dictionary, e.g.
          `dp.TextReport(my_plot=plot, my_table=table)` or `dp.TextReport(blocks={"my_plot": plot, "my_table":table})`

        .. tip:: Create a dictionary first to hold your blocks so you can edit them dynamically, for instance when using Jupyter, and pass it via the `blocks` parameter
        """
        super().__init__()

        # set the blocks
        def _conv_block(name: str, block: BlockOrPrimitive) -> Block:
            x = wrap_block(block)
            x._set_name(name)
            return x

        _blocks: BlockList
        if isinstance(blocks, dict):
            _blocks = [_conv_block(k, v) for (k, v) in blocks.items()]
        elif isinstance(blocks, list):
            _blocks = blocks
        else:
            # use arg and kw blocks
            _blocks = list(arg_blocks)
            _blocks.extend([_conv_block(k, v) for (k, v) in kw_blocks.items()])

        if not _blocks:
            log.debug("No blocks provided - creating empty report")

        # set the pages
        self.pages = [Page(blocks=[Group(blocks=_blocks, name="top-group")])]
Example #14
def deploy(name: Optional[str], script: Optional[str], config: Optional[str],
           visibility: str):
    """Package and deploy a Python script or Jupyter notebook as a Datapane Script bundle"""
    script = script and Path(script)
    config = config and Path(config)
    init_kwargs = dict(visibility=visibility,
                       name=name,
                       script=script,
                       config_file=config)
    kwargs = {k: v for k, v in init_kwargs.items() if v is not None}

    # if not (script or config or sc.DatapaneCfg.exists()):
    #     raise AssertionError("Not a valid project dir")

    dp_cfg = scripts.DatapaneCfg.create_initial(**kwargs)
    log.debug(f"Packaging and uploading Datapane project {dp_cfg.name}")

    # start the build process
    with scripts.build_bundle(dp_cfg) as sdist:

        if EXTRA_OUT:
            tf: tarfile.TarFile
            log.debug("Bundle from following files:")
            with tarfile.open(sdist) as tf:
                for n in tf.getnames():
                    log.debug(f"  {n}")

        r: api.Script = api.Script.upload_pkg(sdist, dp_cfg)
        success_msg(
            f"Uploaded {click.format_filename(str(dp_cfg.script))} to {r.web_url}"
        )
Example #15
    def _gen_report(
        self,
        embedded: bool,
        title: str = "Title",
        description: str = "Description",
        author: str = "Anonymous",
    ) -> t.Tuple[str, t.List[Path]]:
        """Generate a report for saving/uploading"""
        report_doc, attachments = self._to_xml(embedded, title, description,
                                               author)

        # post_process and validate
        processed_report_doc = local_post_transform(
            report_doc, embedded="true()" if embedded else "false()")
        validate_report_doc(xml_doc=processed_report_doc)
        self._report_status_checks(processed_report_doc, embedded)

        # convert to string
        report_str = etree.tounicode(processed_report_doc)
        log.debug("Successfully Built Report")
        # log.debug(report_str)
        return (report_str, attachments)
Example #16
    @classmethod
    def create_initial(cls,
                       config_file: Path = None,
                       script: Path = None,
                       **kw) -> "DatapaneCfg":
        raw_config = {}

        if config_file:
            assert config_file.exists()
        else:
            config_file = DATAPANE_YAML

        if config_file.exists():
            # read config from the yaml file
            log.debug(f"Reading datapane config file at {config_file}")
            with config_file.open("r") as f:
                raw_config = yaml.safe_load(f)
        elif PYPROJECT_TOML.exists():
            # TODO - implement pyproject parsing
            log.warning(
                "pyproject.toml found but not currently supported - ignoring")
            raw_config = {}
        elif script:
            # we don't have a default config - perhaps in the script file
            # TODO - try read config from source-code
            abs_script = config_file.parent / script
            if script.suffix == ".ipynb":
                log.debug("Converting notebook")
                mod_code = extract_py_notebook(abs_script)
            else:
                mod_code = abs_script.read_text()
            log.debug("Reading config from python script/notebook")
            log.debug(mod_code)

        # overwrite config with command-line options
        if script:
            raw_config.update(script=script)
        raw_config.update(kw)
        readme = config_file.parent / "README.md"
        if readme.exists():
            raw_config["description"] = readme.read_text()
        elif "description" not in raw_config:
            raw_config["description"] = cls.description

        dp_cfg = dacite.from_dict(cls,
                                  data=raw_config,
                                  config=dacite.Config(cast=[Path]))
        return dp_cfg
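The final step relies on dacite to build the dataclass from the merged dict, casting plain strings to Path. A self-contained illustration of that pattern (the Cfg dataclass below is invented for the example, not Datapane's):

from dataclasses import dataclass
from pathlib import Path

import dacite

@dataclass
class Cfg:
    name: str
    script: Path
    description: str = ""

cfg = dacite.from_dict(Cfg, data={"name": "demo", "script": "run.py"}, config=dacite.Config(cast=[Path]))
print(type(cfg.script))  # a pathlib Path, cast from the string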
Example #17
    def __getattr__(self, attr):
        if self.has_dto and not attr.startswith("__"):
            log.debug(f"Proxying '{attr}' lookup to DTO")
            return getattr(self._dto, attr)
        # Default behaviour
        return self.__getattribute__(attr)
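This __getattr__ implements a simple DTO proxy: any attribute not found on the object itself is forwarded to the wrapped DTO. A self-contained sketch of the same pattern (class and attribute names are invented for illustration):

from types import SimpleNamespace

class DTOProxy:
    def __init__(self, dto=None):
        self._dto = dto

    @property
    def has_dto(self) -> bool:
        return self._dto is not None

    def __getattr__(self, attr):
        # __getattr__ only runs when normal lookup fails, so real attributes always win
        if self.has_dto and not attr.startswith("__"):
            return getattr(self._dto, attr)
        return self.__getattribute__(attr)

p = DTOProxy(SimpleNamespace(web_url="https://example.com/reports/1"))
print(p.web_url)  # forwarded to the DTO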
Example #18
def save(obj: Any, default_to_json: bool = False) -> DPTmpFile:
    fn = get_wrapper(obj, default_to_json=default_to_json,
                     error_msg=None).write(obj)
    log.debug(f"Saved object to {fn} ({os.path.getsize(fn.file)} bytes)")
    return fn
Example #19
    def refresh(self):
        """Update the local representation of the object"""
        self.dto = self.res.get()
        log.debug(f"Refreshed {self.url}")
Example #20
    def delete(self):
        self.res.delete()
        log.debug(f"Deleted object {self.url}")
Example #21
    def update(self, **kwargs):
        # filter None values
        kwargs = {k: v for (k, v) in kwargs.items() if v is not None}
        self.res.patch(**kwargs)
        self.refresh()
        log.debug(f"Updated object {self.url}")