def _save_df(cls, df: pd.DataFrame) -> DPTmpFile:
    fn = DPTmpFile(ArrowFormat.ext)
    df = to_df(df)
    process_df(df)
    ArrowFormat.save_file(fn.name, df)
    log.debug(f"Saved df to {fn} ({os.path.getsize(fn.file)} bytes)")
    return fn

def _save_obj(cls, data: t.Any, as_json: bool) -> DPTmpFile:
    # import here as a very slow module due to nested imports
    from ..files import save

    fn = save(data, default_to_json=as_json)
    log.debug(f"Saved object to {fn} ({os.path.getsize(fn.file)} bytes)")
    return fn

def is_analytics_disabled() -> bool:
    """Determine the initial state for analytics if not already set"""
    # disable if globally disabled or in certain envs
    if _NO_ANALYTICS_FILE.exists() or ON_DATAPANE or _IN_PYTEST or _IN_DPSERVER:
        log.debug("Analytics disabled")
        return True
    return False

def init(config_env: str = "default", config: t.Optional[Config] = None):
    """Init an API config - this MUST handle being called multiple times"""
    if get_config() is not None:
        log.debug("Reinitialising client config")

    if config:
        set_config(config)
    else:
        config_f = load_from_envfile(config_env)
        log.debug(f"Loaded client environment from {config_f}")

def save_df(df: pd.DataFrame) -> DPTmpFile:
    """Export a df for uploading"""
    fn = DPTmpFile(ArrowFormat.ext)
    # create a copy of the df to process
    df = to_df(df)
    # process_df called in Arrow.save_file
    # process_df(df)
    ArrowFormat.save_file(fn.name, df)
    log.debug(f"Saved df to {fn} ({os.path.getsize(fn.file)} bytes)")
    return fn

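# Hedged usage sketch (not in the original source): assumes `save_df`, `DPTmpFile`,
# and pandas are available in this namespace; the helper name `_example_save_df`
# is hypothetical.
def _example_save_df() -> None:
    import pandas as pd

    df = pd.DataFrame({"x": [1, 2, 3], "y": ["a", "b", "c"]})
    tmp_file = save_df(df)  # copies the df and serialises it to an Arrow temp file
    print(tmp_file.name)    # path of the temporary Arrow file ready for upload
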
def init(config_env: str = "default", config: t.Optional[Config] = None) -> Config:
    """
    Init an API config - this MUST handle being called multiple times
    and only from the main-thread
    """
    if get_config() is not None:
        log.debug("Reinitialising client config")

    if config:
        set_config(config)
    else:
        config = Config.load(config_env)

    return config

def load(cls, env: str = "default") -> "Config":
    """Load config for an environment and set globally"""
    config_f = cls.get_config_file(env)
    if not config_f.exists():
        cls.create_default(env, config_f)

    with config_f.open("r") as f:
        c_yaml = yaml.safe_load(f)

    # load config obj from file
    config = dacite.from_dict(Config, c_yaml)
    config._env = env
    config._path = config_f
    log.debug(f"Loaded client environment from {config._path}")

    # check if stored file is out of date
    config.upgrade_config_format()

    # set to the global state
    set_config(config)
    return config

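# Hedged usage sketch (not in the original source): assumes the Config-returning
# `init` variant above plus `get_config` are importable together; `_example_init`
# is a hypothetical helper name. It shows the intended flow: `init` either accepts
# an explicit Config or falls back to `Config.load`, which also sets the global state.
def _example_init() -> None:
    cfg = init(config_env="default")  # loads the "default" env file and sets it globally
    assert get_config() is cfg        # assumes get_config returns the instance set by set_config
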
def get_config_file(env: str = DEFAULT_ENV) -> Path:
    config_f = APP_DIR / f"{env}.yaml"
    if not config_f.exists():
        config_f.write_text(get_default_config())
        log.debug(f"Creating default config file at {config_f}")
    return config_f

def update(self, **kwargs):
    # filter None values
    kwargs = {k: v for (k, v) in kwargs.items() if v is not None}
    self.res.patch(**kwargs)
    self.refresh()
    log.debug(f"Updated object {self.url}")

def delete(self):
    """Delete the object on the server"""
    self.res.delete()
    log.debug(f"Deleted object {self.url}")

def refresh(self):
    """Refresh the object with the latest data from the Datapane Server"""
    self.dto = self.res.get()
    log.debug(f"Refreshed {self.url}")

def __getattr__(self, attr):
    if self.has_dto and not attr.startswith("__"):
        log.debug(f"Proxying '{attr}' lookup to DTO")
        return getattr(self._dto, attr)
    # Default behaviour
    return self.__getattribute__(attr)

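# Illustrative standalone sketch (hypothetical class, not in the original source)
# of the DTO-proxying pattern used by __getattr__ above: attribute lookups that
# fail on the wrapper are forwarded to the wrapped DTO when one is present.
class _ExampleDTOProxy:
    def __init__(self, dto=None):
        self._dto = dto

    @property
    def has_dto(self) -> bool:
        return self._dto is not None

    def __getattr__(self, attr):
        # only reached when normal lookup fails on the wrapper itself
        if self.has_dto and not attr.startswith("__"):
            return getattr(self._dto, attr)
        return self.__getattribute__(attr)
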
def refresh(self):
    """Refresh the object with the latest data from the Datapane Server
    - override to pull updated fields from dto to top-level
    """
    self.dto = self.res.get()
    log.debug(f"Refreshed {self.url}")