Example #1
0
    def get_or_refresh(self, cache_key: str, refresher: Callable, *args, max_age: int = DEFAULT_REFRESH_INTERVAL) \
            -> Tuple[int, Any]:
        """
        Return the cached value for `cache_key`, refreshing it first when it is
        missing or older than `max_age` milliseconds.

        :param cache_key: Key to look up in the local cache.
        :param refresher: Callable invoked with *args to produce a fresh value
                when the cached one is stale or absent.
        :param max_age: Maximum acceptable age of the cached value, in millis.
        :return: (last-write time in millis since epoch, value).
        """
        assert isinstance(
            max_age, int
        ), "Invalid max_age provided for session, it must be of type <int>"

        last_write, val = self.get(cache_key)
        fresh_enough = Utils.millis_since_epoch() - last_write <= max_age

        if fresh_enough and val:
            log.info(f"Value for key: {cache_key} was found in cache.")
            return last_write, val

        # Stale or missing: fetch a new value and persist it before returning.
        new_val = refresher(*args)
        self.write(cache_key, new_val)
        log.info(f"{cache_key} not found in cache. It was fetched.")
        return Utils.millis_since_epoch(), new_val
Example #2
0
class FiggyMetrics(BaseModel):
    """Accumulates per-command usage counts for a single user between reports."""

    # Key under which each command's invocation count is stored inside `metrics`.
    COUNT_KEY = 'count'
    user_id: str
    # Maps command name -> per-command metric dict (currently only COUNT_KEY).
    metrics: Dict[str, Dict] = {}
    # NOTE(review): this default is evaluated once, at class-definition (import)
    # time — every instance built without an explicit value shares that single
    # timestamp. If a per-instance "now" is intended, a default_factory
    # (pydantic `Field(default_factory=Utils.millis_since_epoch)`) is needed;
    # confirm which behavior callers rely on.
    last_report: int = Utils.millis_since_epoch()

    def increment_count(self, command: str) -> None:
        """Increment the invocation count recorded for `command`."""
        metric = self.metrics.get(command, {})
        metric[self.COUNT_KEY] = metric.get(self.COUNT_KEY, 0) + 1
        self.metrics[command] = metric
Example #3
0
    def write(self, cache_key: str, object: Any) -> None:
        """
        Write an object to a local cache file. This will overwrite any existing values.
        :param cache_key: str - The key to write your object to local cache under. You will use this to lookup from
                the cache later.
        :param object: Any - Object to write, must be serializable by `json` library
        """
        try:
            # Sets are not JSON serializable, so persist them as lists.
            object = list(object) if isinstance(object, Set) else object
            contents: Dict = jsonpickle.decode(self.__read())
            contents[cache_key] = {
                self._STORE_KEY: object,
                self._LAST_WRITE_KEY: Utils.millis_since_epoch(),
                self._LAST_REFRESH_KEY: Utils.millis_since_epoch()
            }

            self.__write(jsonpickle.dumps(contents))
        except Exception as e:
            # Best-effort write: report through the module logger (as the rest
            # of this class does) rather than print(), so failures land in the
            # same place as other cache diagnostics.
            log.error(f"Error writing to cache key: {cache_key}: {e}")
Example #4
0
    def get_parameter_names(self) -> Set[str]:
        """
        Looks up local cached configs, then queries new config names from the remote cache, merges the two, and
        finally updates the local cache. This ensures very fast bootstrap times b/c querying thousands of parameter
        names from a remote cache can take a bit too much time. `figgy` does not accept slow performance.

        :return: Set[str] names of all configs stored in ParameterStore.
        """
        cache_key = f'{self._run_env.env}-{self._PS_NAME_CACHE_KEY}'

        # Find last cache full refresh date
        last_refresh = self._cache_mgr.last_refresh(cache_key)

        # Do a full refresh if cache is too old.
        if Utils.millis_since_epoch(
        ) - last_refresh > self.__CACHE_REFRESH_INTERVAL:
            configs: Set[ConfigItem] = self._config_dao.get_config_names_after(
                0)
            all_parameters: Set[str] = set(
                [x.name for x in configs if x.state == ConfigState.ACTIVE])
            self._cache_mgr.write(cache_key, all_parameters)
        else:

            # Get items from cache
            last_write, cached_contents = self._cache_mgr.get(cache_key)

            # Find new items added to remote cache table since last local cache write
            updated_items: Set[
                ConfigItem] = self._config_dao.get_config_names_after(
                    last_write)

            # Sort incremental updates into names to add / names to remove.
            added_names, deleted_names = set(), set()
            for item in updated_items:
                if item.state is ConfigState.ACTIVE:
                    added_names.add(item.name)
                elif item.state is ConfigState.DELETED:
                    deleted_names.add(item.name)
                else:
                    # Default to add if no state set
                    added_names.add(item.name)

            self._cache_mgr.append(cache_key, added_names)
            self._cache_mgr.delete(cache_key, deleted_names)

            # Distinct labels so these three debug lines are distinguishable.
            log.debug(f"Cached: {cached_contents}")
            log.debug(f"Deleted: {deleted_names}")
            log.debug(f"Added: {added_names}")

            all_parameters = set(cached_contents) - deleted_names | added_names

        return all_parameters
Example #5
0
    def delete(self, cache_key: str, objects: Union[Dict, Set[Any]]):
        """
        If any of these items exist in the cache for this set of stored values, delete them.
        :param cache_key: Key to potentially delete items from
        :param objects: *Keys* in the cached DICT to delete, or items in a cached LIST to delete.
        """

        if isinstance(objects, Set):
            objects = list(objects)

        if len(objects) > 0:
            log.info(
                f'Deleting {len(objects)} items from local cache: {objects}')

            contents: Dict = jsonpickle.decode(self.__read())
            cache = contents.get(cache_key, {})
            refresh_time = cache.get(self._LAST_REFRESH_KEY, 0)
            cache_obj = cache.get(self._STORE_KEY)
            log.info(f'In cache: {cache_obj}')

            # Parens make the precedence explicit: a None cache matches either branch.
            if (isinstance(cache_obj, Dict)
                    and isinstance(objects, Dict)) or cache_obj is None:
                log.info(f'Cache Obj is a dict')
                if cache_obj:
                    for obj in objects:
                        # pop() with a default so deleting a key that was never
                        # cached is a no-op instead of raising KeyError — the
                        # docstring promises "if any of these items exist".
                        cache_obj.pop(obj, None)

            elif (isinstance(cache_obj, List)
                    and isinstance(objects, List)) or cache_obj is None:
                log.info(f"Cache obj is a list..")
                if cache_obj:
                    cache_obj = list(set(cache_obj) - set(objects))

            else:
                raise RuntimeError(
                    "Invalid state detected. Cache contains an invalid type that cannot be appended to, "
                    "or the type provided does not match the type stored in the cache."
                )

            log.info(f'New cache obj: {cache_obj}')
            contents[cache_key] = {
                self._STORE_KEY: cache_obj,
                self._LAST_WRITE_KEY: Utils.millis_since_epoch(),
                self._LAST_REFRESH_KEY: refresh_time,
            }

            self.__write(jsonpickle.dumps(contents))
        else:
            # Was a copy-paste of append()'s message ("add to cache").
            log.info('No cached items found to delete from cache.')
Example #6
0
    def append(self, cache_key: str, objects: Union[Dict, Set[Any]]):
        """
        Add a set or dictionary of items to the existing cache. Must be the SAME type as what is stored in the
        existing cache under cache_key.

        Lists not supported due to issues with duplicates continually growing in the local cache :)

        :param cache_key: Key to append or merge items with
        :param objects: Objects to add.
        """

        if isinstance(objects, Set):
            objects = list(objects)

        if len(objects) > 0:
            log.debug(
                f'Appending {len(objects)} items to local cache: {objects}')

            contents: Dict = jsonpickle.decode(self.__read())
            cache = contents.get(cache_key, {})
            refresh_time = cache.get(self._LAST_REFRESH_KEY, 0)
            cache_obj = cache.get(self._STORE_KEY)

            # The original checked `or cache_obj is None` in BOTH branches; the
            # second check was unreachable. Handle the empty-cache case once.
            if cache_obj is None:
                cache_obj = objects
            elif isinstance(cache_obj, Dict) and isinstance(objects, Dict):
                cache_obj.update(objects)
            elif isinstance(cache_obj, List) and isinstance(objects, List):
                # De-dupe on merge so the cached list cannot grow unboundedly.
                cache_obj = list(set(cache_obj +
                                     objects)) if cache_obj else objects
            else:
                raise RuntimeError(
                    "Invalid state detected. Cache contains an invalid type that cannot be appended to, "
                    "or the type provided does not match the type stored in the cache."
                )

            contents[cache_key] = {
                self._STORE_KEY: cache_obj,
                self._LAST_WRITE_KEY: Utils.millis_since_epoch(),
                self._LAST_REFRESH_KEY: refresh_time,
            }

            self.__write(jsonpickle.dumps(contents))
        else:
            log.info('No cached items found to add to cache.')
Example #7
0
        def inner(self, *args, **kwargs):
            """
            Wraps `function`: increments a locally-cached usage counter for the
            invoked command and periodically ships accumulated metrics, then
            always invokes the wrapped function.
            """
            # Honor the opt-out env var: run the command completely untracked.
            if os.environ.get(
                    AnonymousUsageTracker._DISABLE_METRICS_ENV_VAR) == "true":
                return function(self, *args, **kwargs)

            command = getattr(self, 'type', None)
            log.info(f'Got command {command}')

            if command:
                command = command.name
                cache = CacheManager(AnonymousUsageTracker._CACHE_NAME)

                # Best-effort user id; fall back to placeholders when defaults
                # are absent or of an unexpected type.
                if hasattr(self, 'context') and hasattr(
                        self.context,
                        'defaults') and self.context.defaults is not None:
                    if isinstance(self.context.defaults, CLIDefaults):
                        user_id = self.context.defaults.user_id
                    else:
                        user_id = "EmptyDefaults"
                else:
                    user_id = "NoOne"

                last_write, metrics = cache.get(
                    AnonymousUsageTracker._METRICS_KEY,
                    default=FiggyMetrics(user_id=user_id))

                metrics.increment_count(command)
                if Utils.millis_since_epoch(
                ) - metrics.last_report > AnonymousUsageTracker.REPORT_FREQUENCY:
                    defaults = FiggySetup.stc_get_defaults(skip=True)
                    if defaults and defaults.usage_tracking:
                        # Ship it async. If it don't worky, oh well :shruggie:
                        with ThreadPoolExecutor(max_workers=1) as pool:
                            pool.submit(AnonymousUsageTracker.report_usage,
                                        metrics)
                            log.info(
                                f'Reporting anonymous usage for metrics: {metrics}'
                            )
                            # Reset local metrics now that they were reported.
                            cache.write(AnonymousUsageTracker._METRICS_KEY,
                                        FiggyMetrics(user_id=user_id))
                            return function(self, *args, **kwargs)
                else:
                    cache.write(AnonymousUsageTracker._METRICS_KEY, metrics)

            return function(self, *args, **kwargs)
Example #8
0
    def main(self):
        """
        Build and run the interactive "browse" form: a parameter tree widget
        plus two read-only panes (parameter value, current selections), then
        record the user's selected and to-be-deleted parameter paths onto
        `self._browse`.
        """
        global npy_form
        self._browse_box = Form(
            name=
            "Browse Parameters: - 'e' to expand, 'c' to collapse, <s> to select, <d> to delete, "
            "<Tab> & <Shift+Tab> moves cursor between `OK` and `Tree` views.")

        # Value Box Relative Location
        # Negative relx/rely position the widget relative to the form's
        # right/bottom edges.
        val_relx, val_rely = int(self._browse_box.columns /
                                 2) * -1, int(self._browse_box.lines - 1) * -1
        val_max_height = int(self._browse_box.lines / 2) - self.BUFFER
        val_max_width = int(self._browse_box.columns / 2) - self.BUFFER

        # Selection Box Relative Location
        sel_relx, sel_rely = int(
            self._browse_box.columns /
            2) * -1, int(self._browse_box.lines / 2 + 1) * -1
        sel_max_height = int(self._browse_box.lines / 2) - self.BUFFER
        sel_max_width = int(self._browse_box.columns / 2) - self.BUFFER

        # Left half of the form: the navigable parameter tree.
        tree = self._browse_box.add(LogicalMLTree,
                                    on_select_callable=self.on_select,
                                    on_delete_callable=self.on_delete,
                                    max_width=self._browse_box.columns +
                                    val_relx - self.BUFFER)

        self.value_box = self._browse_box.add_widget(SelectedValueBox,
                                                     name="Parameter Value: ",
                                                     relx=val_relx,
                                                     rely=val_rely,
                                                     max_height=val_max_height,
                                                     max_width=val_max_width,
                                                     allow_filtering=False,
                                                     editable=False)

        self.select_state_box = self._browse_box.add_widget(
            SelectedValueBox,
            name="Selections: ",
            relx=sel_relx,
            rely=sel_rely,
            max_height=sel_max_height,
            max_width=sel_max_width,
            allow_filtering=False,
            editable=False)

        # Root of the tree; ignoreRoot hides it so top-level children act as roots.
        td = DeletableNPSTreeData(content='Root',
                                  selectable=True,
                                  expanded=True,
                                  ignoreRoot=True)
        start = Utils.millis_since_epoch()
        children = []
        if self._browse.prefix:
            # A --prefix was provided: the tree starts from that single node.
            prefix_child = td.newChild(content=self._browse.prefix,
                                       selectable=False,
                                       expanded=False)
            children = [prefix_child]
        else:
            # NOTE(review): `prefix` here is a free name, not defined in this
            # method — presumably a module-level arg/constant with a `.name`
            # attribute; confirm it is in scope at runtime.
            log.info(
                f"--{prefix.name} missing, defaulting to normal browse tree.")

            for namespace in self._config_view.get_authorized_namespaces():
                child = td.newChild(content=namespace,
                                    selectable=False,
                                    expanded=False)
                children.append(child)

        for child in children:
            self._browse.dirs.add(child.getContent())

        futures = []

        # Populate each top-level subtree concurrently; parameter lookups are
        # I/O-bound, so threads overlap the waits.
        with ThreadPoolExecutor(max_workers=10) as pool:
            for child in children:
                futures.append(
                    pool.submit(self._browse.add_children, child.getContent(),
                                child))

        # Block until every subtree has been populated.
        for future in as_completed(futures):
            pass

        tree.values = td
        # Hands control to the form; blocks until the user exits the UI.
        self._browse_box.edit()
        selection_objs = tree.get_selected_objects(return_node=True)

        # Rebuild each selected node's full path by walking leaf -> root,
        # prepending each ancestor's content (root itself is excluded).
        for selection in selection_objs:
            full_path = ''
            while selection._parent is not None:
                full_path = selection.content + full_path
                selection = selection._parent
            self._browse.selected_ps_paths.append(full_path)

        # Same walk for nodes the user marked for deletion.
        delete_objs = tree.get_objects_to_delete(return_node=True)
        for selection in delete_objs:
            full_path = ''
            while selection._parent is not None:
                full_path = selection.content + full_path
                selection = selection._parent
            self._browse.deleted_ps_paths.append(full_path)