def get_parameter_logs(self, refresh: bool = False) -> PaginatedResponse:
    """Return one page of usage activity for a single parameter.

    Reads paging/sorting/filtering options from request params:
      page, size      -- zero-based page index and page size
      sort-key        -- UserLog attribute to sort by (default 'time')
      sort-direction  -- 'asc' or 'desc'
      before          -- keep only logs with time strictly before this value
      name (required) -- parameter whose activity is listed
      filter          -- optional text filter, matched against hydrated logs

    :param refresh: when True, bypass cached usage data.
    :return: PaginatedResponse holding the hydrated page of matching logs.
    """
    page: int = int(self.get_param('page', default=0, required=False))
    size: int = int(self.get_param('size', default=15))
    sort_key: str = self.get_param('sort-key', default='time')
    sort_direction: str = self.get_param('sort-direction', default='asc')
    before: int = self.get_param('before', required=False)
    parameter: str = self.get_param('name', required=True)
    # Renamed from `filter` to avoid shadowing the builtin.
    filter_text: str = self.get_param('filter', required=False)  # by default filter by date.

    matching_logs: List[UserLog] = self._usage(refresh).get_parameter_activity(parameter)
    log.info(f'Got matching logs {matching_logs}')

    if before:
        # NOTE(review): unlike page/size, `before` is not cast via int() --
        # confirm get_param returns a value comparable with l.time.
        matching_logs = [l for l in matching_logs if l.time < before]

    if filter_text:
        # The filter matches against hydrated values, so hydrate every
        # candidate up-front using a thread pool.
        usage = self._usage()  # hoisted: was re-resolved once per log
        with ThreadPool(processes=self.MAX_THREADS) as pool:
            futures = [
                pool.apply_async(usage.hydrate_user_log, args=(user_log,))
                for user_log in matching_logs
            ]
            matching_logs = [f.get() for f in futures]
        matching_logs = [
            l for l in matching_logs if Utils.property_matches(l, filter_text)
        ]

    sorted_logs = sorted(matching_logs,
                         key=lambda x: x.__dict__.get(sort_key),
                         reverse=sort_direction != 'asc')
    sorted_page = sorted_logs[page * size: page * size + size]
    total = len(matching_logs)

    # Now that we have the page, hydrate values (redundant but harmless if
    # the filter path above already hydrated them).
    usage = self._usage()
    sorted_page = [usage.hydrate_user_log(user_log) for user_log in sorted_page]
    return PaginatedResponse(data=sorted_page,
                             total=total,
                             page_size=size,
                             page_number=page)
def get_usage_logs(self, not_retrieved_since: int, filter: str = None) -> List[UsageLog]:
    """Return logs for parameters that are stale or never retrieved.

    A parameter is "stale" when it has a usage log before
    ``not_retrieved_since`` but none after it; parameters that are active
    but have never been retrieved at all are included as empty logs.

    :param not_retrieved_since: epoch cutoff separating old/new activity.
    :param filter: optional text filter applied via Utils.property_matches.
    :return: stale logs (for still-active parameters) plus empty logs for
        never-retrieved active parameters.
    """
    # NOTE(review): these dicts are keyed on UsageLog objects and compared
    # with `usage_log > old_names.get(usage_log, 0)` -- this only works if
    # UsageLog defines __hash__/__eq__ (presumably by parameter name) and
    # __gt__ (presumably by time), and if it compares > 0. Confirm against
    # the UsageLog class. Annotations fixed from the malformed Dict[UsageLog].
    old_names: Dict[UsageLog, UsageLog] = {}
    new_names: Dict[UsageLog, UsageLog] = {}
    # Find old names, if you find 2 logs with the same name, keep the latest log.
    # This is using a generator so it is 'O'n time complexity.
    # NOTE(review): self._usage is used here as an attribute, while sibling
    # methods call self._usage(...) -- verify which form is correct.
    for usage_log in self._usage.find_logs_by_time(
            before=not_retrieved_since, filter=filter):
        if usage_log > old_names.get(usage_log, 0):
            old_names[usage_log] = usage_log
    # Same dedup pass over activity after the cutoff.
    for usage_log in self._usage.find_logs_by_time(
            after=not_retrieved_since, filter=filter):
        if usage_log > new_names.get(usage_log, 0):
            new_names[usage_log] = usage_log
    old_names_set: Set[UsageLog] = set(old_names.values())
    new_names_set: Set[UsageLog] = set(new_names.values())
    log.info(
        f'Found old log names: {old_names_set} and new names: {new_names_set}'
    )
    # Seen before the cutoff but not after => stale.
    stale_fig_logs = list(old_names_set - new_names_set)
    # Remove logs for any Figs that have already been deleted.
    active_parameters = self._cfg.get_parameter_names()
    # All figs that have been retrieved at least once and are still active.
    stale_fig_logs = [
        stale_log for stale_log in stale_fig_logs
        if stale_log.parameter_name in active_parameters
    ]
    stale_fig_names: Set[str] = set(
        [stale_log.parameter_name for stale_log in stale_fig_logs])
    # Find figs never retrieved but currently active
    never_retrieved = active_parameters.difference(stale_fig_names)
    # Represent never-retrieved parameters as empty placeholder logs.
    never_retrieved_logs: List[UsageLog] = [
        UsageLog.empty(name) for name in never_retrieved
    ]
    if filter:
        never_retrieved_logs = [
            l for l in never_retrieved_logs
            if Utils.property_matches(l, filter)
        ]
    return stale_fig_logs + never_retrieved_logs
def get_audit_logs(
        self, refresh: bool = False) -> Union[PaginatedResponse, List[AuditLog]]:
    """Fetch audit logs, as either one page or the full sorted list.

    Request params: page/size (paging), sort-key/sort-direction (ordering),
    before/after (time window), type and name (scoping), filter (text match),
    and one-page ('true' returns the entire sorted, unpaginated list).

    :param refresh: when True, bypass cached audit data.
    :return: PaginatedResponse with the hydrated page, or the full sorted
        list of AuditLogs when one-page is requested.
    """
    page_idx: int = int(self.get_param('page', default=0, required=False))
    page_size: int = int(self.get_param('size', default=15))
    text_filter: str = self.get_param(
        'filter', required=False)  # by default filter by date.
    sort_key: str = self.get_param('sort-key', default='time')
    sort_direction: str = self.get_param('sort-direction', default='asc')
    before: int = self.get_param('before', required=False)
    after: int = self.get_param('after', required=False)
    parameter_type: str = self.get_param('type', required=False)
    parameter_name: str = self.get_param('name', required=False)
    want_everything: bool = self.get_param(
        'one-page', required=False, default='false').lower() == 'true'

    # No name given: delegate the whole match (type/filter/window) to the
    # audit store. Name given: fetch that parameter's logs and filter here.
    if not parameter_name:
        matching_logs: List[AuditLog] = self._audit(
            refresh).get_audit_logs_matching(parameter_type=parameter_type,
                                             filter=text_filter,
                                             before=before,
                                             after=after)
    else:
        matching_logs = self._audit(refresh).get_parameter_logs(parameter_name)
        if text_filter:
            matching_logs = [
                entry for entry in matching_logs
                if Utils.property_matches(entry, text_filter)
            ]

    descending = sort_direction != 'asc'
    sorted_logs = sorted(matching_logs,
                         key=lambda entry: entry.__dict__.get(sort_key),
                         reverse=descending)
    start = page_idx * page_size
    total = len(matching_logs)
    # Hydrate only the requested slice before returning.
    sorted_page = [
        self._audit().hydrate_audit_log(entry)
        for entry in sorted_logs[start:start + page_size]
    ]

    if want_everything:
        return sorted_logs
    return PaginatedResponse(data=sorted_page,
                             total=total,
                             page_size=page_size,
                             page_number=page_idx)